[heap] Refactor usages of the InNewSpace() predicate
This replaces InNewSpace with InYoungGeneration, which is a prerequisite for
young large objects.

Additional changes:
- The FROM_SPACE and TO_SPACE flags are renamed to FROM_PAGE and TO_PAGE.
- A new LARGE_PAGE flag is added.
- The external string table is refactored to track young strings instead of
  new space strings.

Bug: chromium:924547
Change-Id: Ia4e3ba1b72995c3400257a1f98559f091533e811
Reviewed-on: https://chromium-review.googlesource.com/c/1437274
Reviewed-by: Benedikt Meurer <bmeurer@chromium.org>
Reviewed-by: Yang Guo <yangguo@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Commit-Queue: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#59156}
Commit: 2423deb554
Parent: 529663fcc3
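Illustrative note (not part of the commit): the renamed predicate boils down to a page-flag test. The small standalone C++ sketch below mirrors the bit constants this change introduces in heap_internals::MemoryChunk (kFromPageBit, kToPageBit); the PageFlags struct and the main() harness are made up for illustration only. A chunk counts as young generation when either the FROM_PAGE or the TO_PAGE bit is set, which is what lets young large pages reuse the same predicate once they carry these flags.

#include <cstdint>
#include <iostream>

using std::uintptr_t;

// Bit values mirror heap_internals::MemoryChunk in this change.
constexpr uintptr_t kFromPageBit = uintptr_t{1} << 3;  // FROM_PAGE (was kFromSpaceBit)
constexpr uintptr_t kToPageBit = uintptr_t{1} << 4;    // TO_PAGE (was kToSpaceBit)

// Hypothetical stand-in for a memory chunk: only the flag word matters here.
struct PageFlags {
  uintptr_t flags;
  // InYoungGeneration() replaces the old InNewSpace(): from-pages, to-pages and
  // (after this change) young large pages all set one of the two bits.
  bool InYoungGeneration() const { return (flags & (kFromPageBit | kToPageBit)) != 0; }
  bool IsFromPage() const { return (flags & kFromPageBit) != 0; }
  bool IsToPage() const { return (flags & kToPageBit) != 0; }
};

int main() {
  PageFlags to_page{kToPageBit};  // e.g. a to-space page or a young large page
  PageFlags old_page{0};          // an old-generation page: neither bit set
  std::cout << to_page.InYoungGeneration() << ' '     // prints 1
            << old_page.InYoungGeneration() << '\n';  // prints 0
}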
@@ -6578,7 +6578,7 @@ bool v8::String::CanMakeExternal() {
}

// Only old space strings should be externalized.
return !i::Heap::InNewSpace(obj);
return !i::Heap::InYoungGeneration(obj);
}

bool v8::String::StringEquals(Local<String> that) {
@@ -361,7 +361,7 @@ TF_BUILTIN(RecordWrite, RecordWriteCodeStubAssembler) {

BIND(&generational_wb);
{
Label test_old_to_new_flags(this);
Label test_old_to_young_flags(this);
Label store_buffer_exit(this), store_buffer_incremental_wb(this);

// When incremental marking is not on, we skip cross generation pointer
@@ -371,23 +371,22 @@ TF_BUILTIN(RecordWrite, RecordWriteCodeStubAssembler) {
// `src/compiler/<arch>/code-generator-<arch>.cc` before calling this stub,
// which serves as the cross generation checking.
Node* slot = Parameter(Descriptor::kSlot);
Branch(IsMarking(), &test_old_to_new_flags, &store_buffer_exit);
Branch(IsMarking(), &test_old_to_young_flags, &store_buffer_exit);

BIND(&test_old_to_new_flags);
BIND(&test_old_to_young_flags);
{
Node* value = Load(MachineType::Pointer(), slot);

// TODO(albertnetymk): Try to cache the page flag for value and object,
// instead of calling IsPageFlagSet each time.
Node* value_in_new_space =
IsPageFlagSet(value, MemoryChunk::kIsInNewSpaceMask);
GotoIfNot(value_in_new_space, &incremental_wb);
Node* value_is_young =
IsPageFlagSet(value, MemoryChunk::kIsInYoungGenerationMask);
GotoIfNot(value_is_young, &incremental_wb);

Node* object = BitcastTaggedToWord(Parameter(Descriptor::kObject));
Node* object_in_new_space =
IsPageFlagSet(object, MemoryChunk::kIsInNewSpaceMask);
Branch(object_in_new_space, &incremental_wb,
&store_buffer_incremental_wb);
Node* object_is_young =
IsPageFlagSet(object, MemoryChunk::kIsInYoungGenerationMask);
Branch(object_is_young, &incremental_wb, &store_buffer_incremental_wb);
}

BIND(&store_buffer_exit);
@@ -10712,9 +10712,16 @@ void CodeStubAssembler::TrapAllocationMemento(Node* object,
TNode<IntPtrT> page_flags =
UncheckedCast<IntPtrT>(Load(MachineType::IntPtr(), object_page,
IntPtrConstant(Page::kFlagsOffset)));
GotoIf(WordEqual(WordAnd(page_flags,
IntPtrConstant(MemoryChunk::kIsInNewSpaceMask)),
IntPtrConstant(0)),
GotoIf(WordEqual(
WordAnd(page_flags,
IntPtrConstant(MemoryChunk::kIsInYoungGenerationMask)),
IntPtrConstant(0)),
&no_memento_found);
// TODO(ulan): Support allocation memento for a large object by allocating
// additional word for the memento after the large object.
GotoIf(WordNotEqual(WordAnd(page_flags,
IntPtrConstant(MemoryChunk::kIsLargePageMask)),
IntPtrConstant(0)),
&no_memento_found);
}
@@ -72,10 +72,11 @@ class IA32OperandGenerator final : public OperandGenerator {
// really have to this here, then we need to find a way to put this
// information on the HeapConstant node already.
#if 0
// Constants in new space cannot be used as immediates in V8 because
// the GC does not scan code objects when collecting the new generation.
// Constants in young generation cannot be used as immediates in V8
// because the GC does not scan code objects when collecting the young
// generation.
Handle<HeapObject> value = HeapConstantOf(node->op());
return !Heap::InNewSpace(*value);
return !Heap::InYoungGeneration(*value);
#else
return false;
#endif
@@ -1449,7 +1449,7 @@ void JSObjectData::SerializeRecursive(JSHeapBroker* broker, int depth) {
elements_object->map() == ReadOnlyRoots(isolate).fixed_cow_array_map();
if (empty_or_cow) {
// We need to make sure copy-on-write elements are tenured.
if (Heap::InNewSpace(*elements_object)) {
if (Heap::InYoungGeneration(*elements_object)) {
elements_object = isolate->factory()->CopyAndTenureFixedCOWArray(
Handle<FixedArray>::cast(elements_object));
boilerplate->set_elements(*elements_object);
@@ -2027,7 +2027,7 @@ void JSObjectRef::EnsureElementsTenured() {
AllowHeapAllocation allow_heap_allocation;

Handle<FixedArrayBase> object_elements = elements().object();
if (Heap::InNewSpace(*object_elements)) {
if (Heap::InYoungGeneration(*object_elements)) {
// If we would like to pretenure a fixed cow array, we must ensure that
// the array is already in old space, otherwise we'll create too many
// old-to-new-space pointers (overflowing the store buffer).
@@ -2006,7 +2006,8 @@ class FastElementsAccessor : public ElementsAccessorBase<Subclass, KindTraits> {
// has too few used values, normalize it.
const int kMinLengthForSparsenessCheck = 64;
if (backing_store->length() < kMinLengthForSparsenessCheck) return;
if (Heap::InNewSpace(*backing_store)) return;
// TODO(ulan): Check if it works with young large objects.
if (Heap::InYoungGeneration(*backing_store)) return;
uint32_t length = 0;
if (obj->IsJSArray()) {
JSArray::cast(*obj)->length()->ToArrayLength(&length);
@@ -979,8 +979,9 @@ MaybeHandle<Map> GetInternalizedStringMap(Factory* f, Handle<String> string) {

MaybeHandle<Map> Factory::InternalizedStringMapForString(
Handle<String> string) {
// If the string is in new space it cannot be used as internalized.
if (Heap::InNewSpace(*string)) return MaybeHandle<Map>();
// If the string is in the young generation, it cannot be used as
// internalized.
if (Heap::InYoungGeneration(*string)) return MaybeHandle<Map>();

return GetInternalizedStringMap(this, string);
}
@@ -1778,7 +1779,7 @@ Handle<BytecodeArray> Factory::NewBytecodeArray(
isolate()->heap()->FatalProcessOutOfMemory("invalid array length");
}
// Bytecode array is pretenured, so constant pool array should be too.
DCHECK(!Heap::InNewSpace(*constant_pool));
DCHECK(!Heap::InYoungGeneration(*constant_pool));

int size = BytecodeArray::SizeFor(length);
HeapObject result =
@@ -2047,7 +2048,7 @@ Handle<JSObject> Factory::CopyJSObjectWithAllocationSite(
HeapObject raw_clone = isolate()->heap()->AllocateRawWithRetryOrFail(
adjusted_object_size, NEW_SPACE);

SLOW_DCHECK(Heap::InNewSpace(raw_clone));
DCHECK(Heap::InYoungGeneration(raw_clone));
// Since we know the clone is allocated in new space, we can copy
// the contents without worrying about updating the write barrier.
Heap::CopyBlock(raw_clone->address(), source->address(), object_size);
@@ -2235,7 +2236,7 @@ Handle<FixedArray> Factory::CopyFixedArray(Handle<FixedArray> array) {

Handle<FixedArray> Factory::CopyAndTenureFixedCOWArray(
Handle<FixedArray> array) {
DCHECK(Heap::InNewSpace(*array));
DCHECK(Heap::InYoungGeneration(*array));
Handle<FixedArray> result =
CopyFixedArrayUpTo(array, array->length(), TENURED);
@@ -351,45 +351,71 @@ bool Heap::InNewSpace(HeapObject heap_object) {
if (result) {
// If the object is in NEW_SPACE, then it's not in RO_SPACE so this is safe.
Heap* heap = Heap::FromWritableHeapObject(heap_object);
DCHECK(heap->gc_state_ != NOT_IN_GC || InToSpace(heap_object));
DCHECK_IMPLIES(heap->gc_state_ == NOT_IN_GC, InToPage(heap_object));
}
#endif
return result;
}

bool Heap::InYoungGeneration(Object object) {
DCHECK(!HasWeakHeapObjectTag(object));
return object->IsHeapObject() && InYoungGeneration(HeapObject::cast(object));
}

// static
bool Heap::InYoungGeneration(MaybeObject object) {
HeapObject heap_object;
return object->GetHeapObject(&heap_object) && InYoungGeneration(heap_object);
}

// static
bool Heap::InYoungGeneration(HeapObject heap_object) {
bool result = MemoryChunk::FromHeapObject(heap_object)->InYoungGeneration();
#ifdef DEBUG
// If in the young generation, then check we're either not in the middle of
// GC or the object is in to-space.
if (result) {
// If the object is in the young generation, then it's not in RO_SPACE so
// this is safe.
Heap* heap = Heap::FromWritableHeapObject(heap_object);
DCHECK_IMPLIES(heap->gc_state_ == NOT_IN_GC, InToPage(heap_object));
}
#endif
return result;
}

// static
bool Heap::InFromSpace(Object object) {
bool Heap::InFromPage(Object object) {
DCHECK(!HasWeakHeapObjectTag(object));
return object->IsHeapObject() && InFromSpace(HeapObject::cast(object));
return object->IsHeapObject() && InFromPage(HeapObject::cast(object));
}

// static
bool Heap::InFromSpace(MaybeObject object) {
bool Heap::InFromPage(MaybeObject object) {
HeapObject heap_object;
return object->GetHeapObject(&heap_object) && InFromSpace(heap_object);
return object->GetHeapObject(&heap_object) && InFromPage(heap_object);
}

// static
bool Heap::InFromSpace(HeapObject heap_object) {
return MemoryChunk::FromHeapObject(heap_object)
->IsFlagSet(Page::IN_FROM_SPACE);
bool Heap::InFromPage(HeapObject heap_object) {
return MemoryChunk::FromHeapObject(heap_object)->IsFromPage();
}

// static
bool Heap::InToSpace(Object object) {
bool Heap::InToPage(Object object) {
DCHECK(!HasWeakHeapObjectTag(object));
return object->IsHeapObject() && InToSpace(HeapObject::cast(object));
return object->IsHeapObject() && InToPage(HeapObject::cast(object));
}

// static
bool Heap::InToSpace(MaybeObject object) {
bool Heap::InToPage(MaybeObject object) {
HeapObject heap_object;
return object->GetHeapObject(&heap_object) && InToSpace(heap_object);
return object->GetHeapObject(&heap_object) && InToPage(heap_object);
}

// static
bool Heap::InToSpace(HeapObject heap_object) {
return MemoryChunk::FromHeapObject(heap_object)->IsFlagSet(Page::IN_TO_SPACE);
bool Heap::InToPage(HeapObject heap_object) {
return MemoryChunk::FromHeapObject(heap_object)->IsToPage();
}

bool Heap::InOldSpace(Object object) { return old_space_->Contains(object); }
@@ -489,11 +515,13 @@ AllocationMemento Heap::FindAllocationMemento(Map map, HeapObject object) {
void Heap::UpdateAllocationSite(Map map, HeapObject object,
PretenuringFeedbackMap* pretenuring_feedback) {
DCHECK_NE(pretenuring_feedback, &global_pretenuring_feedback_);
DCHECK(InFromSpace(object) ||
(InToSpace(object) && Page::FromHeapObject(object)->IsFlagSet(
Page::PAGE_NEW_NEW_PROMOTION)) ||
(!InNewSpace(object) && Page::FromHeapObject(object)->IsFlagSet(
Page::PAGE_NEW_OLD_PROMOTION)));
#ifdef DEBUG
MemoryChunk* chunk = MemoryChunk::FromHeapObject(object);
DCHECK_IMPLIES(chunk->IsToPage(),
chunk->IsFlagSet(MemoryChunk::PAGE_NEW_NEW_PROMOTION));
DCHECK_IMPLIES(!chunk->InYoungGeneration(),
chunk->IsFlagSet(MemoryChunk::PAGE_NEW_OLD_PROMOTION));
#endif
if (!FLAG_allocation_site_pretenuring ||
!AllocationSite::CanTrack(map->instance_type())) {
return;
@@ -514,9 +542,9 @@ void Heap::ExternalStringTable::AddString(String string) {
DCHECK(!Contains(string));

if (InNewSpace(string)) {
new_space_strings_.push_back(string);
young_strings_.push_back(string);
} else {
old_space_strings_.push_back(string);
old_strings_.push_back(string);
}
}
@@ -29,8 +29,8 @@ struct MemoryChunk {
static constexpr uintptr_t kHeapOffset =
kFlagsOffset + kUIntptrSize + 4 * kSystemPointerSize;
static constexpr uintptr_t kMarkingBit = uintptr_t{1} << 18;
static constexpr uintptr_t kFromSpaceBit = uintptr_t{1} << 3;
static constexpr uintptr_t kToSpaceBit = uintptr_t{1} << 4;
static constexpr uintptr_t kFromPageBit = uintptr_t{1} << 3;
static constexpr uintptr_t kToPageBit = uintptr_t{1} << 4;

V8_INLINE static heap_internals::MemoryChunk* FromHeapObject(
HeapObject object) {
@@ -39,9 +39,9 @@ struct MemoryChunk {

V8_INLINE bool IsMarking() const { return GetFlags() & kMarkingBit; }

V8_INLINE bool InNewSpace() const {
constexpr uintptr_t kNewSpaceMask = kFromSpaceBit | kToSpaceBit;
return GetFlags() & kNewSpaceMask;
V8_INLINE bool InYoungGeneration() const {
constexpr uintptr_t kYoungGenerationMask = kFromPageBit | kToPageBit;
return GetFlags() & kYoungGenerationMask;
}

V8_INLINE uintptr_t GetFlags() const {
@@ -65,7 +65,8 @@ inline void GenerationalBarrierInternal(HeapObject object, Address slot,
heap_internals::MemoryChunk* object_chunk =
heap_internals::MemoryChunk::FromHeapObject(object);

if (!value_chunk->InNewSpace() || object_chunk->InNewSpace()) return;
if (!value_chunk->InYoungGeneration() || object_chunk->InYoungGeneration())
return;

Heap::GenerationalBarrierSlow(object, slot, value);
}
@@ -116,7 +117,7 @@ inline void GenerationalBarrierForElements(Heap* heap, FixedArray array,
int offset, int length) {
heap_internals::MemoryChunk* array_chunk =
heap_internals::MemoryChunk::FromHeapObject(array);
if (array_chunk->InNewSpace()) return;
if (array_chunk->InYoungGeneration()) return;

Heap::GenerationalBarrierForElementsSlow(heap, array, offset, length);
}
@@ -125,7 +126,7 @@ inline void GenerationalBarrierForCode(Code host, RelocInfo* rinfo,
HeapObject object) {
heap_internals::MemoryChunk* object_chunk =
heap_internals::MemoryChunk::FromHeapObject(object);
if (!object_chunk->InNewSpace()) return;
if (!object_chunk->InYoungGeneration()) return;
Heap::GenerationalBarrierForCodeSlow(host, rinfo, object);
}
src/heap/heap.cc
@@ -1969,7 +1969,7 @@ void Heap::EvacuateYoungGeneration() {
new_space()->set_age_mark(new_space()->top());

// Fix up special trackers.
external_string_table_.PromoteAllNewSpaceStrings();
external_string_table_.PromoteYoung();
// GlobalHandles are updated in PostGarbageCollectonProcessing

IncrementYoungSurvivorsCounter(new_space()->Size());
@@ -2061,11 +2061,11 @@ void Heap::ProtectUnprotectedMemoryChunks() {
}

bool Heap::ExternalStringTable::Contains(String string) {
for (size_t i = 0; i < new_space_strings_.size(); ++i) {
if (new_space_strings_[i] == string) return true;
for (size_t i = 0; i < young_strings_.size(); ++i) {
if (young_strings_[i] == string) return true;
}
for (size_t i = 0; i < old_space_strings_.size(); ++i) {
if (old_space_strings_[i] == string) return true;
for (size_t i = 0; i < old_strings_.size(); ++i) {
if (old_strings_[i] == string) return true;
}
return false;
}
@@ -2103,16 +2103,16 @@ String Heap::UpdateNewSpaceReferenceInExternalStringTableEntry(
return new_string->IsExternalString() ? new_string : String();
}

void Heap::ExternalStringTable::VerifyNewSpace() {
void Heap::ExternalStringTable::VerifyYoung() {
#ifdef DEBUG
std::set<String> visited_map;
std::map<MemoryChunk*, size_t> size_map;
ExternalBackingStoreType type = ExternalBackingStoreType::kExternalString;
for (size_t i = 0; i < new_space_strings_.size(); ++i) {
String obj = String::cast(new_space_strings_[i]);
for (size_t i = 0; i < young_strings_.size(); ++i) {
String obj = String::cast(young_strings_[i]);
MemoryChunk* mc = MemoryChunk::FromHeapObject(obj);
DCHECK(mc->InNewSpace());
DCHECK(heap_->InNewSpace(obj));
DCHECK(mc->InYoungGeneration());
DCHECK(heap_->InYoungGeneration(obj));
DCHECK(!obj->IsTheHole(heap_->isolate()));
DCHECK(obj->IsExternalString());
// Note: we can have repeated elements in the table.
@@ -2131,12 +2131,12 @@ void Heap::ExternalStringTable::Verify() {
std::set<String> visited_map;
std::map<MemoryChunk*, size_t> size_map;
ExternalBackingStoreType type = ExternalBackingStoreType::kExternalString;
VerifyNewSpace();
for (size_t i = 0; i < old_space_strings_.size(); ++i) {
String obj = String::cast(old_space_strings_[i]);
VerifyYoung();
for (size_t i = 0; i < old_strings_.size(); ++i) {
String obj = String::cast(old_strings_[i]);
MemoryChunk* mc = MemoryChunk::FromHeapObject(obj);
DCHECK(!mc->InNewSpace());
DCHECK(!heap_->InNewSpace(obj));
DCHECK(!mc->InYoungGeneration());
DCHECK(!heap_->InYoungGeneration(obj));
DCHECK(!obj->IsTheHole(heap_->isolate()));
DCHECK(obj->IsExternalString());
// Note: we can have repeated elements in the table.
@@ -2150,12 +2150,12 @@ void Heap::ExternalStringTable::Verify() {
#endif
}

void Heap::ExternalStringTable::UpdateNewSpaceReferences(
void Heap::ExternalStringTable::UpdateYoungReferences(
Heap::ExternalStringTableUpdaterCallback updater_func) {
if (new_space_strings_.empty()) return;
if (young_strings_.empty()) return;

FullObjectSlot start(&new_space_strings_[0]);
FullObjectSlot end(&new_space_strings_[new_space_strings_.size()]);
FullObjectSlot start(&young_strings_[0]);
FullObjectSlot end(&young_strings_[young_strings_.size()]);
FullObjectSlot last = start;

for (FullObjectSlot p = start; p < end; ++p) {
@@ -2165,67 +2165,66 @@ void Heap::ExternalStringTable::UpdateNewSpaceReferences(

DCHECK(target->IsExternalString());

if (InNewSpace(target)) {
if (InYoungGeneration(target)) {
// String is still in new space. Update the table entry.
last.store(target);
++last;
} else {
// String got promoted. Move it to the old string list.
old_space_strings_.push_back(target);
old_strings_.push_back(target);
}
}

DCHECK(last <= end);
new_space_strings_.resize(last - start);
young_strings_.resize(last - start);
#ifdef VERIFY_HEAP
if (FLAG_verify_heap) {
VerifyNewSpace();
VerifyYoung();
}
#endif
}

void Heap::ExternalStringTable::PromoteAllNewSpaceStrings() {
old_space_strings_.reserve(old_space_strings_.size() +
new_space_strings_.size());
std::move(std::begin(new_space_strings_), std::end(new_space_strings_),
std::back_inserter(old_space_strings_));
new_space_strings_.clear();
void Heap::ExternalStringTable::PromoteYoung() {
old_strings_.reserve(old_strings_.size() + young_strings_.size());
std::move(std::begin(young_strings_), std::end(young_strings_),
std::back_inserter(old_strings_));
young_strings_.clear();
}

void Heap::ExternalStringTable::IterateNewSpaceStrings(RootVisitor* v) {
if (!new_space_strings_.empty()) {
void Heap::ExternalStringTable::IterateYoung(RootVisitor* v) {
if (!young_strings_.empty()) {
v->VisitRootPointers(
Root::kExternalStringsTable, nullptr,
FullObjectSlot(&new_space_strings_[0]),
FullObjectSlot(&new_space_strings_[new_space_strings_.size()]));
FullObjectSlot(&young_strings_[0]),
FullObjectSlot(&young_strings_[young_strings_.size()]));
}
}

void Heap::ExternalStringTable::IterateAll(RootVisitor* v) {
IterateNewSpaceStrings(v);
if (!old_space_strings_.empty()) {
IterateYoung(v);
if (!old_strings_.empty()) {
v->VisitRootPointers(
Root::kExternalStringsTable, nullptr,
FullObjectSlot(old_space_strings_.data()),
FullObjectSlot(old_space_strings_.data() + old_space_strings_.size()));
FullObjectSlot(old_strings_.data()),
FullObjectSlot(old_strings_.data() + old_strings_.size()));
}
}

void Heap::UpdateNewSpaceReferencesInExternalStringTable(
void Heap::UpdateYoungReferencesInExternalStringTable(
ExternalStringTableUpdaterCallback updater_func) {
external_string_table_.UpdateNewSpaceReferences(updater_func);
external_string_table_.UpdateYoungReferences(updater_func);
}

void Heap::ExternalStringTable::UpdateReferences(
Heap::ExternalStringTableUpdaterCallback updater_func) {
if (old_space_strings_.size() > 0) {
FullObjectSlot start(old_space_strings_.data());
FullObjectSlot end(old_space_strings_.data() + old_space_strings_.size());
if (old_strings_.size() > 0) {
FullObjectSlot start(old_strings_.data());
FullObjectSlot end(old_strings_.data() + old_strings_.size());
for (FullObjectSlot p = start; p < end; ++p)
p.store(updater_func(heap_, p));
}

UpdateNewSpaceReferences(updater_func);
UpdateYoungReferences(updater_func);
}

void Heap::UpdateReferencesInExternalStringTable(
@@ -2492,20 +2491,7 @@ bool Heap::IsImmovable(HeapObject object) {
}

bool Heap::IsLargeObject(HeapObject object) {
return IsLargeMemoryChunk(MemoryChunk::FromHeapObject(object));
}

bool Heap::IsLargeMemoryChunk(MemoryChunk* chunk) {
return chunk->owner()->identity() == NEW_LO_SPACE ||
chunk->owner()->identity() == LO_SPACE ||
chunk->owner()->identity() == CODE_LO_SPACE;
}

bool Heap::IsInYoungGeneration(HeapObject object) {
if (MemoryChunk::FromHeapObject(object)->IsInNewLargeObjectSpace()) {
return !object->map_word().IsForwardingAddress();
}
return Heap::InNewSpace(object);
return MemoryChunk::FromHeapObject(object)->IsLargePage();
}

#ifdef ENABLE_SLOW_DCHECKS
@@ -2534,7 +2520,7 @@ class LeftTrimmerVerifierRootVisitor : public RootVisitor {
namespace {
bool MayContainRecordedSlots(HeapObject object) {
// New space object do not have recorded slots.
if (MemoryChunk::FromHeapObject(object)->InNewSpace()) return false;
if (MemoryChunk::FromHeapObject(object)->InYoungGeneration()) return false;
// Whitelist objects that definitely do not have pointers.
if (object->IsByteArray() || object->IsFixedDoubleArray()) return false;
// Conservatively return true for other objects.
@@ -3645,10 +3631,10 @@ class OldToNewSlotVerifyingVisitor : public SlotVerifyingVisitor {
: SlotVerifyingVisitor(untyped, typed) {}

bool ShouldHaveBeenRecorded(HeapObject host, MaybeObject target) override {
DCHECK_IMPLIES(target->IsStrongOrWeak() && Heap::InNewSpace(target),
Heap::InToSpace(target));
return target->IsStrongOrWeak() && Heap::InNewSpace(target) &&
!Heap::InNewSpace(host);
DCHECK_IMPLIES(target->IsStrongOrWeak() && Heap::InYoungGeneration(target),
Heap::InToPage(target));
return target->IsStrongOrWeak() && Heap::InYoungGeneration(target) &&
!Heap::InYoungGeneration(host);
}
};
@@ -3684,7 +3670,7 @@ void Heap::VerifyRememberedSetFor(HeapObject object) {
Address end = start + object->Size();
std::set<Address> old_to_new;
std::set<std::pair<SlotType, Address> > typed_old_to_new;
if (!InNewSpace(object)) {
if (!InYoungGeneration(object)) {
store_buffer()->MoveAllEntriesToRememberedSet();
CollectSlots<OLD_TO_NEW>(chunk, start, end, &old_to_new, &typed_old_to_new);
OldToNewSlotVerifyingVisitor visitor(&old_to_new, &typed_old_to_new);
@@ -4998,8 +4984,9 @@ Address Heap::store_buffer_overflow_function_address() {
}

void Heap::ClearRecordedSlot(HeapObject object, ObjectSlot slot) {
DCHECK(!IsLargeObject(object));
Page* page = Page::FromAddress(slot.address());
if (!page->InNewSpace()) {
if (!page->InYoungGeneration()) {
DCHECK_EQ(page->owner()->identity(), OLD_SPACE);
store_buffer()->DeleteEntry(slot.address());
}
@@ -5007,7 +4994,8 @@ void Heap::ClearRecordedSlot(HeapObject object, ObjectSlot slot) {

#ifdef DEBUG
void Heap::VerifyClearedSlot(HeapObject object, ObjectSlot slot) {
if (InNewSpace(object)) return;
DCHECK(!IsLargeObject(object));
if (InYoungGeneration(object)) return;
Page* page = Page::FromAddress(slot.address());
DCHECK_EQ(page->owner()->identity(), OLD_SPACE);
store_buffer()->MoveAllEntriesToRememberedSet();
@@ -5020,7 +5008,8 @@ void Heap::VerifyClearedSlot(HeapObject object, ObjectSlot slot) {

void Heap::ClearRecordedSlotRange(Address start, Address end) {
Page* page = Page::FromAddress(start);
if (!page->InNewSpace()) {
DCHECK(!page->IsLargePage());
if (!page->InYoungGeneration()) {
DCHECK_EQ(page->owner()->identity(), OLD_SPACE);
store_buffer()->DeleteEntry(start, end);
}
@@ -5247,11 +5236,11 @@ void Heap::UpdateTotalGCTime(double duration) {
}
}

void Heap::ExternalStringTable::CleanUpNewSpaceStrings() {
void Heap::ExternalStringTable::CleanUpYoung() {
int last = 0;
Isolate* isolate = heap_->isolate();
for (size_t i = 0; i < new_space_strings_.size(); ++i) {
Object o = new_space_strings_[i];
for (size_t i = 0; i < young_strings_.size(); ++i) {
Object o = young_strings_[i];
if (o->IsTheHole(isolate)) {
continue;
}
@@ -5259,21 +5248,21 @@ void Heap::ExternalStringTable::CleanUpNewSpaceStrings() {
// will be processed. Re-processing it will add a duplicate to the vector.
if (o->IsThinString()) continue;
DCHECK(o->IsExternalString());
if (InNewSpace(o)) {
new_space_strings_[last++] = o;
if (InYoungGeneration(o)) {
young_strings_[last++] = o;
} else {
old_space_strings_.push_back(o);
old_strings_.push_back(o);
}
}
new_space_strings_.resize(last);
young_strings_.resize(last);
}

void Heap::ExternalStringTable::CleanUpAll() {
CleanUpNewSpaceStrings();
CleanUpYoung();
int last = 0;
Isolate* isolate = heap_->isolate();
for (size_t i = 0; i < old_space_strings_.size(); ++i) {
Object o = old_space_strings_[i];
for (size_t i = 0; i < old_strings_.size(); ++i) {
Object o = old_strings_[i];
if (o->IsTheHole(isolate)) {
continue;
}
@@ -5281,10 +5270,10 @@ void Heap::ExternalStringTable::CleanUpAll() {
// will be processed. Re-processing it will add a duplicate to the vector.
if (o->IsThinString()) continue;
DCHECK(o->IsExternalString());
DCHECK(!InNewSpace(o));
old_space_strings_[last++] = o;
DCHECK(!InYoungGeneration(o));
old_strings_[last++] = o;
}
old_space_strings_.resize(last);
old_strings_.resize(last);
#ifdef VERIFY_HEAP
if (FLAG_verify_heap) {
Verify();
@@ -5293,20 +5282,20 @@ void Heap::ExternalStringTable::CleanUpAll() {
}

void Heap::ExternalStringTable::TearDown() {
for (size_t i = 0; i < new_space_strings_.size(); ++i) {
Object o = new_space_strings_[i];
for (size_t i = 0; i < young_strings_.size(); ++i) {
Object o = young_strings_[i];
// Dont finalize thin strings.
if (o->IsThinString()) continue;
heap_->FinalizeExternalString(ExternalString::cast(o));
}
new_space_strings_.clear();
for (size_t i = 0; i < old_space_strings_.size(); ++i) {
Object o = old_space_strings_[i];
young_strings_.clear();
for (size_t i = 0; i < old_strings_.size(); ++i) {
Object o = old_strings_[i];
// Dont finalize thin strings.
if (o->IsThinString()) continue;
heap_->FinalizeExternalString(ExternalString::cast(o));
}
old_space_strings_.clear();
old_strings_.clear();
}
@@ -5681,7 +5670,7 @@ void Heap::GenerationalBarrierSlow(HeapObject object, Address slot,
void Heap::GenerationalBarrierForElementsSlow(Heap* heap, FixedArray array,
int offset, int length) {
for (int i = 0; i < length; i++) {
if (!InNewSpace(array->get(offset + i))) continue;
if (!InYoungGeneration(array->get(offset + i))) continue;
heap->store_buffer()->InsertEntry(
array->RawFieldOfElementAt(offset + i).address());
}
@@ -5689,7 +5678,7 @@ void Heap::GenerationalBarrierForElementsSlow(Heap* heap, FixedArray array,

void Heap::GenerationalBarrierForCodeSlow(Code host, RelocInfo* rinfo,
HeapObject object) {
DCHECK(InNewSpace(object));
DCHECK(InYoungGeneration(object));
Page* source_page = Page::FromHeapObject(host);
RelocInfo::Mode rmode = rinfo->rmode();
Address addr = rinfo->pc();
@@ -5756,7 +5745,7 @@ bool Heap::PageFlagsAreConsistent(HeapObject object) {

const bool generation_consistency =
chunk->owner()->identity() != NEW_SPACE ||
(chunk->InNewSpace() && slim_chunk->InNewSpace());
(chunk->InYoungGeneration() && slim_chunk->InYoungGeneration());
const bool marking_consistency =
!heap->incremental_marking()->IsMarking() ||
(chunk->IsFlagSet(MemoryChunk::INCREMENTAL_MARKING) &&
@@ -5768,12 +5757,12 @@ bool Heap::PageFlagsAreConsistent(HeapObject object) {
static_assert(MemoryChunk::Flag::INCREMENTAL_MARKING ==
heap_internals::MemoryChunk::kMarkingBit,
"Incremental marking flag inconsistent");
static_assert(MemoryChunk::Flag::IN_FROM_SPACE ==
heap_internals::MemoryChunk::kFromSpaceBit,
"From space flag inconsistent");
static_assert(MemoryChunk::Flag::IN_TO_SPACE ==
heap_internals::MemoryChunk::kToSpaceBit,
"To space flag inconsistent");
static_assert(MemoryChunk::Flag::FROM_PAGE ==
heap_internals::MemoryChunk::kFromPageBit,
"From page flag inconsistent");
static_assert(MemoryChunk::Flag::TO_PAGE ==
heap_internals::MemoryChunk::kToPageBit,
"To page flag inconsistent");
static_assert(MemoryChunk::kFlagsOffset ==
heap_internals::MemoryChunk::kFlagsOffset,
"Flag offset inconsistent");
@@ -379,10 +379,7 @@ class Heap {

bool IsImmovable(HeapObject object);

bool IsLargeObject(HeapObject object);
bool IsLargeMemoryChunk(MemoryChunk* chunk);

bool IsInYoungGeneration(HeapObject object);
static bool IsLargeObject(HeapObject object);

// Trim the given array from the left. Note that this relocates the object
// start and hence is only valid if there is only a single reference to it.
@@ -912,15 +909,20 @@ class Heap {
// ===========================================================================

// Returns whether the object resides in new space.
static inline bool InYoungGeneration(Object object);
static inline bool InYoungGeneration(MaybeObject object);
static inline bool InYoungGeneration(HeapObject heap_object);
// TODO(ulan): Remove once all call sites are changed to use
// InYoungGeneration.
static inline bool InNewSpace(Object object);
static inline bool InNewSpace(MaybeObject object);
static inline bool InNewSpace(HeapObject heap_object);
static inline bool InFromSpace(Object object);
static inline bool InFromSpace(MaybeObject object);
static inline bool InFromSpace(HeapObject heap_object);
static inline bool InToSpace(Object object);
static inline bool InToSpace(MaybeObject object);
static inline bool InToSpace(HeapObject heap_object);
static inline bool InFromPage(Object object);
static inline bool InFromPage(MaybeObject object);
static inline bool InFromPage(HeapObject heap_object);
static inline bool InToPage(Object object);
static inline bool InToPage(MaybeObject object);
static inline bool InToPage(HeapObject heap_object);

// Returns whether the object resides in old space.
inline bool InOldSpace(Object object);
@@ -1295,32 +1297,32 @@ class Heap {
bool Contains(String string);

void IterateAll(RootVisitor* v);
void IterateNewSpaceStrings(RootVisitor* v);
void PromoteAllNewSpaceStrings();
void IterateYoung(RootVisitor* v);
void PromoteYoung();

// Restores internal invariant and gets rid of collected strings. Must be
// called after each Iterate*() that modified the strings.
void CleanUpAll();
void CleanUpNewSpaceStrings();
void CleanUpYoung();

// Finalize all registered external strings and clear tables.
void TearDown();

void UpdateNewSpaceReferences(
void UpdateYoungReferences(
Heap::ExternalStringTableUpdaterCallback updater_func);
void UpdateReferences(
Heap::ExternalStringTableUpdaterCallback updater_func);

private:
void Verify();
void VerifyNewSpace();
void VerifyYoung();

Heap* const heap_;

// To speed up scavenge collections new space string are kept
// separate from old space strings.
std::vector<Object> new_space_strings_;
std::vector<Object> old_space_strings_;
// To speed up scavenge collections young string are kept separate from old
// strings.
std::vector<Object> young_strings_;
std::vector<Object> old_strings_;

DISALLOW_COPY_AND_ASSIGN(ExternalStringTable);
};
@@ -1557,7 +1559,7 @@ class Heap {
void Scavenge();
void EvacuateYoungGeneration();

void UpdateNewSpaceReferencesInExternalStringTable(
void UpdateYoungReferencesInExternalStringTable(
ExternalStringTableUpdaterCallback updater_func);

void UpdateReferencesInExternalStringTable(
@@ -411,9 +411,7 @@ void IncrementalMarking::FinishBlackAllocation() {
void IncrementalMarking::EnsureBlackAllocated(Address allocated, size_t size) {
if (black_allocation() && allocated != kNullAddress) {
HeapObject object = HeapObject::FromAddress(allocated);
if (marking_state()->IsWhite(object) &&
!(Heap::InNewSpace(object) ||
heap_->new_lo_space()->Contains(object))) {
if (marking_state()->IsWhite(object) && !Heap::InYoungGeneration(object)) {
if (heap_->IsLargeObject(object)) {
marking_state()->WhiteToBlack(object);
} else {
@@ -542,7 +540,7 @@ void IncrementalMarking::UpdateMarkingWorklistAfterScavenge() {
HeapObject obj, HeapObject* out) -> bool {
DCHECK(obj->IsHeapObject());
// Only pointers to from space have to be updated.
if (Heap::InFromSpace(obj)) {
if (Heap::InFromPage(obj)) {
MapWord map_word = obj->map_word();
if (!map_word.IsForwardingAddress()) {
// There may be objects on the marking deque that do not exist anymore,
@@ -556,27 +554,30 @@ void IncrementalMarking::UpdateMarkingWorklistAfterScavenge() {
DCHECK_IMPLIES(marking_state()->IsWhite(obj), obj->IsFiller());
*out = dest;
return true;
} else if (Heap::InToSpace(obj)) {
// The object may be on a page that was moved in new space.
DCHECK(Page::FromHeapObject(obj)->IsFlagSet(Page::SWEEP_TO_ITERATE));
} else if (Heap::InToPage(obj)) {
// The object may be on a large page or on a page that was moved in new
// space.
DCHECK(Heap::IsLargeObject(obj) ||
Page::FromHeapObject(obj)->IsFlagSet(Page::SWEEP_TO_ITERATE));
#ifdef ENABLE_MINOR_MC
if (minor_marking_state->IsGrey(obj)) {
*out = obj;
return true;
if (minor_marking_state->IsWhite(obj)) {
return false;
}
#endif // ENABLE_MINOR_MC
return false;
// Either a large object or an object marked by the minor mark-compactor.
*out = obj;
return true;
} else {
// The object may be on a page that was moved from new to old space. Only
// applicable during minor MC garbage collections.
if (Page::FromHeapObject(obj)->IsFlagSet(Page::SWEEP_TO_ITERATE)) {
#ifdef ENABLE_MINOR_MC
if (minor_marking_state->IsGrey(obj)) {
*out = obj;
return true;
if (minor_marking_state->IsWhite(obj)) {
return false;
}
#endif // ENABLE_MINOR_MC
return false;
*out = obj;
return true;
}
DCHECK_IMPLIES(marking_state()->IsWhite(obj), obj->IsFiller());
// Skip one word filler objects that appear on the
@@ -599,9 +600,10 @@ T ForwardingAddress(T heap_obj) {

if (map_word.IsForwardingAddress()) {
return T::cast(map_word.ToForwardingAddress());
} else if (Heap::InNewSpace(heap_obj)) {
} else if (Heap::InFromPage(heap_obj)) {
return T();
} else {
// TODO(ulan): Support minor mark-compactor here.
return heap_obj;
}
}
@@ -671,7 +673,7 @@ void IncrementalMarking::UpdateWeakReferencesAfterScavenge() {
#ifdef DEBUG
weak_objects_->bytecode_flushing_candidates.Iterate(
[](SharedFunctionInfo candidate) {
DCHECK(!Heap::InNewSpace(candidate));
DCHECK(!Heap::InYoungGeneration(candidate));
});
#endif
}
@@ -459,7 +459,8 @@ void MarkCompactCollector::MarkRootObject(Root root, HeapObject obj) {
#ifdef ENABLE_MINOR_MC

void MinorMarkCompactCollector::MarkRootObject(HeapObject obj) {
if (Heap::InNewSpace(obj) && non_atomic_marking_state_.WhiteToGrey(obj)) {
if (Heap::InYoungGeneration(obj) &&
non_atomic_marking_state_.WhiteToGrey(obj)) {
worklist_->Push(kMainThread, obj);
}
}
@@ -322,7 +322,8 @@ class FullEvacuationVerifier : public EvacuationVerifier {

protected:
V8_INLINE void VerifyHeapObjectImpl(HeapObject heap_object) {
CHECK_IMPLIES(Heap::InNewSpace(heap_object), Heap::InToSpace(heap_object));
CHECK_IMPLIES(Heap::InYoungGeneration(heap_object),
Heap::InToPage(heap_object));
CHECK(!MarkCompactCollector::IsOnEvacuationCandidate(heap_object));
}
@@ -983,7 +984,7 @@ class InternalizedStringTableCleaner : public ObjectVisitor {
p.store(the_hole);
} else {
// StringTable contains only old space strings.
DCHECK(!Heap::InNewSpace(o));
DCHECK(!Heap::InYoungGeneration(o));
MarkCompactCollector::RecordSlot(table_, p, heap_object);
}
}
@@ -1117,7 +1118,7 @@ class RecordMigratedSlotVisitor : public ObjectVisitor {
Code target = Code::GetCodeFromTargetAddress(rinfo->target_address());
// The target is always in old space, we don't have to record the slot in
// the old-to-new remembered set.
DCHECK(!Heap::InNewSpace(target));
DCHECK(!Heap::InYoungGeneration(target));
collector_->RecordRelocSlot(host, rinfo, target);
}
@@ -1140,9 +1141,10 @@ class RecordMigratedSlotVisitor : public ObjectVisitor {
Address slot) {
if (value->IsStrongOrWeak()) {
Page* p = Page::FromAddress(value.ptr());
if (p->InNewSpace()) {
DCHECK_IMPLIES(p->InToSpace(),
p->IsFlagSet(Page::PAGE_NEW_NEW_PROMOTION));
if (p->InYoungGeneration()) {
DCHECK_IMPLIES(
p->IsToPage(),
p->IsFlagSet(Page::PAGE_NEW_NEW_PROMOTION) || p->IsLargePage());
RememberedSet<OLD_TO_NEW>::Insert<AccessMode::NON_ATOMIC>(
MemoryChunk::FromHeapObject(host), slot);
} else if (p->IsEvacuationCandidate()) {
@@ -1402,7 +1404,7 @@ class EvacuateNewSpacePageVisitor final : public HeapObjectVisitor {
case NEW_TO_OLD: {
page->heap()->new_space()->from_space().RemovePage(page);
Page* new_page = Page::ConvertNewToOld(page);
DCHECK(!new_page->InNewSpace());
DCHECK(!new_page->InYoungGeneration());
new_page->SetFlag(Page::PAGE_NEW_OLD_PROMOTION);
break;
}
@@ -2448,10 +2450,10 @@ static inline SlotCallbackResult UpdateSlot(TSlot slot,
"Only [Full]ObjectSlot and [Full]MaybeObjectSlot are expected here");
MapWord map_word = heap_obj->map_word();
if (map_word.IsForwardingAddress()) {
DCHECK(Heap::InFromSpace(heap_obj) ||
MarkCompactCollector::IsOnEvacuationCandidate(heap_obj) ||
Page::FromHeapObject(heap_obj)->IsFlagSet(
Page::COMPACTION_WAS_ABORTED));
DCHECK_IMPLIES(!Heap::InFromPage(heap_obj),
MarkCompactCollector::IsOnEvacuationCandidate(heap_obj) ||
Page::FromHeapObject(heap_obj)->IsFlagSet(
Page::COMPACTION_WAS_ABORTED));
typename TSlot::TObject target =
MakeSlotValue<TSlot, reference_type>(map_word.ToForwardingAddress());
if (access_mode == AccessMode::NON_ATOMIC) {
@@ -2459,7 +2461,7 @@ static inline SlotCallbackResult UpdateSlot(TSlot slot,
} else {
slot.Release_CompareAndSwap(old, target);
}
DCHECK(!Heap::InFromSpace(target));
DCHECK(!Heap::InFromPage(target));
DCHECK(!MarkCompactCollector::IsOnEvacuationCandidate(target));
} else {
DCHECK(heap_obj->map()->IsMap());
@@ -2640,7 +2642,7 @@ class Evacuator : public Malloced {
return kPageNewToOld;
if (chunk->IsFlagSet(MemoryChunk::PAGE_NEW_NEW_PROMOTION))
return kPageNewToNew;
if (chunk->InNewSpace()) return kObjectsNewToOld;
if (chunk->InYoungGeneration()) return kObjectsNewToOld;
return kObjectsOldToOld;
}
@@ -3252,7 +3254,7 @@ class RememberedSetUpdatingItem : public UpdatingItem {
if (!(*slot).GetHeapObject(&heap_object)) {
return REMOVE_SLOT;
}
if (Heap::InFromSpace(heap_object)) {
if (Heap::InFromPage(heap_object)) {
MapWord map_word = heap_object->map_word();
if (map_word.IsForwardingAddress()) {
HeapObjectReference::Update(THeapObjectSlot(slot),
@@ -3265,10 +3267,10 @@ class RememberedSetUpdatingItem : public UpdatingItem {
// callback in to space, the object is still live.
// Unfortunately, we do not know about the slot. It could be in a
// just freed free space object.
if (Heap::InToSpace(heap_object)) {
if (Heap::InToPage(heap_object)) {
return KEEP_SLOT;
}
} else if (Heap::InToSpace(heap_object)) {
} else if (Heap::InToPage(heap_object)) {
// Slots can point to "to" space if the page has been moved, or if the
// slot has been recorded multiple times in the remembered set, or
// if the slot was already updated during old->old updating.
@@ -3287,7 +3289,7 @@ class RememberedSetUpdatingItem : public UpdatingItem {
}
return KEEP_SLOT;
} else {
DCHECK(!Heap::InNewSpace(heap_object));
DCHECK(!Heap::InYoungGeneration(heap_object));
}
return REMOVE_SLOT;
}
@@ -3801,7 +3803,7 @@ class YoungGenerationMarkingVerifier : public MarkingVerifier {

private:
V8_INLINE void VerifyHeapObjectImpl(HeapObject heap_object) {
CHECK_IMPLIES(Heap::InNewSpace(heap_object), IsMarked(heap_object));
CHECK_IMPLIES(Heap::InYoungGeneration(heap_object), IsMarked(heap_object));
}

template <typename TSlot>
@@ -3834,7 +3836,8 @@ class YoungGenerationEvacuationVerifier : public EvacuationVerifier {

protected:
V8_INLINE void VerifyHeapObjectImpl(HeapObject heap_object) {
CHECK_IMPLIES(Heap::InNewSpace(heap_object), Heap::InToSpace(heap_object));
CHECK_IMPLIES(Heap::InYoungGeneration(heap_object),
Heap::InToPage(heap_object));
}

template <typename TSlot>
@@ -3869,10 +3872,10 @@ class YoungGenerationEvacuationVerifier : public EvacuationVerifier {
#endif // VERIFY_HEAP

bool IsUnmarkedObjectForYoungGeneration(Heap* heap, FullObjectSlot p) {
DCHECK_IMPLIES(Heap::InNewSpace(*p), Heap::InToSpace(*p));
return Heap::InNewSpace(*p) && !heap->minor_mark_compact_collector()
->non_atomic_marking_state()
->IsGrey(HeapObject::cast(*p));
DCHECK_IMPLIES(Heap::InYoungGeneration(*p), Heap::InToPage(*p));
return Heap::InYoungGeneration(*p) && !heap->minor_mark_compact_collector()
->non_atomic_marking_state()
->IsGrey(HeapObject::cast(*p));
}

} // namespace
@@ -3924,7 +3927,7 @@ class YoungGenerationMarkingVisitor final
template <typename TSlot>
V8_INLINE void VisitPointerImpl(HeapObject host, TSlot slot) {
typename TSlot::TObject target = *slot;
if (Heap::InNewSpace(target)) {
if (Heap::InYoungGeneration(target)) {
// Treat weak references as strong.
// TODO(marja): Proper weakness handling for minor-mcs.
HeapObject target_object = target.GetHeapObject();
@@ -4031,9 +4034,10 @@ class YoungGenerationRecordMigratedSlotVisitor final
Address slot) final {
if (value->IsStrongOrWeak()) {
Page* p = Page::FromAddress(value.ptr());
if (p->InNewSpace()) {
DCHECK_IMPLIES(p->InToSpace(),
p->IsFlagSet(Page::PAGE_NEW_NEW_PROMOTION));
if (p->InYoungGeneration()) {
DCHECK_IMPLIES(
p->IsToPage(),
p->IsFlagSet(Page::PAGE_NEW_NEW_PROMOTION) || p->IsLargePage());
RememberedSet<OLD_TO_NEW>::Insert<AccessMode::NON_ATOMIC>(
Page::FromAddress(slot), slot);
} else if (p->IsEvacuationCandidate() && IsLive(host)) {
@@ -4102,7 +4106,7 @@ void MinorMarkCompactCollector::UpdatePointersAfterEvacuation() {
heap()->ProcessWeakListRoots(&evacuation_object_retainer);

// Update pointers from external string table.
heap()->UpdateNewSpaceReferencesInExternalStringTable(
heap()->UpdateYoungReferencesInExternalStringTable(
&UpdateReferenceInExternalStringTableEntry);
}
}
@@ -4284,7 +4288,7 @@ class MinorMarkCompactWeakObjectRetainer : public WeakObjectRetainer {

Object RetainAs(Object object) override {
HeapObject heap_object = HeapObject::cast(object);
if (!Heap::InNewSpace(heap_object)) return object;
if (!Heap::InYoungGeneration(heap_object)) return object;

// Young generation marking only marks to grey instead of black.
DCHECK(!marking_state_->IsBlack(heap_object));
@@ -4308,8 +4312,8 @@ void MinorMarkCompactCollector::ClearNonLiveReferences() {
// Internalized strings are always stored in old space, so there is no need
// to clean them here.
YoungGenerationExternalStringTableCleaner external_visitor(this);
heap()->external_string_table_.IterateNewSpaceStrings(&external_visitor);
heap()->external_string_table_.CleanUpNewSpaceStrings();
heap()->external_string_table_.IterateYoung(&external_visitor);
heap()->external_string_table_.CleanUpYoung();
}

{
@@ -4397,7 +4401,7 @@ class YoungGenerationMarkingTask : public ItemParallelJob::Task {
};

void MarkObject(Object object) {
if (!Heap::InNewSpace(object)) return;
if (!Heap::InYoungGeneration(object)) return;
HeapObject heap_object = HeapObject::cast(object);
if (marking_state_->WhiteToGrey(heap_object)) {
const int size = visitor_.Visit(heap_object);
@@ -4482,10 +4486,10 @@ class PageMarkingItem : public MarkingItem {
std::is_same<TSlot, MaybeObjectSlot>::value,
"Only FullMaybeObjectSlot and MaybeObjectSlot are expected here");
MaybeObject object = *slot;
if (Heap::InNewSpace(object)) {
if (Heap::InYoungGeneration(object)) {
// Marking happens before flipping the young generation, so the object
// has to be in ToSpace.
DCHECK(Heap::InToSpace(object));
// has to be in a to page.
DCHECK(Heap::InToPage(object));
HeapObject heap_object;
bool success = object.GetHeapObject(&heap_object);
USE(success);
@@ -149,8 +149,8 @@ CopyAndForwardResult Scavenger::SemiSpaceCopyObject(
allocator_.FreeLast(NEW_SPACE, target, object_size);
MapWord map_word = object->synchronized_map_word();
HeapObjectReference::Update(slot, map_word.ToForwardingAddress());
DCHECK(!Heap::InFromSpace(*slot));
return Heap::InToSpace(*slot)
DCHECK(!Heap::InFromPage(*slot));
return Heap::InToPage(*slot)
? CopyAndForwardResult::SUCCESS_YOUNG_GENERATION
: CopyAndForwardResult::SUCCESS_OLD_GENERATION;
}
@@ -185,8 +185,8 @@ CopyAndForwardResult Scavenger::PromoteObject(Map map, THeapObjectSlot slot,
allocator_.FreeLast(OLD_SPACE, target, object_size);
MapWord map_word = object->synchronized_map_word();
HeapObjectReference::Update(slot, map_word.ToForwardingAddress());
DCHECK(!Heap::InFromSpace(*slot));
return Heap::InToSpace(*slot)
DCHECK(!Heap::InFromPage(*slot));
return Heap::InToPage(*slot)
? CopyAndForwardResult::SUCCESS_YOUNG_GENERATION
: CopyAndForwardResult::SUCCESS_OLD_GENERATION;
}
@@ -213,7 +213,7 @@ bool Scavenger::HandleLargeObject(Map map, HeapObject object, int object_size,
// object_size > kMaxRegularHeapObjectSize
if (V8_UNLIKELY(
FLAG_young_generation_large_objects &&
MemoryChunk::FromHeapObject(object)->IsInNewLargeObjectSpace())) {
MemoryChunk::FromHeapObject(object)->InNewLargeObjectSpace())) {
DCHECK_EQ(NEW_LO_SPACE,
MemoryChunk::FromHeapObject(object)->owner()->identity());
if (object->map_slot().Release_CompareAndSwap(
@@ -287,7 +287,7 @@ SlotCallbackResult Scavenger::EvacuateThinString(Map map, THeapObjectSlot slot,
String actual = object->actual();
// ThinStrings always refer to internalized strings, which are always in old
// space.
DCHECK(!Heap::InNewSpace(actual));
DCHECK(!Heap::InYoungGeneration(actual));
HeapObjectReference::Update(slot, actual);
return REMOVE_SLOT;
}
@@ -313,7 +313,7 @@ SlotCallbackResult Scavenger::EvacuateShortcutCandidate(Map map,

HeapObjectReference::Update(slot, first);

if (!Heap::InNewSpace(first)) {
if (!Heap::InYoungGeneration(first)) {
object->map_slot().Release_Store(
MapWord::FromForwardingAddress(first).ToMap());
return REMOVE_SLOT;
@@ -326,7 +326,7 @@ SlotCallbackResult Scavenger::EvacuateShortcutCandidate(Map map,
HeapObjectReference::Update(slot, target);
object->map_slot().Release_Store(
MapWord::FromForwardingAddress(target).ToMap());
return Heap::InToSpace(target) ? KEEP_SLOT : REMOVE_SLOT;
return Heap::InToPage(target) ? KEEP_SLOT : REMOVE_SLOT;
}
Map map = first_word.ToMap();
SlotCallbackResult result =
@@ -348,7 +348,7 @@ SlotCallbackResult Scavenger::EvacuateObject(THeapObjectSlot slot, Map map,
static_assert(std::is_same<THeapObjectSlot, FullHeapObjectSlot>::value ||
std::is_same<THeapObjectSlot, HeapObjectSlot>::value,
"Only FullHeapObjectSlot and HeapObjectSlot are expected here");
SLOW_DCHECK(Heap::InFromSpace(source));
SLOW_DCHECK(Heap::InFromPage(source));
SLOW_DCHECK(!MapWord::FromMap(map).IsForwardingAddress());
int size = source->SizeFromMap(map);
// Cannot use ::cast() below because that would add checks in debug mode
@@ -377,7 +377,7 @@ SlotCallbackResult Scavenger::ScavengeObject(THeapObjectSlot p,
static_assert(std::is_same<THeapObjectSlot, FullHeapObjectSlot>::value ||
std::is_same<THeapObjectSlot, HeapObjectSlot>::value,
"Only FullHeapObjectSlot and HeapObjectSlot are expected here");
DCHECK(Heap::InFromSpace(object));
DCHECK(Heap::InFromPage(object));

// Synchronized load that consumes the publishing CAS of MigrateObject.
MapWord first_word = object->synchronized_map_word();
@@ -387,12 +387,10 @@ SlotCallbackResult Scavenger::ScavengeObject(THeapObjectSlot p,
if (first_word.IsForwardingAddress()) {
HeapObject dest = first_word.ToForwardingAddress();
HeapObjectReference::Update(p, dest);
DCHECK_IMPLIES(Heap::InNewSpace(dest),
(Heap::InToSpace(dest) ||
MemoryChunk::FromHeapObject(dest)->owner()->identity() ==
NEW_LO_SPACE));
DCHECK_IMPLIES(Heap::InYoungGeneration(dest),
Heap::InToPage(dest) || Heap::IsLargeObject(dest));

return Heap::InToSpace(dest) ? KEEP_SLOT : REMOVE_SLOT;
return Heap::InYoungGeneration(dest) ? KEEP_SLOT : REMOVE_SLOT;
}

Map map = first_word.ToMap();
@@ -410,15 +408,15 @@ SlotCallbackResult Scavenger::CheckAndScavengeObject(Heap* heap, TSlot slot) {
"Only FullMaybeObjectSlot and MaybeObjectSlot are expected here");
using THeapObjectSlot = typename TSlot::THeapObjectSlot;
MaybeObject object = *slot;
if (Heap::InFromSpace(object)) {
if (Heap::InFromPage(object)) {
HeapObject heap_object = object->GetHeapObject();

SlotCallbackResult result =
ScavengeObject(THeapObjectSlot(slot), heap_object);
DCHECK_IMPLIES(result == REMOVE_SLOT,
!heap->IsInYoungGeneration((*slot)->GetHeapObject()));
!heap->InYoungGeneration((*slot)->GetHeapObject()));
return result;
} else if (Heap::InToSpace(object)) {
} else if (Heap::InToPage(object)) {
// Already updated slot. This can happen when processing of the work list
// is interleaved with processing roots.
return KEEP_SLOT;
@@ -463,7 +461,7 @@ void ScavengeVisitor::VisitEmbeddedPointer(Code host, RelocInfo* rinfo) {

template <typename TSlot>
void ScavengeVisitor::VisitHeapObjectImpl(TSlot slot, HeapObject heap_object) {
if (Heap::InNewSpace(heap_object)) {
if (Heap::InYoungGeneration(heap_object)) {
scavenger_->ScavengeObject(HeapObjectSlot(slot), heap_object);
}
}
@@ -119,7 +119,7 @@ class IterateAndScavengePromotedObjectsVisitor final : public ObjectVisitor {
"Only FullHeapObjectSlot and HeapObjectSlot are expected here");
scavenger_->PageMemoryFence(MaybeObject::FromObject(target));

if (Heap::InFromSpace(target)) {
if (Heap::InFromPage(target)) {
SlotCallbackResult result = scavenger_->ScavengeObject(slot, target);
bool success = (*slot)->GetHeapObject(&target);
USE(success);
@@ -145,14 +145,14 @@ class IterateAndScavengePromotedObjectsVisitor final : public ObjectVisitor {
};

static bool IsUnscavengedHeapObject(Heap* heap, FullObjectSlot p) {
return Heap::InFromSpace(*p) &&
return Heap::InFromPage(*p) &&
!HeapObject::cast(*p)->map_word().IsForwardingAddress();
}

class ScavengeWeakObjectRetainer : public WeakObjectRetainer {
public:
Object RetainAs(Object object) override {
if (!Heap::InFromSpace(object)) {
if (!Heap::InFromPage(object)) {
return object;
}
@@ -257,7 +257,7 @@ void ScavengerCollector::CollectGarbage() {
{
// Update references into new space
TRACE_GC(heap_->tracer(), GCTracer::Scope::SCAVENGER_SCAVENGE_UPDATE_REFS);
heap_->UpdateNewSpaceReferencesInExternalStringTable(
heap_->UpdateYoungReferencesInExternalStringTable(
&Heap::UpdateNewSpaceReferenceInExternalStringTableEntry);

heap_->incremental_marking()->UpdateMarkingWorklistAfterScavenge();
@ -456,9 +456,9 @@ void RootScavengeVisitor::VisitRootPointers(Root root, const char* description,
|
||||
void RootScavengeVisitor::ScavengePointer(FullObjectSlot p) {
|
||||
Object object = *p;
|
||||
DCHECK(!HasWeakHeapObjectTag(object));
|
||||
if (!Heap::InNewSpace(object)) return;
|
||||
|
||||
scavenger_->ScavengeObject(FullHeapObjectSlot(p), HeapObject::cast(object));
|
||||
if (Heap::InYoungGeneration(object)) {
|
||||
scavenger_->ScavengeObject(FullHeapObjectSlot(p), HeapObject::cast(object));
|
||||
}
|
||||
}
|
||||
|
||||
RootScavengeVisitor::RootScavengeVisitor(Scavenger* scavenger)
|
||||
|
@ -119,8 +119,10 @@ void Space::MoveExternalBackingStoreBytes(ExternalBackingStoreType type,
// SemiSpace

bool SemiSpace::Contains(HeapObject o) {
return id_ == kToSpace ? MemoryChunk::FromHeapObject(o)->InToSpace()
: MemoryChunk::FromHeapObject(o)->InFromSpace();
MemoryChunk* memory_chunk = MemoryChunk::FromHeapObject(o);
if (memory_chunk->IsLargePage()) return false;
return id_ == kToSpace ? memory_chunk->IsToPage()
: memory_chunk->IsFromPage();
}

bool SemiSpace::Contains(Object o) {
@ -234,10 +236,6 @@ void MemoryChunk::MoveExternalBackingStoreBytes(ExternalBackingStoreType type,
amount);
}

bool MemoryChunk::IsInNewLargeObjectSpace() const {
return owner()->identity() == NEW_LO_SPACE;
}

void Page::MarkNeverAllocateForTesting() {
DCHECK(this->owner()->identity() != NEW_SPACE);
DCHECK(!IsFlagSet(NEVER_ALLOCATE_ON_PAGE));
@ -708,10 +708,7 @@ Page* PagedSpace::InitializePage(MemoryChunk* chunk, Executability executable) {
Page* SemiSpace::InitializePage(MemoryChunk* chunk, Executability executable) {
DCHECK_EQ(executable, Executability::NOT_EXECUTABLE);
bool in_to_space = (id() != kFromSpace);
chunk->SetFlag(in_to_space ? MemoryChunk::IN_TO_SPACE
: MemoryChunk::IN_FROM_SPACE);
DCHECK(!chunk->IsFlagSet(in_to_space ? MemoryChunk::IN_FROM_SPACE
: MemoryChunk::IN_TO_SPACE));
chunk->SetFlag(in_to_space ? MemoryChunk::TO_PAGE : MemoryChunk::FROM_PAGE);
Page* page = static_cast<Page*>(chunk);
page->SetYoungGenerationPageFlags(heap()->incremental_marking()->IsMarking());
page->AllocateLocalTracker();
@ -748,6 +745,7 @@ LargePage* LargePage::Initialize(Heap* heap, MemoryChunk* chunk,
}

LargePage* page = static_cast<LargePage*>(chunk);
page->SetFlag(MemoryChunk::LARGE_PAGE);
page->list_node().Initialize();
return page;
}
@ -1312,7 +1310,7 @@ void MemoryChunk::ReleaseAllocatedMemory() {
if (young_generation_bitmap_ != nullptr) ReleaseYoungGenerationBitmap();
if (marking_bitmap_ != nullptr) ReleaseMarkingBitmap();

if (!heap_->IsLargeMemoryChunk(this)) {
if (!IsLargePage()) {
Page* page = static_cast<Page*>(this);
page->ReleaseFreeListCategories();
}
@ -2198,7 +2196,7 @@ bool SemiSpace::EnsureCurrentCapacity() {
memory_chunk_list_.Remove(current_page);
// Clear new space flags to avoid this page being treated as a new
// space page that is potentially being swept.
current_page->SetFlags(0, Page::kIsInNewSpaceMask);
current_page->SetFlags(0, Page::kIsInYoungGenerationMask);
heap()->memory_allocator()->Free<MemoryAllocator::kPooledAndQueue>(
current_page);
current_page = next_current;
@ -2660,17 +2658,16 @@ void SemiSpace::FixPagesFlags(intptr_t flags, intptr_t mask) {
page->set_owner(this);
page->SetFlags(flags, mask);
if (id_ == kToSpace) {
page->ClearFlag(MemoryChunk::IN_FROM_SPACE);
page->SetFlag(MemoryChunk::IN_TO_SPACE);
page->ClearFlag(MemoryChunk::FROM_PAGE);
page->SetFlag(MemoryChunk::TO_PAGE);
page->ClearFlag(MemoryChunk::NEW_SPACE_BELOW_AGE_MARK);
heap()->incremental_marking()->non_atomic_marking_state()->SetLiveBytes(
page, 0);
} else {
page->SetFlag(MemoryChunk::IN_FROM_SPACE);
page->ClearFlag(MemoryChunk::IN_TO_SPACE);
page->SetFlag(MemoryChunk::FROM_PAGE);
page->ClearFlag(MemoryChunk::TO_PAGE);
}
DCHECK(page->IsFlagSet(MemoryChunk::IN_TO_SPACE) ||
page->IsFlagSet(MemoryChunk::IN_FROM_SPACE));
DCHECK(page->InYoungGeneration());
}
}

@ -2759,10 +2756,10 @@ void SemiSpace::Verify() {
for (Page* page : *this) {
CHECK_EQ(page->owner(), this);
CHECK(page->InNewSpace());
CHECK(page->IsFlagSet(is_from_space ? MemoryChunk::IN_FROM_SPACE
: MemoryChunk::IN_TO_SPACE));
CHECK(!page->IsFlagSet(is_from_space ? MemoryChunk::IN_TO_SPACE
: MemoryChunk::IN_FROM_SPACE));
CHECK(page->IsFlagSet(is_from_space ? MemoryChunk::FROM_PAGE
: MemoryChunk::TO_PAGE));
CHECK(!page->IsFlagSet(is_from_space ? MemoryChunk::TO_PAGE
: MemoryChunk::FROM_PAGE));
CHECK(page->IsFlagSet(MemoryChunk::POINTERS_TO_HERE_ARE_INTERESTING));
if (!is_from_space) {
// The pointers-from-here-are-interesting flag isn't updated dynamically
@ -3558,13 +3555,14 @@ void LargeObjectSpace::RemoveChunkMapEntries(LargePage* page,

void LargeObjectSpace::PromoteNewLargeObject(LargePage* page) {
DCHECK_EQ(page->owner()->identity(), NEW_LO_SPACE);
DCHECK(page->IsFlagSet(MemoryChunk::IN_FROM_SPACE));
DCHECK(!page->IsFlagSet(MemoryChunk::IN_TO_SPACE));
DCHECK(page->IsLargePage());
DCHECK(page->IsFlagSet(MemoryChunk::FROM_PAGE));
DCHECK(!page->IsFlagSet(MemoryChunk::TO_PAGE));
size_t object_size = static_cast<size_t>(page->GetObject()->Size());
reinterpret_cast<NewLargeObjectSpace*>(page->owner())
->Unregister(page, object_size);
Register(page, object_size);
page->ClearFlag(MemoryChunk::IN_FROM_SPACE);
page->ClearFlag(MemoryChunk::FROM_PAGE);
page->SetOldGenerationPageFlags(heap()->incremental_marking()->IsMarking());
page->set_owner(this);
}
@ -3773,7 +3771,7 @@ AllocationResult NewLargeObjectSpace::AllocateRaw(int object_size) {
LargePage* page = AllocateLargePage(object_size, NOT_EXECUTABLE);
if (page == nullptr) return AllocationResult::Retry(identity());
page->SetYoungGenerationPageFlags(heap()->incremental_marking()->IsMarking());
page->SetFlag(MemoryChunk::IN_TO_SPACE);
page->SetFlag(MemoryChunk::TO_PAGE);
page->InitializationMemoryFence();
return page->GetObject();
}
@ -3786,8 +3784,8 @@ size_t NewLargeObjectSpace::Available() {
void NewLargeObjectSpace::Flip() {
for (LargePage* chunk = first_page(); chunk != nullptr;
chunk = chunk->next_page()) {
chunk->SetFlag(MemoryChunk::IN_FROM_SPACE);
chunk->ClearFlag(MemoryChunk::IN_TO_SPACE);
chunk->SetFlag(MemoryChunk::FROM_PAGE);
chunk->ClearFlag(MemoryChunk::TO_PAGE);
}
}

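Taken together, the hunks above give a young large page a small flag lifecycle: LargePage::Initialize tags every large page with LARGE_PAGE, NewLargeObjectSpace::AllocateRaw additionally sets TO_PAGE, Flip() turns TO_PAGE into FROM_PAGE before a scavenge, and PromoteNewLargeObject clears FROM_PAGE once the object is handed to the old-generation large object space. A minimal sketch of those transitions, using plain bit operations instead of the real MemoryChunk API (the bit positions mirror the flags defined in the next file; everything else is illustrative):

#include <cassert>
#include <cstdint>

constexpr uint32_t kFromPage = 1u << 3;   // FROM_PAGE
constexpr uint32_t kToPage = 1u << 4;     // TO_PAGE
constexpr uint32_t kLargePage = 1u << 5;  // LARGE_PAGE

struct Chunk { uint32_t flags; };

void AllocateYoungLarge(Chunk* c) { c->flags = kLargePage | kToPage; }
void FlipForScavenge(Chunk* c) { c->flags &= ~kToPage; c->flags |= kFromPage; }
void PromoteToOld(Chunk* c) { c->flags &= ~kFromPage; }

bool InYoungGeneration(const Chunk& c) {
  return (c.flags & (kFromPage | kToPage)) != 0;
}

int main() {
  Chunk page{0};
  AllocateYoungLarge(&page);
  assert(InYoungGeneration(page));
  FlipForScavenge(&page);
  assert(InYoungGeneration(page) && (page.flags & kFromPage));
  PromoteToOld(&page);
  assert(!InYoungGeneration(page) && (page.flags & kLargePage));
}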
@ -269,10 +269,12 @@ class MemoryChunk {
IS_EXECUTABLE = 1u << 0,
POINTERS_TO_HERE_ARE_INTERESTING = 1u << 1,
POINTERS_FROM_HERE_ARE_INTERESTING = 1u << 2,
// A page in new space has one of the next two flags set.
IN_FROM_SPACE = 1u << 3,
IN_TO_SPACE = 1u << 4,
NEW_SPACE_BELOW_AGE_MARK = 1u << 5,
// A page in the from-space or a young large page that was not scavenged
// yet.
FROM_PAGE = 1u << 3,
// A page in the to-space or a young large page that was scavenged.
TO_PAGE = 1u << 4,
LARGE_PAGE = 1u << 5,
EVACUATION_CANDIDATE = 1u << 6,
NEVER_EVACUATE = 1u << 7,

@ -322,7 +324,8 @@ class MemoryChunk {

// |INCREMENTAL_MARKING|: Indicates whether incremental marking is currently
// enabled.
INCREMENTAL_MARKING = 1u << 18
INCREMENTAL_MARKING = 1u << 18,
NEW_SPACE_BELOW_AGE_MARK = 1u << 19
};

using Flags = uintptr_t;
@ -335,10 +338,12 @@ class MemoryChunk {

static const Flags kEvacuationCandidateMask = EVACUATION_CANDIDATE;

static const Flags kIsInNewSpaceMask = IN_FROM_SPACE | IN_TO_SPACE;
static const Flags kIsInYoungGenerationMask = FROM_PAGE | TO_PAGE;

static const Flags kIsLargePageMask = LARGE_PAGE;

static const Flags kSkipEvacuationSlotsRecordingMask =
kEvacuationCandidateMask | kIsInNewSpaceMask;
kEvacuationCandidateMask | kIsInYoungGenerationMask;

// |kSweepingDone|: The page state when sweeping is complete or sweeping must
// not be performed on that page. Sweeper threads that are done with their
@ -633,17 +638,20 @@ class MemoryChunk {
return IsFlagSet(IS_EXECUTABLE) ? EXECUTABLE : NOT_EXECUTABLE;
}

bool InNewSpace() { return (flags_ & kIsInNewSpaceMask) != 0; }

bool InToSpace() { return IsFlagSet(IN_TO_SPACE); }

bool InFromSpace() { return IsFlagSet(IN_FROM_SPACE); }
bool IsFromPage() const { return (flags_ & FROM_PAGE) != 0; }
bool IsToPage() const { return (flags_ & TO_PAGE) != 0; }
bool IsLargePage() const { return (flags_ & LARGE_PAGE) != 0; }

bool InYoungGeneration() const {
return (flags_ & kIsInYoungGenerationMask) != 0;
}
bool InNewSpace() const { return InYoungGeneration() && !IsLargePage(); }
bool InNewLargeObjectSpace() const {
return InYoungGeneration() && IsLargePage();
}
bool InOldSpace() const;

bool InLargeObjectSpace() const;

inline bool IsInNewLargeObjectSpace() const;

Space* owner() const { return owner_; }

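The new predicates above derive everything from the page flags: a chunk is in the young generation if either FROM_PAGE or TO_PAGE is set, it is in new space if it is young and not a large page, and it is in the young large object space if it is young and a large page. A small self-contained restatement of that derivation (a simplified stand-in, not the real MemoryChunk class):

#include <cassert>
#include <cstdint>

struct ChunkFlags {
  static constexpr uint32_t FROM_PAGE = 1u << 3;
  static constexpr uint32_t TO_PAGE = 1u << 4;
  static constexpr uint32_t LARGE_PAGE = 1u << 5;

  uint32_t bits;

  bool InYoungGeneration() const { return (bits & (FROM_PAGE | TO_PAGE)) != 0; }
  bool IsLargePage() const { return (bits & LARGE_PAGE) != 0; }
  bool InNewSpace() const { return InYoungGeneration() && !IsLargePage(); }
  bool InNewLargeObjectSpace() const { return InYoungGeneration() && IsLargePage(); }
};

int main() {
  ChunkFlags semi_space_page{ChunkFlags::TO_PAGE};
  ChunkFlags young_large_page{ChunkFlags::TO_PAGE | ChunkFlags::LARGE_PAGE};
  assert(semi_space_page.InNewSpace() && !semi_space_page.InNewLargeObjectSpace());
  assert(!young_large_page.InNewSpace() && young_large_page.InNewLargeObjectSpace());
}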
@ -1204,8 +1212,7 @@ class V8_EXPORT_PRIVATE MemoryAllocator {
}

void AddMemoryChunkSafe(MemoryChunk* chunk) {
if (!heap_->IsLargeMemoryChunk(chunk) &&
chunk->executable() != EXECUTABLE) {
if (!chunk->IsLargePage() && chunk->executable() != EXECUTABLE) {
AddMemoryChunkSafe<kRegular>(chunk);
} else {
AddMemoryChunkSafe<kNonRegular>(chunk);
@ -2703,7 +2710,7 @@ class NewSpace : public SpaceWithLinearArea {
}

void MovePageFromSpaceToSpace(Page* page) {
DCHECK(page->InFromSpace());
DCHECK(page->IsFromPage());
from_space_.RemovePage(page);
to_space_.PrependPage(page);
}
@ -67,8 +67,8 @@ bool CommonStubCacheChecks(StubCache* stub_cache, Name name, Map map,
MaybeObject handler) {
// Validate that the name and handler do not move on scavenge, and that we
// can use identity checks instead of structural equality checks.
DCHECK(!Heap::InNewSpace(name));
DCHECK(!Heap::InNewSpace(handler));
DCHECK(!Heap::InYoungGeneration(name));
DCHECK(!Heap::InYoungGeneration(handler));
DCHECK(name->IsUniqueName());
DCHECK(name->HasHashCode());
if (handler->ptr() != kNullAddress) DCHECK(IC::IsHandler(handler));
@ -635,7 +635,7 @@ void JSObject::JSObjectVerify(Isolate* isolate) {

void Map::MapVerify(Isolate* isolate) {
Heap* heap = isolate->heap();
CHECK(!Heap::InNewSpace(*this));
CHECK(!Heap::InYoungGeneration(*this));
CHECK(FIRST_TYPE <= instance_type() && instance_type() <= LAST_TYPE);
CHECK(instance_size() == kVariableSizeSentinel ||
(kTaggedSize <= instance_size() &&
@ -990,7 +990,7 @@ void String::StringVerify(Isolate* isolate) {
CHECK(length() >= 0 && length() <= Smi::kMaxValue);
CHECK_IMPLIES(length() == 0, *this == ReadOnlyRoots(isolate).empty_string());
if (IsInternalizedString()) {
CHECK(!Heap::InNewSpace(*this));
CHECK(!Heap::InYoungGeneration(*this));
}
if (IsConsString()) {
ConsString::cast(*this)->ConsStringVerify(isolate);
@ -935,7 +935,7 @@ WriteBarrierMode HeapObject::GetWriteBarrierMode(
const DisallowHeapAllocation& promise) {
Heap* heap = Heap::FromWritableHeapObject(*this);
if (heap->incremental_marking()->IsMarking()) return UPDATE_WRITE_BARRIER;
if (Heap::InNewSpace(*this)) return SKIP_WRITE_BARRIER;
if (Heap::InYoungGeneration(*this)) return SKIP_WRITE_BARRIER;
return UPDATE_WRITE_BARRIER;
}

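The hunk above keeps the usual generational shortcut, only phrased in terms of the young generation: if incremental marking is off and the host object is itself young, a store into it cannot create an old-to-young pointer that the remembered set would have to record, so the write barrier can be skipped. A compact sketch of that decision with illustrative names (not the V8 API):

enum WriteBarrierMode { SKIP_WRITE_BARRIER, UPDATE_WRITE_BARRIER };

// host_is_young should come from the young-generation check on the host's
// page, i.e. the FROM_PAGE/TO_PAGE bits introduced by this change.
WriteBarrierMode GetWriteBarrierModeSketch(bool incremental_marking,
                                           bool host_is_young) {
  if (incremental_marking) return UPDATE_WRITE_BARRIER;  // marking barrier needed
  if (host_is_young) return SKIP_WRITE_BARRIER;          // no old-to-young edge possible
  return UPDATE_WRITE_BARRIER;                           // generational barrier needed
}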
@ -2657,7 +2657,7 @@ Handle<String> String::SlowFlatten(Isolate* isolate, Handle<ConsString> cons,

DCHECK(AllowHeapAllocation::IsAllowed());
int length = cons->length();
PretenureFlag tenure = Heap::InNewSpace(*cons) ? pretenure : TENURED;
PretenureFlag tenure = Heap::InYoungGeneration(*cons) ? pretenure : TENURED;
Handle<SeqString> result;
if (cons->IsOneByteRepresentation()) {
Handle<SeqOneByteString> flat = isolate->factory()->NewRawOneByteString(
@ -15917,9 +15917,10 @@ static bool ShouldConvertToSlowElements(JSObject object, uint32_t capacity,
if (index - capacity >= JSObject::kMaxGap) return true;
*new_capacity = JSObject::NewElementsCapacity(index + 1);
DCHECK_LT(index, *new_capacity);
// TODO(ulan): Check if it works with young large objects.
if (*new_capacity <= JSObject::kMaxUncheckedOldFastElementsLength ||
(*new_capacity <= JSObject::kMaxUncheckedFastElementsLength &&
Heap::InNewSpace(object))) {
Heap::InYoungGeneration(object))) {
return false;
}
// If the fast-case backing storage takes up much more memory than a
@ -17204,8 +17205,8 @@ Handle<Derived> HashTable<Derived, Shape>::EnsureCapacity(

const int kMinCapacityForPretenure = 256;
bool should_pretenure =
pretenure == TENURED ||
((capacity > kMinCapacityForPretenure) && !Heap::InNewSpace(*table));
pretenure == TENURED || ((capacity > kMinCapacityForPretenure) &&
!Heap::InYoungGeneration(*table));
Handle<Derived> new_table = HashTable::New(
isolate, new_nof, should_pretenure ? TENURED : NOT_TENURED);

@ -17253,7 +17254,7 @@ Handle<Derived> HashTable<Derived, Shape>::Shrink(Isolate* isolate,

const int kMinCapacityForPretenure = 256;
bool pretenure = (at_least_room_for > kMinCapacityForPretenure) &&
!Heap::InNewSpace(*table);
!Heap::InYoungGeneration(*table);
Handle<Derived> new_table =
HashTable::New(isolate, new_capacity, pretenure ? TENURED : NOT_TENURED,
USE_CUSTOM_MINIMUM_CAPACITY);
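The two hash-table hunks above follow the same pretenuring rule: when a table is grown or shrunk, the replacement is allocated in the old generation if pretenuring was requested explicitly, or if the table is large and the current copy already lives outside the young generation. A minimal sketch of that rule, with the function name being illustrative:

enum Pretenure { NOT_TENURED, TENURED };

// Mirrors the decision in EnsureCapacity/Shrink above; kMinCapacityForPretenure
// matches the constant used there.
Pretenure ChooseTenure(Pretenure requested, int capacity, bool table_is_young) {
  const int kMinCapacityForPretenure = 256;
  bool pretenure = requested == TENURED ||
                   (capacity > kMinCapacityForPretenure && !table_is_young);
  return pretenure ? TENURED : NOT_TENURED;
}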
@ -195,11 +195,12 @@ NEVER_READ_ONLY_SPACE_IMPL(Code)

INT_ACCESSORS(Code, raw_instruction_size, kInstructionSizeOffset)
INT_ACCESSORS(Code, handler_table_offset, kHandlerTableOffsetOffset)
#define CODE_ACCESSORS(name, type, offset) \
ACCESSORS_CHECKED2(Code, name, type, offset, true, !Heap::InNewSpace(value))
#define CODE_ACCESSORS(name, type, offset) \
ACCESSORS_CHECKED2(Code, name, type, offset, true, \
!Heap::InYoungGeneration(value))
#define SYNCHRONIZED_CODE_ACCESSORS(name, type, offset) \
SYNCHRONIZED_ACCESSORS_CHECKED2(Code, name, type, offset, true, \
!Heap::InNewSpace(value))
!Heap::InYoungGeneration(value))

CODE_ACCESSORS(relocation_info, ByteArray, kRelocationInfoOffset)
CODE_ACCESSORS(deoptimization_data, FixedArray, kDeoptimizationDataOffset)
@ -146,7 +146,7 @@ void FixedArray::NoWriteBarrierSet(FixedArray array, int index, Object value) {
DCHECK_NE(array->map(), array->GetReadOnlyRoots().fixed_cow_array_map());
DCHECK_GE(index, 0);
DCHECK_LT(index, array->length());
DCHECK(!Heap::InNewSpace(value));
DCHECK(!Heap::InYoungGeneration(value));
RELAXED_WRITE_FIELD(array, kHeaderSize + index * kTaggedSize, value);
}

@ -430,9 +430,10 @@ Object JSObject::InObjectPropertyAtPut(int index, Object value,

void JSObject::InitializeBody(Map map, int start_offset,
Object pre_allocated_value, Object filler_value) {
DCHECK(!filler_value->IsHeapObject() || !Heap::InNewSpace(filler_value));
DCHECK(!pre_allocated_value->IsHeapObject() ||
!Heap::InNewSpace(pre_allocated_value));
DCHECK_IMPLIES(filler_value->IsHeapObject(),
!Heap::InYoungGeneration(filler_value));
DCHECK_IMPLIES(pre_allocated_value->IsHeapObject(),
!Heap::InYoungGeneration(pre_allocated_value));
int size = map->instance_size();
int offset = start_offset;
if (filler_value != pre_allocated_value) {
@ -545,13 +546,13 @@ Code JSFunction::code() const {
}

void JSFunction::set_code(Code value) {
DCHECK(!Heap::InNewSpace(value));
DCHECK(!Heap::InYoungGeneration(value));
RELAXED_WRITE_FIELD(*this, kCodeOffset, value);
MarkingBarrier(*this, RawField(kCodeOffset), value);
}

void JSFunction::set_code_no_write_barrier(Code value) {
DCHECK(!Heap::InNewSpace(value));
DCHECK(!Heap::InYoungGeneration(value));
RELAXED_WRITE_FIELD(*this, kCodeOffset, value);
}

@ -846,8 +847,8 @@ NumberDictionary JSObject::element_dictionary() {

void JSReceiver::initialize_properties() {
ReadOnlyRoots roots = GetReadOnlyRoots();
DCHECK(!Heap::InNewSpace(roots.empty_fixed_array()));
DCHECK(!Heap::InNewSpace(roots.empty_property_dictionary()));
DCHECK(!Heap::InYoungGeneration(roots.empty_fixed_array()));
DCHECK(!Heap::InYoungGeneration(roots.empty_property_dictionary()));
if (map()->is_dictionary_map()) {
WRITE_FIELD(this, kPropertiesOrHashOffset,
roots.empty_property_dictionary());
@ -219,7 +219,7 @@ FixedArrayBase Map::GetInitialElements() const {
} else {
UNREACHABLE();
}
DCHECK(!Heap::InNewSpace(result));
DCHECK(!Heap::InYoungGeneration(result));
return result;
}

@ -70,8 +70,9 @@ Handle<Derived> OrderedHashTable<Derived, entrysize>::Clear(
Isolate* isolate, Handle<Derived> table) {
DCHECK(!table->IsObsolete());

Handle<Derived> new_table = Allocate(
isolate, kMinCapacity, Heap::InNewSpace(*table) ? NOT_TENURED : TENURED);
Handle<Derived> new_table =
Allocate(isolate, kMinCapacity,
Heap::InYoungGeneration(*table) ? NOT_TENURED : TENURED);

table->SetNextTable(*new_table);
table->SetNumberOfDeletedElements(kClearedTableSentinel);
@ -187,7 +188,8 @@ Handle<Derived> OrderedHashTable<Derived, entrysize>::Rehash(
DCHECK(!table->IsObsolete());

Handle<Derived> new_table = Derived::Allocate(
isolate, new_capacity, Heap::InNewSpace(*table) ? NOT_TENURED : TENURED);
isolate, new_capacity,
Heap::InYoungGeneration(*table) ? NOT_TENURED : TENURED);
int nof = table->NumberOfElements();
int nod = table->NumberOfDeletedElements();
int new_buckets = new_table->NumberOfBuckets();
@ -508,7 +510,7 @@ void SmallOrderedHashTable<Derived>::Initialize(Isolate* isolate,
memset(reinterpret_cast<byte*>(hashtable_start), kNotFound,
num_buckets + num_chains);

if (Heap::InNewSpace(*this)) {
if (Heap::InYoungGeneration(*this)) {
MemsetTagged(RawField(DataTableStartOffset()),
ReadOnlyRoots(isolate).the_hole_value(),
capacity * Derived::kEntrySize);
@ -728,7 +730,8 @@ Handle<Derived> SmallOrderedHashTable<Derived>::Rehash(Isolate* isolate,
DCHECK_GE(kMaxCapacity, new_capacity);

Handle<Derived> new_table = SmallOrderedHashTable<Derived>::Allocate(
isolate, new_capacity, Heap::InNewSpace(*table) ? NOT_TENURED : TENURED);
isolate, new_capacity,
Heap::InYoungGeneration(*table) ? NOT_TENURED : TENURED);
int nof = table->NumberOfElements();
int nod = table->NumberOfDeletedElements();
int new_entry = 0;
@ -207,7 +207,8 @@ Object RemoveArrayHoles(Isolate* isolate, Handle<JSReceiver> receiver,
Handle<Map> new_map =
JSObject::GetElementsTransitionMap(object, HOLEY_ELEMENTS);

PretenureFlag tenure = Heap::InNewSpace(*object) ? NOT_TENURED : TENURED;
PretenureFlag tenure =
Heap::InYoungGeneration(*object) ? NOT_TENURED : TENURED;
Handle<FixedArray> fast_elements =
isolate->factory()->NewFixedArray(dict->NumberOfElements(), tenure);
dict->CopyValuesTo(*fast_elements);

@ -796,7 +796,7 @@ RUNTIME_FUNCTION(Runtime_InNewSpace) {
SealHandleScope shs(isolate);
DCHECK_EQ(1, args.length());
CONVERT_ARG_CHECKED(Object, obj, 0);
return isolate->heap()->ToBoolean(Heap::InNewSpace(obj));
return isolate->heap()->ToBoolean(Heap::InYoungGeneration(obj));
}

RUNTIME_FUNCTION(Runtime_IsAsmWasmCode) {
@ -648,7 +648,7 @@ bool Deserializer::ReadData(UnalignedSlot current, UnalignedSlot limit,
case kVariableRepeat: {
int repeats = source_.GetInt();
MaybeObject object = current.ReadPrevious();
DCHECK(!Heap::InNewSpace(object));
DCHECK(!Heap::InYoungGeneration(object));
for (int i = 0; i < repeats; i++) {
UnalignedCopy(current, object);
current.Advance();
@ -723,7 +723,7 @@ bool Deserializer::ReadData(UnalignedSlot current, UnalignedSlot limit,
int id = data & kRootArrayConstantsMask;
RootIndex root_index = static_cast<RootIndex>(id);
MaybeObject object = MaybeObject::FromObject(isolate->root(root_index));
DCHECK(!Heap::InNewSpace(object));
DCHECK(!Heap::InYoungGeneration(object));
UnalignedCopy(current, object);
current.Advance();
break;
@ -747,7 +747,7 @@ bool Deserializer::ReadData(UnalignedSlot current, UnalignedSlot limit,
}

UnalignedCopy(current, hot_maybe_object);
if (write_barrier_needed && Heap::InNewSpace(hot_object)) {
if (write_barrier_needed && Heap::InYoungGeneration(hot_object)) {
HeapObject current_object =
HeapObject::FromAddress(current_object_address);
GenerationalBarrier(current_object, current.Slot(), hot_maybe_object);
@ -771,7 +771,7 @@ bool Deserializer::ReadData(UnalignedSlot current, UnalignedSlot limit,
SIXTEEN_CASES(kFixedRepeat) {
int repeats = data - kFixedRepeatStart;
MaybeObject object = current.ReadPrevious();
DCHECK(!Heap::InNewSpace(object));
DCHECK(!Heap::InYoungGeneration(object));
for (int i = 0; i < repeats; i++) {
UnalignedCopy(current, object);
current.Advance();
@ -851,22 +851,22 @@ UnalignedSlot Deserializer::ReadDataCase(Isolate* isolate,
int id = source_.GetInt();
RootIndex root_index = static_cast<RootIndex>(id);
new_object = isolate->root(root_index);
emit_write_barrier = Heap::InNewSpace(new_object);
emit_write_barrier = Heap::InYoungGeneration(new_object);
hot_objects_.Add(HeapObject::cast(new_object));
} else if (where == kReadOnlyObjectCache) {
int cache_index = source_.GetInt();
new_object = isolate->read_only_object_cache()->at(cache_index);
DCHECK(!Heap::InNewSpace(new_object));
DCHECK(!Heap::InYoungGeneration(new_object));
emit_write_barrier = false;
} else if (where == kPartialSnapshotCache) {
int cache_index = source_.GetInt();
new_object = isolate->partial_snapshot_cache()->at(cache_index);
emit_write_barrier = Heap::InNewSpace(new_object);
emit_write_barrier = Heap::InYoungGeneration(new_object);
} else {
DCHECK_EQ(where, kAttachedReference);
int index = source_.GetInt();
new_object = *attached_objects_[index];
emit_write_barrier = Heap::InNewSpace(new_object);
emit_write_barrier = Heap::InYoungGeneration(new_object);
}
if (within == kInnerPointer) {
DCHECK_EQ(how, kFromCode);
@ -215,8 +215,10 @@ void Serializer::PutRoot(RootIndex root, HeapObject object,
STATIC_ASSERT(static_cast<int>(RootIndex::kArgumentsMarker) ==
kNumberOfRootArrayConstants - 1);

// TODO(ulan): Check that it works with young large objects.
if (how_to_code == kPlain && where_to_point == kStartOfObject &&
root_index < kNumberOfRootArrayConstants && !Heap::InNewSpace(object)) {
root_index < kNumberOfRootArrayConstants &&
!Heap::InYoungGeneration(object)) {
if (skip == 0) {
sink_.Put(kRootArrayConstants + root_index, "RootConstant");
} else {
@ -699,7 +701,7 @@ void Serializer::ObjectSerializer::VisitPointers(HeapObject host,
RootsTable::IsImmortalImmovable(root_index) &&
*current == *(current - 1)) {
DCHECK_EQ(reference_type, HeapObjectReferenceType::STRONG);
DCHECK(!Heap::InNewSpace(current_contents));
DCHECK(!Heap::InYoungGeneration(current_contents));
int repeat_count = 1;
while (current + repeat_count < end - 1 &&
*(current + repeat_count) == *current) {
@ -5767,7 +5767,8 @@ TEST(YoungGenerationLargeObjectAllocationScavenge) {
Handle<FixedArray> array_small = isolate->factory()->NewFixedArray(200000);
MemoryChunk* chunk = MemoryChunk::FromHeapObject(*array_small);
CHECK_EQ(NEW_LO_SPACE, chunk->owner()->identity());
CHECK(chunk->IsFlagSet(MemoryChunk::IN_TO_SPACE));
CHECK(chunk->IsFlagSet(MemoryChunk::LARGE_PAGE));
CHECK(chunk->IsFlagSet(MemoryChunk::TO_PAGE));

Handle<Object> number = isolate->factory()->NewHeapNumber(123.456);
array_small->set(0, *number);
@ -5778,7 +5779,7 @@ TEST(YoungGenerationLargeObjectAllocationScavenge) {
// generation large object space.
chunk = MemoryChunk::FromHeapObject(*array_small);
CHECK_EQ(LO_SPACE, chunk->owner()->identity());
CHECK(!chunk->IsFlagSet(MemoryChunk::IN_TO_SPACE));
CHECK(!chunk->InYoungGeneration());

CcTest::CollectAllAvailableGarbage();
}
@ -5796,7 +5797,8 @@ TEST(YoungGenerationLargeObjectAllocationMarkCompact) {
Handle<FixedArray> array_small = isolate->factory()->NewFixedArray(200000);
MemoryChunk* chunk = MemoryChunk::FromHeapObject(*array_small);
CHECK_EQ(NEW_LO_SPACE, chunk->owner()->identity());
CHECK(chunk->IsFlagSet(MemoryChunk::IN_TO_SPACE));
CHECK(chunk->IsFlagSet(MemoryChunk::LARGE_PAGE));
CHECK(chunk->IsFlagSet(MemoryChunk::TO_PAGE));

Handle<Object> number = isolate->factory()->NewHeapNumber(123.456);
array_small->set(0, *number);
@ -5807,7 +5809,7 @@ TEST(YoungGenerationLargeObjectAllocationMarkCompact) {
// large object space.
chunk = MemoryChunk::FromHeapObject(*array_small);
CHECK_EQ(LO_SPACE, chunk->owner()->identity());
CHECK(!chunk->IsFlagSet(MemoryChunk::IN_TO_SPACE));
CHECK(!chunk->InYoungGeneration());

CcTest::CollectAllAvailableGarbage();
}
@ -5827,7 +5829,7 @@ TEST(YoungGenerationLargeObjectAllocationReleaseScavenger) {
Handle<FixedArray> array_small = isolate->factory()->NewFixedArray(20000);
MemoryChunk* chunk = MemoryChunk::FromHeapObject(*array_small);
CHECK_EQ(NEW_LO_SPACE, chunk->owner()->identity());
CHECK(chunk->IsFlagSet(MemoryChunk::IN_TO_SPACE));
CHECK(chunk->IsFlagSet(MemoryChunk::TO_PAGE));
}
}

@ -84,38 +84,38 @@ TEST_F(SpacesTest, WriteBarrierIsMarking) {
EXPECT_FALSE(slim_chunk->IsMarking());
}

TEST_F(SpacesTest, WriteBarrierInNewSpaceToSpace) {
TEST_F(SpacesTest, WriteBarrierInYoungGenerationToSpace) {
const size_t kSizeOfMemoryChunk = sizeof(MemoryChunk);
char memory[kSizeOfMemoryChunk];
memset(&memory, 0, kSizeOfMemoryChunk);
MemoryChunk* chunk = reinterpret_cast<MemoryChunk*>(&memory);
heap_internals::MemoryChunk* slim_chunk =
reinterpret_cast<heap_internals::MemoryChunk*>(&memory);
EXPECT_FALSE(chunk->InNewSpace());
EXPECT_FALSE(slim_chunk->InNewSpace());
chunk->SetFlag(MemoryChunk::IN_TO_SPACE);
EXPECT_TRUE(chunk->InNewSpace());
EXPECT_TRUE(slim_chunk->InNewSpace());
chunk->ClearFlag(MemoryChunk::IN_TO_SPACE);
EXPECT_FALSE(chunk->InNewSpace());
EXPECT_FALSE(slim_chunk->InNewSpace());
EXPECT_FALSE(chunk->InYoungGeneration());
EXPECT_FALSE(slim_chunk->InYoungGeneration());
chunk->SetFlag(MemoryChunk::TO_PAGE);
EXPECT_TRUE(chunk->InYoungGeneration());
EXPECT_TRUE(slim_chunk->InYoungGeneration());
chunk->ClearFlag(MemoryChunk::TO_PAGE);
EXPECT_FALSE(chunk->InYoungGeneration());
EXPECT_FALSE(slim_chunk->InYoungGeneration());
}

TEST_F(SpacesTest, WriteBarrierInNewSpaceFromSpace) {
TEST_F(SpacesTest, WriteBarrierInYoungGenerationFromSpace) {
const size_t kSizeOfMemoryChunk = sizeof(MemoryChunk);
char memory[kSizeOfMemoryChunk];
memset(&memory, 0, kSizeOfMemoryChunk);
MemoryChunk* chunk = reinterpret_cast<MemoryChunk*>(&memory);
heap_internals::MemoryChunk* slim_chunk =
reinterpret_cast<heap_internals::MemoryChunk*>(&memory);
EXPECT_FALSE(chunk->InNewSpace());
EXPECT_FALSE(slim_chunk->InNewSpace());
chunk->SetFlag(MemoryChunk::IN_FROM_SPACE);
EXPECT_TRUE(chunk->InNewSpace());
EXPECT_TRUE(slim_chunk->InNewSpace());
chunk->ClearFlag(MemoryChunk::IN_FROM_SPACE);
EXPECT_FALSE(chunk->InNewSpace());
EXPECT_FALSE(slim_chunk->InNewSpace());
EXPECT_FALSE(chunk->InYoungGeneration());
EXPECT_FALSE(slim_chunk->InYoungGeneration());
chunk->SetFlag(MemoryChunk::FROM_PAGE);
EXPECT_TRUE(chunk->InYoungGeneration());
EXPECT_TRUE(slim_chunk->InYoungGeneration());
chunk->ClearFlag(MemoryChunk::FROM_PAGE);
EXPECT_FALSE(chunk->InYoungGeneration());
EXPECT_FALSE(slim_chunk->InYoungGeneration());
}

TEST_F(SpacesTest, CodeRangeAddressReuse) {
Reference in New Issue
Block a user