[heap] MinorMC: Reuse NewSpaceVisitor for marking

Marking now completely shares first-level visitation with the Scavenger.

Also remove recursive marking along the way, as we cannot reliably check
for stack overflow on background tasks.
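
For context, a self-contained sketch of the pattern this relies on follows.
HeapObject, NewSpaceVisitorBase, ScavengeVisitor, and MarkingVisitor below are
simplified, hypothetical stand-ins, not V8's actual classes; the point is only
that first-level visitation, i.e. walking an object's pointer slots, is
written once in a CRTP base, so the Scavenger and the minor-MC marker differ
only in the per-slot action.

#include <cstdio>
#include <vector>

struct HeapObject {
  std::vector<HeapObject*> slots;  // the object's pointer fields
};

template <typename ConcreteVisitor>
class NewSpaceVisitorBase {
 public:
  // Shared first-level visitation: the base walks the slots exactly
  // once; the concrete visitor decides what a slot visit means.
  void Visit(HeapObject* object) {
    auto* self = static_cast<ConcreteVisitor*>(this);
    for (HeapObject*& slot : object->slots) {
      self->VisitPointer(object, &slot);
    }
  }
};

class ScavengeVisitor final : public NewSpaceVisitorBase<ScavengeVisitor> {
 public:
  void VisitPointer(HeapObject* host, HeapObject** slot) {
    std::printf("scavenge: copy/forward %p\n", static_cast<void*>(*slot));
  }
};

class MarkingVisitor final : public NewSpaceVisitorBase<MarkingVisitor> {
 public:
  void VisitPointer(HeapObject* host, HeapObject** slot) {
    std::printf("minor-mc: mark %p\n", static_cast<void*>(*slot));
  }
};

int main() {
  HeapObject a, b;
  a.slots = {&b};
  ScavengeVisitor().Visit(&a);  // same walk...
  MarkingVisitor().Visit(&a);   // ...different per-slot action
}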

Bug: chromium:651354
Change-Id: I6da1dc787cdfb4232ea4b6cb8e72e0f736cabf10
Reviewed-on: https://chromium-review.googlesource.com/544967
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#46162}

Michael Lippautz, 2017-06-23 10:41:55 +02:00 (committed by Commit Bot)
parent c535258aab
commit b45f206651

@@ -2203,21 +2203,14 @@ void MarkCompactCollector::RecordObjectStats() {
   }
 }
 
 class YoungGenerationMarkingVisitor final
-    : public HeapVisitor<int, YoungGenerationMarkingVisitor> {
+    : public NewSpaceVisitor<YoungGenerationMarkingVisitor> {
  public:
-  using BaseClass = HeapVisitor<int, YoungGenerationMarkingVisitor>;
-
   YoungGenerationMarkingVisitor(Heap* heap,
                                 WorkStealingBag* global_marking_deque,
                                 int task_id)
       : heap_(heap), marking_deque_(global_marking_deque, task_id) {}
 
   void VisitPointers(HeapObject* host, Object** start, Object** end) final {
-    const int kMinRangeForMarkingRecursion = 64;
-    if (end - start >= kMinRangeForMarkingRecursion) {
-      if (MarkRecursively(host, start, end)) return;
-    }
     for (Object** p = start; p < end; p++) {
       VisitPointer(host, p);
     }
@@ -2231,42 +2224,6 @@ class YoungGenerationMarkingVisitor final
     }
   }
 
-  void VisitCodeEntry(JSFunction* host, Address code_entry) final {
-    // Code is not in new space.
-  }
-
-  // Special cases for young generation.
-
-  int VisitJSFunction(Map* map, JSFunction* object) final {
-    if (!ShouldVisit(object)) return 0;
-    int size = JSFunction::BodyDescriptorWeak::SizeOf(map, object);
-    VisitMapPointer(object, object->map_slot());
-    JSFunction::BodyDescriptorWeak::IterateBody(object, size, this);
-    return size;
-  }
-
-  int VisitNativeContext(Map* map, Context* object) final {
-    if (!ShouldVisit(object)) return 0;
-    int size = Context::BodyDescriptor::SizeOf(map, object);
-    VisitMapPointer(object, object->map_slot());
-    Context::BodyDescriptor::IterateBody(object, size, this);
-    return size;
-  }
-
-  int VisitJSApiObject(Map* map, JSObject* object) final {
-    return VisitJSObject(map, object);
-  }
-
-  int VisitBytecodeArray(Map* map, BytecodeArray* object) final {
-    UNREACHABLE();
-    return 0;
-  }
-
-  int VisitSharedFunctionInfo(Map* map, SharedFunctionInfo* object) final {
-    UNREACHABLE();
-    return 0;
-  }
-
  private:
   inline MarkingState marking_state(HeapObject* object) {
     SLOW_DCHECK(
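
The overrides deleted in this hunk are exactly the cases that behave the same
for every new-space visitor, which is presumably why sharing the base class
with the Scavenger makes them redundant here. Below is a self-contained sketch
of that hoisting idea; CodeEntry, BytecodeArray, and NewSpaceVisitorBase are
hypothetical simplified types, not V8's real NewSpaceVisitor API.

#include <cassert>
#include <cstdio>

struct CodeEntry {};      // stand-in for a code-entry slot
struct BytecodeArray {};  // stand-in; never allocated in new space

template <typename ConcreteVisitor>
class NewSpaceVisitorBase {
 public:
  // Code objects are not allocated in new space, so every new-space
  // visitor can treat code entries as a no-op.
  void VisitCodeEntry(CodeEntry*) {}

  // Bytecode arrays never live in new space; reaching one from a
  // new-space walk indicates a bug in the caller.
  int VisitBytecodeArray(BytecodeArray*) {
    assert(false && "bytecode arrays are old-space only");
    return 0;
  }
};

// The concrete visitor inherits both behaviors instead of restating them.
class MarkingVisitor final : public NewSpaceVisitorBase<MarkingVisitor> {};

int main() {
  MarkingVisitor visitor;
  CodeEntry entry;
  visitor.VisitCodeEntry(&entry);  // inherited no-op
  std::printf("defaults inherited from the shared base\n");
}
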
@@ -2283,24 +2240,6 @@ class YoungGenerationMarkingVisitor final
     }
   }
 
-  inline bool MarkRecursively(HeapObject* host, Object** start, Object** end) {
-    // TODO(mlippautz): Stack check on background tasks. We cannot do a reliable
-    // stack check on background tasks yet.
-    for (Object** p = start; p < end; p++) {
-      Object* target = *p;
-      if (heap_->InNewSpace(target)) {
-        HeapObject* target_object = HeapObject::cast(target);
-        if (ObjectMarking::WhiteToGrey<AccessMode::ATOMIC>(
-                target_object, marking_state(target_object))) {
-          const int size = Visit(target_object);
-          marking_state(target_object)
-              .IncrementLiveBytes<AccessMode::ATOMIC>(size);
-        }
-      }
-    }
-    return true;
-  }
-
   Heap* heap_;
   LocalWorkStealingBag marking_deque_;
 };
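
With MarkRecursively gone, a newly discovered object is handled through the
per-task work-stealing deque and drained iteratively, so marking depth costs
heap memory rather than C++ stack frames, which is the stack-overflow concern
the removed TODO describes. A self-contained sketch of that worklist pattern
follows; Object and MarkIteratively are illustrative names, not V8's
WorkStealingBag API.

#include <cstdio>
#include <deque>
#include <vector>

struct Object {
  bool marked = false;
  std::vector<Object*> fields;  // outgoing pointers
};

// Drain-style marking: pop an object, visit its slots, and push any
// unmarked target back on the worklist instead of recursing into it.
void MarkIteratively(Object* root) {
  std::deque<Object*> worklist;
  root->marked = true;
  worklist.push_back(root);
  while (!worklist.empty()) {
    Object* current = worklist.back();
    worklist.pop_back();
    for (Object* target : current->fields) {
      if (target != nullptr && !target->marked) {
        target->marked = true;  // the white-to-grey transition
        worklist.push_back(target);
      }
    }
  }
}

int main() {
  // A pointer chain deep enough to overflow a recursive marker's stack
  // is processed here in constant stack space.
  std::vector<Object> chain(100000);
  for (size_t i = 0; i + 1 < chain.size(); i++) {
    chain[i].fields.push_back(&chain[i + 1]);
  }
  MarkIteratively(&chain[0]);
  std::printf("last object marked: %d\n", int(chain.back().marked));
}

A work-stealing deque additionally lets idle marking tasks take work from busy
ones; the sketch keeps a single local worklist for brevity.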