[heap] Less aggressive inlining of IncrementalMarking code.

This moves some methods of IncrementalMarking from the inline header
into the compilation unit. The methods in question are either not hot
or are already called through a non-inline function.

R=hpayer@chromium.org

Review URL: https://codereview.chromium.org/1380523002

Cr-Commit-Position: refs/heads/master@{#31017}
mstarzinger committed 2015-09-30 01:23:55 -07:00 (committed by Commit bot)
parent 759591dae7, commit 9e0e7273c6
4 changed files with 91 additions and 87 deletions
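For context, the pattern applied throughout this CL is the usual split between a declaration in the header and an out-of-line definition in the compilation unit. A minimal sketch of the before/after shape, using a hypothetical Example class rather than code from this commit:

    #include <cstdint>

    // example.h (hypothetical) -- before a change like this one, the method
    // would be declared "inline" here and defined in example-inl.h, so its
    // body was re-expanded in every translation unit including the -inl.h.
    class Example {
     public:
      void RecordSomething(int bytes);  // was: inline void RecordSomething(int);

     private:
      int64_t total_ = 0;
    };

    // example.cc -- after: the body lives in the compilation unit, is
    // compiled exactly once, and callers make a regular out-of-line call.
    void Example::RecordSomething(int bytes) { total_ += bytes; }

This trades a usually negligible call overhead for less generated code at every include site, which is the right trade for methods that are not hot.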

src/heap/heap-inl.h

@@ -11,6 +11,7 @@
 #include "src/counters.h"
 #include "src/heap/heap.h"
 #include "src/heap/incremental-marking-inl.h"
+#include "src/heap/mark-compact.h"
 #include "src/heap/spaces-inl.h"
 #include "src/heap/store-buffer.h"
 #include "src/heap/store-buffer-inl.h"

src/heap/incremental-marking-inl.h

@@ -6,42 +6,11 @@
 #define V8_HEAP_INCREMENTAL_MARKING_INL_H_
 
 #include "src/heap/incremental-marking.h"
-#include "src/heap/mark-compact.h"
 
 namespace v8 {
 namespace internal {
 
-bool IncrementalMarking::BaseRecordWrite(HeapObject* obj, Object** slot,
-                                         Object* value) {
-  HeapObject* value_heap_obj = HeapObject::cast(value);
-  MarkBit value_bit = Marking::MarkBitFrom(value_heap_obj);
-  if (Marking::IsWhite(value_bit)) {
-    MarkBit obj_bit = Marking::MarkBitFrom(obj);
-    if (Marking::IsBlack(obj_bit)) {
-      MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address());
-      if (chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR)) {
-        if (chunk->IsLeftOfProgressBar(slot)) {
-          WhiteToGreyAndPush(value_heap_obj, value_bit);
-          RestartIfNotMarking();
-        } else {
-          return false;
-        }
-      } else {
-        BlackToGreyAndUnshift(obj, obj_bit);
-        RestartIfNotMarking();
-        return false;
-      }
-    } else {
-      return false;
-    }
-  }
-  if (!is_compacting_) return false;
-  MarkBit obj_bit = Marking::MarkBitFrom(obj);
-  return Marking::IsBlack(obj_bit);
-}
-
-
 void IncrementalMarking::RecordWrite(HeapObject* obj, Object** slot,
                                      Object* value) {
   if (IsMarking() && value->IsHeapObject()) {
@@ -52,7 +21,9 @@ void IncrementalMarking::RecordWrite(HeapObject* obj, Object** slot,
 
 void IncrementalMarking::RecordWriteOfCodeEntry(JSFunction* host, Object** slot,
                                                 Code* value) {
-  if (IsMarking()) RecordWriteOfCodeEntrySlow(host, slot, value);
+  if (IsMarking()) {
+    RecordWriteOfCodeEntrySlow(host, slot, value);
+  }
 }
@@ -64,57 +35,7 @@ void IncrementalMarking::RecordWriteIntoCode(HeapObject* obj, RelocInfo* rinfo,
 }
 
-
-void IncrementalMarking::RecordWrites(HeapObject* obj) {
-  if (IsMarking()) {
-    MarkBit obj_bit = Marking::MarkBitFrom(obj);
-    if (Marking::IsBlack(obj_bit)) {
-      MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address());
-      if (chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR)) {
-        chunk->set_progress_bar(0);
-      }
-      BlackToGreyAndUnshift(obj, obj_bit);
-      RestartIfNotMarking();
-    }
-  }
-}
-
-
-void IncrementalMarking::BlackToGreyAndUnshift(HeapObject* obj,
-                                               MarkBit mark_bit) {
-  DCHECK(Marking::MarkBitFrom(obj) == mark_bit);
-  DCHECK(obj->Size() >= 2 * kPointerSize);
-  DCHECK(IsMarking());
-  Marking::BlackToGrey(mark_bit);
-  int obj_size = obj->Size();
-  MemoryChunk::IncrementLiveBytesFromGC(obj, -obj_size);
-  bytes_scanned_ -= obj_size;
-  int64_t old_bytes_rescanned = bytes_rescanned_;
-  bytes_rescanned_ = old_bytes_rescanned + obj_size;
-  if ((bytes_rescanned_ >> 20) != (old_bytes_rescanned >> 20)) {
-    if (bytes_rescanned_ > 2 * heap_->PromotedSpaceSizeOfObjects()) {
-      // If we have queued twice the heap size for rescanning then we are
-      // going around in circles, scanning the same objects again and again
-      // as the program mutates the heap faster than we can incrementally
-      // trace it.  In this case we switch to non-incremental marking in
-      // order to finish off this marking phase.
-      if (FLAG_trace_incremental_marking) {
-        PrintIsolate(
-            heap()->isolate(),
-            "Hurrying incremental marking because of lack of progress\n");
-      }
-      marking_speed_ = kMaxMarkingSpeed;
-    }
-  }
-  heap_->mark_compact_collector()->marking_deque()->Unshift(obj);
-}
-
-
-void IncrementalMarking::WhiteToGreyAndPush(HeapObject* obj, MarkBit mark_bit) {
-  Marking::WhiteToGrey(mark_bit);
-  heap_->mark_compact_collector()->marking_deque()->Push(obj);
-}
-
-}
-}  // namespace v8::internal
+}  // namespace internal
+}  // namespace v8
 
 #endif  // V8_HEAP_INCREMENTAL_MARKING_INL_H_
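The largest method moved out of this header, BaseRecordWrite, is the incremental-marking write barrier: a store that makes an already-scanned (black) object point at an unvisited (white) object must re-grey one side of that edge, or the new reference would never be traced before marking finishes. A simplified, self-contained sketch of that invariant (names and types are illustrative, not the V8 API):

    #include <deque>

    // Tri-color invariant: once an object is black (fully scanned), the
    // marker will not visit it again, so a new black->white edge has to
    // re-grey one of the two objects involved.
    enum class Color { kWhite, kGrey, kBlack };

    struct Obj {
      Color color = Color::kWhite;
    };

    // Barrier run on stores of `value` into a slot of `host` while
    // incremental marking is active.
    void WriteBarrier(Obj* host, Obj* value, std::deque<Obj*>* marking_deque) {
      if (host->color == Color::kBlack && value->color == Color::kWhite) {
        value->color = Color::kGrey;      // grey the freshly referenced object...
        marking_deque->push_back(value);  // ...and queue it to be scanned
      }
    }

The real BaseRecordWrite additionally handles the progress bar on large objects and the re-greying of the host object itself, as visible in the diff above.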

src/heap/incremental-marking.cc

@@ -47,6 +47,36 @@ IncrementalMarking::IncrementalMarking(Heap* heap)
       request_type_(COMPLETE_MARKING) {}
 
 
+bool IncrementalMarking::BaseRecordWrite(HeapObject* obj, Object** slot,
+                                         Object* value) {
+  HeapObject* value_heap_obj = HeapObject::cast(value);
+  MarkBit value_bit = Marking::MarkBitFrom(value_heap_obj);
+  if (Marking::IsWhite(value_bit)) {
+    MarkBit obj_bit = Marking::MarkBitFrom(obj);
+    if (Marking::IsBlack(obj_bit)) {
+      MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address());
+      if (chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR)) {
+        if (chunk->IsLeftOfProgressBar(slot)) {
+          WhiteToGreyAndPush(value_heap_obj, value_bit);
+          RestartIfNotMarking();
+        } else {
+          return false;
+        }
+      } else {
+        BlackToGreyAndUnshift(obj, obj_bit);
+        RestartIfNotMarking();
+        return false;
+      }
+    } else {
+      return false;
+    }
+  }
+  if (!is_compacting_) return false;
+  MarkBit obj_bit = Marking::MarkBitFrom(obj);
+  return Marking::IsBlack(obj_bit);
+}
+
+
 void IncrementalMarking::RecordWriteSlow(HeapObject* obj, Object** slot,
                                          Object* value) {
   if (BaseRecordWrite(obj, slot, value) && slot != NULL) {
@@ -134,6 +164,58 @@ void IncrementalMarking::RecordWriteIntoCodeSlow(HeapObject* obj,
 }
 
 
+void IncrementalMarking::RecordWrites(HeapObject* obj) {
+  if (IsMarking()) {
+    MarkBit obj_bit = Marking::MarkBitFrom(obj);
+    if (Marking::IsBlack(obj_bit)) {
+      MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address());
+      if (chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR)) {
+        chunk->set_progress_bar(0);
+      }
+      BlackToGreyAndUnshift(obj, obj_bit);
+      RestartIfNotMarking();
+    }
+  }
+}
+
+
+void IncrementalMarking::BlackToGreyAndUnshift(HeapObject* obj,
+                                               MarkBit mark_bit) {
+  DCHECK(Marking::MarkBitFrom(obj) == mark_bit);
+  DCHECK(obj->Size() >= 2 * kPointerSize);
+  DCHECK(IsMarking());
+  Marking::BlackToGrey(mark_bit);
+  int obj_size = obj->Size();
+  MemoryChunk::IncrementLiveBytesFromGC(obj, -obj_size);
+  bytes_scanned_ -= obj_size;
+  int64_t old_bytes_rescanned = bytes_rescanned_;
+  bytes_rescanned_ = old_bytes_rescanned + obj_size;
+  if ((bytes_rescanned_ >> 20) != (old_bytes_rescanned >> 20)) {
+    if (bytes_rescanned_ > 2 * heap_->PromotedSpaceSizeOfObjects()) {
+      // If we have queued twice the heap size for rescanning then we are
+      // going around in circles, scanning the same objects again and again
+      // as the program mutates the heap faster than we can incrementally
+      // trace it.  In this case we switch to non-incremental marking in
+      // order to finish off this marking phase.
+      if (FLAG_trace_incremental_marking) {
+        PrintIsolate(
+            heap()->isolate(),
+            "Hurrying incremental marking because of lack of progress\n");
+      }
+      marking_speed_ = kMaxMarkingSpeed;
+    }
+  }
+  heap_->mark_compact_collector()->marking_deque()->Unshift(obj);
+}
+
+
+void IncrementalMarking::WhiteToGreyAndPush(HeapObject* obj, MarkBit mark_bit) {
+  Marking::WhiteToGrey(mark_bit);
+  heap_->mark_compact_collector()->marking_deque()->Push(obj);
+}
+
+
 static void MarkObjectGreyDoNotEnqueue(Object* obj) {
   if (obj->IsHeapObject()) {
     HeapObject* heap_obj = HeapObject::cast(obj);

src/heap/incremental-marking.h

@@ -174,11 +174,11 @@ class IncrementalMarking {
   void RecordCodeTargetPatch(Code* host, Address pc, HeapObject* value);
   void RecordCodeTargetPatch(Address pc, HeapObject* value);
 
-  inline void RecordWrites(HeapObject* obj);
+  void RecordWrites(HeapObject* obj);
 
-  inline void BlackToGreyAndUnshift(HeapObject* obj, MarkBit mark_bit);
+  void BlackToGreyAndUnshift(HeapObject* obj, MarkBit mark_bit);
 
-  inline void WhiteToGreyAndPush(HeapObject* obj, MarkBit mark_bit);
+  void WhiteToGreyAndPush(HeapObject* obj, MarkBit mark_bit);
 
   inline void SetOldSpacePageFlags(MemoryChunk* chunk) {
     SetOldSpacePageFlags(chunk, IsMarking(), IsCompacting());
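A side note on the moved BlackToGreyAndUnshift: the (bytes_rescanned_ >> 20) != (old_bytes_rescanned >> 20) guard means the comparison against PromotedSpaceSizeOfObjects() runs only when the rescan counter crosses a 1 MB boundary, keeping the common path of the barrier cheap. A standalone illustration of that amortization trick (the byte counts below are made up):

    #include <cstdint>
    #include <cstdio>

    int main() {
      int64_t bytes_rescanned = 0;
      for (int i = 0; i < 2000; ++i) {
        int64_t old_bytes_rescanned = bytes_rescanned;
        bytes_rescanned += 1000;  // pretend a 1000-byte object was re-greyed
        // ">> 20" buckets both counters into megabytes, so the expensive
        // check (here just a print) runs only once per ~1 MB rescanned.
        if ((bytes_rescanned >> 20) != (old_bytes_rescanned >> 20)) {
          std::printf("crossed %lld MB of rescanned bytes\n",
                      static_cast<long long>(bytes_rescanned >> 20));
        }
      }
      return 0;
    }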