Split incremental marking write barrier into fast and slow paths.
Force inlining of the fast path. Force inlining LiteralBuffer::AddChar and
Scanner::AddLiteralChar.

R=erik.corry@gmail.com

Review URL: http://codereview.chromium.org/8431010

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@9853 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
Parent: da49e4d83d
Commit: 858320725b
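The pattern here is the standard fast-path/slow-path split for a write barrier: the check that almost always fails (is incremental marking active at all, is the value a heap object) stays in a tiny force-inlined fast path at every record-write site, while the mark-bit and slot-recording work moves into an out-of-line slow path. Below is a minimal standalone sketch of that shape, with hypothetical names and a GCC/Clang always_inline attribute standing in for V8's INLINE macro; it is illustrative only, not the V8 code.

    // barrier_sketch.cc -- illustrative only; names are hypothetical.
    #include <cstdio>

    class Heap {
     public:
      // Slow path: deliberately out of line. This is where the expensive work
      // (mark-bit checks, slot recording for compaction, etc.) would live.
      void RecordWriteSlow(void* obj, void** slot, void* value);

      // Fast path: forced inline so every store site pays only for one cheap
      // test while marking is off, and a call only in the rare marking case.
      inline __attribute__((always_inline))
      void RecordWrite(void* obj, void** slot, void* value) {
        if (marking_ && value != nullptr) {
          RecordWriteSlow(obj, slot, value);
        }
      }

      void set_marking(bool on) { marking_ = on; }

     private:
      bool marking_ = false;
    };

    void Heap::RecordWriteSlow(void* obj, void** slot, void* value) {
      std::printf("slow path: obj=%p slot=%p value=%p\n", obj,
                  static_cast<void*>(slot), value);
    }

    int main() {
      Heap heap;
      void* field = nullptr;
      // Marking off: the inlined test fails and the slow path is never entered.
      heap.RecordWrite(&heap, &field, &heap);
      // Marking on: the same write now drops into the out-of-line slow path.
      heap.set_marking(true);
      heap.RecordWrite(&heap, &field, &heap);
      return 0;
    }

Keeping the inlined part this small is the point: the common case costs one predictable branch at the write site, and the rarely executed marking/compaction work stays out of the instruction stream of its callers.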
@@ -37,7 +37,6 @@ namespace internal {
 bool IncrementalMarking::BaseRecordWrite(HeapObject* obj,
                                          Object** slot,
                                          Object* value) {
   if (IsMarking() && value->IsHeapObject()) {
     MarkBit value_bit = Marking::MarkBitFrom(HeapObject::cast(value));
     if (Marking::IsWhite(value_bit)) {
       MarkBit obj_bit = Marking::MarkBitFrom(obj);
@@ -46,53 +45,34 @@ bool IncrementalMarking::BaseRecordWrite(HeapObject* obj,
         RestartIfNotMarking();
       }

-      // Object is either grey or white it will be scanned if survives.
+      // Object is either grey or white. It will be scanned if survives.
       return false;
     }
     return true;
   }
   return false;
 }


 void IncrementalMarking::RecordWrite(HeapObject* obj,
                                      Object** slot,
                                      Object* value) {
-  if (BaseRecordWrite(obj, slot, value) && is_compacting_ && slot != NULL) {
-    MarkBit obj_bit = Marking::MarkBitFrom(obj);
-    if (Marking::IsBlack(obj_bit)) {
-      // Object is not going to be rescanned we need to record the slot.
-      heap_->mark_compact_collector()->RecordSlot(
-          HeapObject::RawField(obj, 0), slot, value);
-    }
+  if (IsMarking() && value->NonFailureIsHeapObject()) {
+    RecordWriteSlow(obj, slot, value);
   }
 }


+void IncrementalMarking::RecordWriteOfCodeEntry(JSFunction* host,
+                                                Object** slot,
+                                                Code* value) {
+  if (IsMarking()) RecordWriteOfCodeEntrySlow(host, slot, value);
+}
+
+
 void IncrementalMarking::RecordWriteIntoCode(HeapObject* obj,
                                              RelocInfo* rinfo,
                                              Object* value) {
-  if (IsMarking() && value->IsHeapObject()) {
-    MarkBit value_bit = Marking::MarkBitFrom(HeapObject::cast(value));
-    if (Marking::IsWhite(value_bit)) {
-      MarkBit obj_bit = Marking::MarkBitFrom(obj);
-      if (Marking::IsBlack(obj_bit)) {
-        BlackToGreyAndUnshift(obj, obj_bit);
-        RestartIfNotMarking();
-      }
-
-      // Object is either grey or white it will be scanned if survives.
-      return;
-    }
-
-    if (is_compacting_) {
-      MarkBit obj_bit = Marking::MarkBitFrom(obj);
-      if (Marking::IsBlack(obj_bit)) {
-        // Object is not going to be rescanned we need to record the slot.
-        heap_->mark_compact_collector()->RecordRelocSlot(rinfo,
-                                                         Code::cast(value));
-      }
-    }
+  if (IsMarking() && value->NonFailureIsHeapObject()) {
+    RecordWriteIntoCodeSlow(obj, rinfo, value);
   }
 }
@@ -60,6 +60,20 @@ void IncrementalMarking::TearDown() {
 }


+void IncrementalMarking::RecordWriteSlow(HeapObject* obj,
+                                         Object** slot,
+                                         Object* value) {
+  if (BaseRecordWrite(obj, slot, value) && is_compacting_ && slot != NULL) {
+    MarkBit obj_bit = Marking::MarkBitFrom(obj);
+    if (Marking::IsBlack(obj_bit)) {
+      // Object is not going to be rescanned we need to record the slot.
+      heap_->mark_compact_collector()->RecordSlot(
+          HeapObject::RawField(obj, 0), slot, value);
+    }
+  }
+}
+
+
 void IncrementalMarking::RecordWriteFromCode(HeapObject* obj,
                                              Object* value,
                                              Isolate* isolate) {
@@ -108,7 +122,7 @@ void IncrementalMarking::RecordCodeTargetPatch(Address pc, HeapObject* value) {
 }


-void IncrementalMarking::RecordWriteOfCodeEntry(JSFunction* host,
-                                                Object** slot,
-                                                Code* value) {
+void IncrementalMarking::RecordWriteOfCodeEntrySlow(JSFunction* host,
+                                                    Object** slot,
+                                                    Code* value) {
   if (BaseRecordWrite(host, slot, value) && is_compacting_) {
@@ -119,6 +133,30 @@ void IncrementalMarking::RecordWriteOfCodeEntry(JSFunction* host,
 }


+void IncrementalMarking::RecordWriteIntoCodeSlow(HeapObject* obj,
+                                                 RelocInfo* rinfo,
+                                                 Object* value) {
+  MarkBit value_bit = Marking::MarkBitFrom(HeapObject::cast(value));
+  if (Marking::IsWhite(value_bit)) {
+    MarkBit obj_bit = Marking::MarkBitFrom(obj);
+    if (Marking::IsBlack(obj_bit)) {
+      BlackToGreyAndUnshift(obj, obj_bit);
+      RestartIfNotMarking();
+    }
+    // Object is either grey or white. It will be scanned if survives.
+    return;
+  }
+
+  if (is_compacting_) {
+    MarkBit obj_bit = Marking::MarkBitFrom(obj);
+    if (Marking::IsBlack(obj_bit)) {
+      // Object is not going to be rescanned. We need to record the slot.
+      heap_->mark_compact_collector()->RecordRelocSlot(rinfo,
+                                                       Code::cast(value));
+    }
+  }
+}
+
+
 class IncrementalMarkingMarkingVisitor : public ObjectVisitor {
  public:
@@ -59,7 +59,7 @@ class IncrementalMarking {

   inline bool IsStopped() { return state() == STOPPED; }

-  inline bool IsMarking() { return state() >= MARKING; }
+  INLINE(bool IsMarking()) { return state() >= MARKING; }

   inline bool IsMarkingIncomplete() { return state() == MARKING; }

@@ -120,16 +120,23 @@ class IncrementalMarking {
                                   Object** slot,
                                   Isolate* isolate);

-  inline bool BaseRecordWrite(HeapObject* obj, Object** slot, Object* value);
+  INLINE(bool BaseRecordWrite(HeapObject* obj, Object** slot, Object* value));
+  INLINE(void RecordWrite(HeapObject* obj, Object** slot, Object* value));
+  INLINE(void RecordWriteIntoCode(HeapObject* obj,
+                                  RelocInfo* rinfo,
+                                  Object* value));
+  INLINE(void RecordWriteOfCodeEntry(JSFunction* host,
+                                     Object** slot,
+                                     Code* value));

-  inline void RecordWrite(HeapObject* obj, Object** slot, Object* value);
-  inline void RecordWriteIntoCode(HeapObject* obj,
-                                  RelocInfo* rinfo,
-                                  Object* value);
+  void RecordWriteSlow(HeapObject* obj, Object** slot, Object* value);
+  void RecordWriteIntoCodeSlow(HeapObject* obj,
+                               RelocInfo* rinfo,
+                               Object* value);
+  void RecordWriteOfCodeEntrySlow(JSFunction* host, Object** slot, Code* value);
   void RecordCodeTargetPatch(Code* host, Address pc, HeapObject* value);
   void RecordCodeTargetPatch(Address pc, HeapObject* value);
-  void RecordWriteOfCodeEntry(JSFunction* host, Object** slot, Code* value);

   inline void RecordWrites(HeapObject* obj);
@@ -169,7 +169,7 @@ class LiteralBuffer {
     }
   }

-  inline void AddChar(uc16 character) {
+  INLINE(void AddChar(uc16 character)) {
     if (position_ >= backing_store_.length()) ExpandBuffer();
     if (is_ascii_) {
       if (character < kMaxAsciiCharCodeU) {
@@ -389,7 +389,7 @@ class Scanner {
     next_.literal_chars = free_buffer;
   }

-  inline void AddLiteralChar(uc32 c) {
+  INLINE(void AddLiteralChar(uc32 c)) {
     ASSERT_NOT_NULL(next_.literal_chars);
     next_.literal_chars->AddChar(c);
   }
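For reference, INLINE(...) as used in the header changes above is V8's portable force-inline wrapper. A rough sketch of how such a macro is commonly defined follows; this is an approximation, not necessarily V8's exact definition.

    // Hypothetical approximation of a force-inline macro in the spirit of
    // V8's INLINE; the real definition may differ.
    #if defined(__GNUC__) || defined(__clang__)
    #define INLINE(declarator) inline __attribute__((always_inline)) declarator
    #else
    #define INLINE(declarator) inline declarator
    #endif

    // Usage, as in the header changes above:
    //   INLINE(bool IsMarking()) { return state() >= MARKING; }
    // expands (on GCC/Clang) to:
    //   inline __attribute__((always_inline)) bool IsMarking() {
    //     return state() >= MARKING;
    //   }

The commit applies forced inlining to code on hot paths, namely the write-barrier fast path and the scanner's per-character AddChar/AddLiteralChar, where leaving the decision to default compiler heuristics can leave an out-of-line call in the loop.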