Remove custom weak handling of monomorphic IC.

This is no longer needed, since all ICs now use weak cells to embed maps.

BUG=v8:3629
LOG=N

Review URL: https://codereview.chromium.org/817223003

Cr-Commit-Position: refs/heads/master@{#25928}
ulan 2014-12-22 07:04:10 -08:00 committed by Commit bot
parent 00013a5692
commit 0344722a16
13 changed files with 22 additions and 228 deletions
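
For context on the commit message: the deleted machinery existed so that a map could find and patch every IC stub that embedded it directly in its instruction stream. Once a monomorphic IC holds its map through a weak cell, the GC clears the cell when the map dies and the stub degrades into an ordinary miss on its own, so no per-stub dependency lists or code patching are needed. A toy sketch of the weak-cell idea, using hypothetical types rather than V8's real classes:

// Toy model only; V8's Map/WeakCell/IC types are far richer than this.
struct Map { int instance_type; };

struct WeakCell {
  Map* value;  // the GC nulls this out when the map is collected
  bool cleared() const { return value == nullptr; }
};

struct MonomorphicIC {
  WeakCell cell;  // map held weakly; no dependent-code bookkeeping
  bool Matches(const Map* receiver_map) const {
    // A cleared cell never matches, so a dead map becomes a plain cache
    // miss instead of a stub that must be found and patched.
    return !cell.cleared() && cell.value == receiver_map;
  }
};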

View File

@@ -75,7 +75,6 @@ bool CodeStub::FindCodeInCache(Code** code_out) {
 void CodeStub::RecordCodeGeneration(Handle<Code> code) {
-  IC::RegisterWeakMapDependency(code);
   std::ostringstream os;
   os << *this;
   PROFILE(isolate(),

View File

@@ -581,8 +581,6 @@ DEFINE_BOOL(trace_fragmentation, false,
             "report fragmentation for old pointer and data pages")
 DEFINE_BOOL(collect_maps, true,
             "garbage collect maps from which no objects can be reached")
-DEFINE_BOOL(weak_embedded_maps_in_ic, true,
-            "make maps embedded in inline cache stubs")
 DEFINE_BOOL(weak_embedded_maps_in_optimized_code, true,
             "make maps embedded in optimized code weak")
 DEFINE_BOOL(weak_embedded_objects_in_optimized_code, true,

View File

@@ -385,7 +385,7 @@ void MarkCompactCollector::VerifyWeakEmbeddedObjectsInCode() {
   for (HeapObject* obj = code_iterator.Next(); obj != NULL;
        obj = code_iterator.Next()) {
     Code* code = Code::cast(obj);
-    if (!code->is_optimized_code() && !code->is_weak_stub()) continue;
+    if (!code->is_optimized_code()) continue;
     if (WillBeDeoptimized(code)) continue;
     code->VerifyEmbeddedObjectsDependency();
   }
@@ -2545,34 +2545,12 @@ void MarkCompactCollector::TrimEnumCache(Map* map,
 }
 
 
-void MarkCompactCollector::ClearDependentICList(Object* head) {
-  Object* current = head;
-  Object* undefined = heap()->undefined_value();
-  while (current != undefined) {
-    Code* code = Code::cast(current);
-    if (IsMarked(code)) {
-      DCHECK(code->is_weak_stub());
-      IC::InvalidateMaps(code);
-    }
-    current = code->next_code_link();
-    code->set_next_code_link(undefined);
-  }
-}
-
-
 void MarkCompactCollector::ClearDependentCode(DependentCode* entries) {
   DisallowHeapAllocation no_allocation;
   DependentCode::GroupStartIndexes starts(entries);
   int number_of_entries = starts.number_of_entries();
   if (number_of_entries == 0) return;
-  int g = DependentCode::kWeakICGroup;
-  if (starts.at(g) != starts.at(g + 1)) {
-    int i = starts.at(g);
-    DCHECK(i + 1 == starts.at(g + 1));
-    Object* head = entries->object_at(i);
-    ClearDependentICList(head);
-  }
-  g = DependentCode::kWeakCodeGroup;
+  int g = DependentCode::kWeakCodeGroup;
   for (int i = starts.at(g); i < starts.at(g + 1); i++) {
     // If the entry is compilation info then the map must be alive,
     // and ClearDependentCode shouldn't be called.
@@ -2594,34 +2572,17 @@ void MarkCompactCollector::ClearDependentCode(DependentCode* entries) {
 int MarkCompactCollector::ClearNonLiveDependentCodeInGroup(
     DependentCode* entries, int group, int start, int end, int new_start) {
   int survived = 0;
-  if (group == DependentCode::kWeakICGroup) {
-    // Dependent weak IC stubs form a linked list and only the head is stored
-    // in the dependent code array.
-    if (start != end) {
-      DCHECK(start + 1 == end);
-      Object* old_head = entries->object_at(start);
-      MarkCompactWeakObjectRetainer retainer;
-      Object* head = VisitWeakList<Code>(heap(), old_head, &retainer);
-      entries->set_object_at(new_start, head);
-      Object** slot = entries->slot_at(new_start);
-      RecordSlot(slot, slot, head);
-      // We do not compact this group even if the head is undefined,
-      // more dependent ICs are likely to be added later.
-      survived = 1;
-    }
-  } else {
-    for (int i = start; i < end; i++) {
-      Object* obj = entries->object_at(i);
-      DCHECK(obj->IsCode() || IsMarked(obj));
-      if (IsMarked(obj) &&
-          (!obj->IsCode() || !WillBeDeoptimized(Code::cast(obj)))) {
-        if (new_start + survived != i) {
-          entries->set_object_at(new_start + survived, obj);
-        }
-        Object** slot = entries->slot_at(new_start + survived);
-        RecordSlot(slot, slot, obj);
-        survived++;
-      }
-    }
-  }
+  for (int i = start; i < end; i++) {
+    Object* obj = entries->object_at(i);
+    DCHECK(obj->IsCode() || IsMarked(obj));
+    if (IsMarked(obj) &&
+        (!obj->IsCode() || !WillBeDeoptimized(Code::cast(obj)))) {
+      if (new_start + survived != i) {
+        entries->set_object_at(new_start + survived, obj);
+      }
+      Object** slot = entries->slot_at(new_start + survived);
+      RecordSlot(slot, slot, obj);
+      survived++;
+    }
+  }
   entries->set_number_of_entries(
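
The loop that survives above is the standard in-place compaction idiom: scan the range once, copy each live entry left over the dead ones, and report the survivor count. A standalone sketch of the same pattern, with hypothetical names:

#include <cassert>

// Keeps the entries satisfying `alive`, packed from new_start onwards;
// returns how many survived (the shape of ClearNonLiveDependentCodeInGroup).
template <typename T, typename Pred>
int CompactInPlace(T* entries, int start, int end, int new_start, Pred alive) {
  int survived = 0;
  for (int i = start; i < end; i++) {
    if (alive(entries[i])) {
      if (new_start + survived != i) {
        entries[new_start + survived] = entries[i];  // shift survivor left
      }
      survived++;
    }
  }
  return survived;
}

int main() {
  int a[] = {1, 0, 3, 0, 5};
  int n = CompactInPlace(a, 0, 5, 0, [](int v) { return v != 0; });
  assert(n == 3 && a[0] == 1 && a[1] == 3 && a[2] == 5);
}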

View File

@@ -808,7 +808,6 @@ class MarkCompactCollector {
   void TrimEnumCache(Map* map, DescriptorArray* descriptors);
 
   void ClearDependentCode(DependentCode* dependent_code);
-  void ClearDependentICList(Object* head);
   void ClearNonLiveDependentCode(DependentCode* dependent_code);
   int ClearNonLiveDependentCodeInGroup(DependentCode* dependent_code, int group,
                                        int start, int end, int new_start);

View File

@@ -263,10 +263,9 @@ void StaticMarkingVisitor<StaticVisitor>::VisitCodeTarget(Heap* heap,
   // to be serialized.
   if (FLAG_cleanup_code_caches_at_gc && target->is_inline_cache_stub() &&
       !target->is_call_stub() &&
-      ((heap->flush_monomorphic_ics() && !target->is_weak_stub()) ||
+      ((heap->flush_monomorphic_ics() && !target->embeds_maps_weakly()) ||
        heap->isolate()->serializer_enabled() ||
-       target->ic_age() != heap->global_ic_age() ||
-       target->is_invalidated_weak_stub())) {
+       target->ic_age() != heap->global_ic_age())) {
     ICUtility::Clear(heap->isolate(), rinfo->pc(),
                      rinfo->host()->constant_pool());
     target = Code::GetCodeFromTargetAddress(rinfo->target_address());
@@ -508,10 +507,7 @@ void StaticMarkingVisitor<StaticVisitor>::VisitConstantPoolArray(
   bool is_weak_object =
       (array->get_weak_object_state() ==
            ConstantPoolArray::WEAK_OBJECTS_IN_OPTIMIZED_CODE &&
-       Code::IsWeakObjectInOptimizedCode(object)) ||
-      (array->get_weak_object_state() ==
-           ConstantPoolArray::WEAK_OBJECTS_IN_IC &&
-       Code::IsWeakObjectInIC(object));
+       Code::IsWeakObjectInOptimizedCode(object));
   if (!is_weak_object) {
     StaticVisitor::MarkObject(heap, object);
   }

View File

@@ -380,7 +380,6 @@ Handle<Code> PropertyICCompiler::GetCode(Code::Kind kind, Code::StubType type,
   Code::Flags flags =
       Code::ComputeFlags(kind, state, extra_ic_state_, type, cache_holder());
   Handle<Code> code = GetCodeWithFlags(flags, name);
-  IC::RegisterWeakMapDependency(code);
   PROFILE(isolate(), CodeCreateEvent(log_kind(code), *code, *name));
   return code;
 }

View File

@@ -125,9 +125,6 @@ void IC::SetTargetAtAddress(Address address, Code* target,
 
 
 void IC::set_target(Code* code) {
-#ifdef VERIFY_HEAP
-  code->VerifyEmbeddedObjectsDependency();
-#endif
   SetTargetAtAddress(address(), code, constant_pool());
   target_set_ = true;
 }

View File

@@ -474,42 +474,6 @@ void IC::PostPatching(Address address, Code* target, Code* old_target) {
 }
 
 
-void IC::RegisterWeakMapDependency(Handle<Code> stub) {
-  if (FLAG_collect_maps && FLAG_weak_embedded_maps_in_ic &&
-      stub->CanBeWeakStub()) {
-    DCHECK(!stub->is_weak_stub());
-    MapHandleList maps;
-    stub->FindAllMaps(&maps);
-    if (maps.length() == 1 && stub->IsWeakObjectInIC(*maps.at(0))) {
-      Map::AddDependentIC(maps.at(0), stub);
-      stub->mark_as_weak_stub();
-      if (FLAG_enable_ool_constant_pool) {
-        stub->constant_pool()->set_weak_object_state(
-            ConstantPoolArray::WEAK_OBJECTS_IN_IC);
-      }
-    }
-  }
-}
-
-
-void IC::InvalidateMaps(Code* stub) {
-  DCHECK(stub->is_weak_stub());
-  stub->mark_as_invalidated_weak_stub();
-  Isolate* isolate = stub->GetIsolate();
-  Heap* heap = isolate->heap();
-  Object* undefined = heap->undefined_value();
-  int mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
-  for (RelocIterator it(stub, mode_mask); !it.done(); it.next()) {
-    RelocInfo::Mode mode = it.rinfo()->rmode();
-    if (mode == RelocInfo::EMBEDDED_OBJECT &&
-        it.rinfo()->target_object()->IsMap()) {
-      it.rinfo()->set_target_object(undefined, SKIP_WRITE_BARRIER);
-    }
-  }
-  CpuFeatures::FlushICache(stub->instruction_start(), stub->instruction_size());
-}
-
-
 void IC::Clear(Isolate* isolate, Address address,
                ConstantPoolArray* constant_pool) {
   Code* target = GetTargetAtAddress(address, constant_pool);
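
The deleted IC::InvalidateMaps was a patch-and-flush pass: locate every map embedded in the stub's machine code and overwrite it with undefined so the stub can never match again. Its shape, reduced to hypothetical plain-C++ types:

#include <vector>

struct Map {};
struct Stub {
  // Stand-in for iterating RelocInfo::EMBEDDED_OBJECT entries.
  std::vector<Map**> embedded_map_slots;
};

void InvalidateMaps(Stub* stub, Map* undefined_sentinel) {
  for (Map** slot : stub->embedded_map_slots) {
    *slot = undefined_sentinel;  // the stub now fails every map check
  }
  // The real code then flushed the instruction cache, since the patched
  // words live inside executable code. With weak cells both the patching
  // pass and the flush disappear.
}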

View File

@@ -76,14 +76,6 @@ class IC {
     state_ = PROTOTYPE_FAILURE;
   }
 
-  // If the stub contains weak maps then this function adds the stub to
-  // the dependent code array of each weak map.
-  static void RegisterWeakMapDependency(Handle<Code> stub);
-
-  // This function is called when a weak map in the stub is dying,
-  // invalidates the stub by setting maps in it to undefined.
-  static void InvalidateMaps(Code* stub);
-
   // Clear the inline cache to initial state.
   static void Clear(Isolate* isolate, Address address,
                     ConstantPoolArray* constant_pool);

View File

@@ -684,9 +684,8 @@ void Code::VerifyEmbeddedObjectsDependency() {
     if (IsWeakObject(obj)) {
       if (obj->IsMap()) {
         Map* map = Map::cast(obj);
-        DependentCode::DependencyGroup group = is_optimized_code() ?
-            DependentCode::kWeakCodeGroup : DependentCode::kWeakICGroup;
-        CHECK(map->dependent_code()->Contains(group, this));
+        CHECK(map->dependent_code()->Contains(DependentCode::kWeakCodeGroup,
+                                              this));
       } else if (obj->IsJSObject()) {
         Object* raw_table = GetIsolate()->heap()->weak_object_to_code_table();
         WeakHashTable* table = WeakHashTable::cast(raw_table);

View File

@@ -5068,34 +5068,6 @@ void Code::set_marked_for_deoptimization(bool flag) {
 }
 
 
-bool Code::is_weak_stub() {
-  return CanBeWeakStub() && WeakStubField::decode(
-      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
-}
-
-
-void Code::mark_as_weak_stub() {
-  DCHECK(CanBeWeakStub());
-  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
-  int updated = WeakStubField::update(previous, true);
-  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
-}
-
-
-bool Code::is_invalidated_weak_stub() {
-  return is_weak_stub() && InvalidatedWeakStubField::decode(
-      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
-}
-
-
-void Code::mark_as_invalidated_weak_stub() {
-  DCHECK(is_inline_cache_stub());
-  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
-  int updated = InvalidatedWeakStubField::update(previous, true);
-  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
-}
-
-
 bool Code::is_inline_cache_stub() {
   Kind kind = this->kind();
   switch (kind) {
@@ -5242,13 +5214,6 @@ class Code::FindAndReplacePattern {
 };
 
 
-bool Code::IsWeakObjectInIC(Object* object) {
-  return object->IsMap() && Map::cast(object)->CanTransition() &&
-         FLAG_collect_maps &&
-         FLAG_weak_embedded_maps_in_ic;
-}
-
-
 Object* Map::prototype() const {
   return READ_FIELD(this, kPrototypeOffset);
 }

View File

@@ -11924,23 +11924,6 @@ void Map::AddDependentCode(Handle<Map> map,
 }
 
 
-// static
-void Map::AddDependentIC(Handle<Map> map,
-                         Handle<Code> stub) {
-  DCHECK(stub->next_code_link()->IsUndefined());
-  int n = map->dependent_code()->number_of_entries(DependentCode::kWeakICGroup);
-  if (n == 0) {
-    // Slow path: insert the head of the list with possible heap allocation.
-    Map::AddDependentCode(map, DependentCode::kWeakICGroup, stub);
-  } else {
-    // Fast path: link the stub to the existing head of the list without any
-    // heap allocation.
-    DCHECK(n == 1);
-    map->dependent_code()->AddToDependentICList(stub);
-  }
-}
-
-
 DependentCode::GroupStartIndexes::GroupStartIndexes(DependentCode* entries) {
   Recompute(entries);
 }
@@ -12070,22 +12053,10 @@ void DependentCode::RemoveCompilationInfo(DependentCode::DependencyGroup group,
 }
 
 
-static bool CodeListContains(Object* head, Code* code) {
-  while (!head->IsUndefined()) {
-    if (head == code) return true;
-    head = Code::cast(head)->next_code_link();
-  }
-  return false;
-}
-
-
 bool DependentCode::Contains(DependencyGroup group, Code* code) {
   GroupStartIndexes starts(this);
   int start = starts.at(group);
   int end = starts.at(group + 1);
-  if (group == kWeakICGroup) {
-    return CodeListContains(object_at(start), code);
-  }
   for (int i = start; i < end; i++) {
     if (object_at(i) == code) return true;
   }
@@ -12142,24 +12113,6 @@
 }
 
 
-void DependentCode::AddToDependentICList(Handle<Code> stub) {
-  DisallowHeapAllocation no_heap_allocation;
-  GroupStartIndexes starts(this);
-  int i = starts.at(kWeakICGroup);
-  Object* head = object_at(i);
-  // Try to insert the stub after the head of the list to minimize number of
-  // writes to the DependentCode array, since a write to the array can make it
-  // strong if it was alread marked by incremental marker.
-  if (head->IsCode()) {
-    stub->set_next_code_link(Code::cast(head)->next_code_link());
-    Code::cast(head)->set_next_code_link(*stub);
-  } else {
-    stub->set_next_code_link(head);
-    set_object_at(i, *stub);
-  }
-}
-
-
 void DependentCode::SetMarkedForDeoptimization(Code* code,
                                                DependencyGroup group) {
   code->set_marked_for_deoptimization(true);
@@ -12178,8 +12131,6 @@ void DependentCode::SetMarkedForDeoptimization(Code* code,
 
 const char* DependentCode::DependencyGroupName(DependencyGroup group) {
   switch (group) {
-    case kWeakICGroup:
-      return "weak-ic";
     case kWeakCodeGroup:
       return "weak-code";
     case kTransitionGroup:
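
The deleted AddToDependentICList encoded a subtle incremental-marking constraint, spelled out in its comment: a write to the DependentCode array can turn the whole array strong, so new stubs were threaded in after the existing head whenever possible. The same trick on hypothetical plain-C++ types:

struct CodeStub {
  CodeStub* next_code_link = nullptr;
};

// Insert after the head when one exists, so only a Code object is
// written; the array slot holding the head is touched only on the first
// insertion (the slow path of the deleted Map::AddDependentIC).
void AddAfterHead(CodeStub*& head, CodeStub* stub) {
  if (head != nullptr) {
    stub->next_code_link = head->next_code_link;
    head->next_code_link = stub;
  } else {
    head = stub;
  }
}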

View File

@@ -2690,11 +2690,7 @@ class WeakFixedArray : public FixedArray {
 //
 class ConstantPoolArray: public HeapObject {
  public:
-  enum WeakObjectState {
-    NO_WEAK_OBJECTS,
-    WEAK_OBJECTS_IN_OPTIMIZED_CODE,
-    WEAK_OBJECTS_IN_IC
-  };
+  enum WeakObjectState { NO_WEAK_OBJECTS, WEAK_OBJECTS_IN_OPTIMIZED_CODE };
 
   enum Type {
     INT64 = 0,
@@ -5138,12 +5134,7 @@
   inline bool is_to_boolean_ic_stub() { return kind() == TO_BOOLEAN_IC; }
   inline bool is_keyed_stub();
   inline bool is_optimized_code() { return kind() == OPTIMIZED_FUNCTION; }
-  inline bool is_weak_stub();
-  inline void mark_as_weak_stub();
-  inline bool is_invalidated_weak_stub();
-  inline void mark_as_invalidated_weak_stub();
-  inline bool CanBeWeakStub() {
+  inline bool embeds_maps_weakly() {
     Kind k = kind();
     return (k == LOAD_IC || k == STORE_IC || k == KEYED_LOAD_IC ||
             k == KEYED_STORE_IC || k == COMPARE_NIL_IC) &&
@@ -5424,18 +5415,14 @@
   void VerifyEmbeddedObjectsInFullCode();
 #endif  // DEBUG
 
-  inline bool CanContainWeakObjects() {
-    return is_optimized_code() || is_weak_stub();
-  }
+  inline bool CanContainWeakObjects() { return is_optimized_code(); }
 
   inline bool IsWeakObject(Object* object) {
-    return (is_optimized_code() && !is_turbofanned() &&
-            IsWeakObjectInOptimizedCode(object)) ||
-           (is_weak_stub() && IsWeakObjectInIC(object));
+    return is_optimized_code() && !is_turbofanned() &&
+           IsWeakObjectInOptimizedCode(object);
   }
 
   static inline bool IsWeakObjectInOptimizedCode(Object* object);
-  static inline bool IsWeakObjectInIC(Object* object);
 
   // Max loop nesting marker used to postpose OSR. We don't take loop
   // nesting that is deeper than 5 levels into account.
@@ -5497,9 +5484,7 @@
   static const int kHasFunctionCacheBit =
       kStackSlotsFirstBit + kStackSlotsBitCount;
   static const int kMarkedForDeoptimizationBit = kHasFunctionCacheBit + 1;
-  static const int kWeakStubBit = kMarkedForDeoptimizationBit + 1;
-  static const int kInvalidatedWeakStubBit = kWeakStubBit + 1;
-  static const int kIsTurbofannedBit = kInvalidatedWeakStubBit + 1;
+  static const int kIsTurbofannedBit = kMarkedForDeoptimizationBit + 1;
 
   STATIC_ASSERT(kStackSlotsFirstBit + kStackSlotsBitCount <= 32);
   STATIC_ASSERT(kIsTurbofannedBit + 1 <= 32);
@@ -5510,9 +5495,6 @@
   }; // NOLINT
   class MarkedForDeoptimizationField
       : public BitField<bool, kMarkedForDeoptimizationBit, 1> {}; // NOLINT
-  class WeakStubField : public BitField<bool, kWeakStubBit, 1> {}; // NOLINT
-  class InvalidatedWeakStubField
-      : public BitField<bool, kInvalidatedWeakStubBit, 1> {}; // NOLINT
   class IsTurbofannedField : public BitField<bool, kIsTurbofannedBit, 1> {
   }; // NOLINT
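
Dropping WeakStubField and InvalidatedWeakStubField frees two bits in the kind-specific flags word, which is why kIsTurbofannedBit now directly follows kMarkedForDeoptimizationBit in the hunk above. A minimal sketch of the BitField pattern these classes build on (illustrative, not V8's exact template):

#include <cstdint>

template <typename T, int shift, int size>
struct BitField {
  static uint32_t mask() { return ((1u << size) - 1) << shift; }
  // Extract the field's value from a packed flags word.
  static T decode(uint32_t value) {
    return static_cast<T>((value & mask()) >> shift);
  }
  // Return the flags word with just this field replaced.
  static uint32_t update(uint32_t previous, T value) {
    return (previous & ~mask()) | (static_cast<uint32_t>(value) << shift);
  }
};

// Positions are illustrative; only the "+ 1" chaining matters here.
const int kMarkedForDeoptimizationBit = 10;
const int kIsTurbofannedBit = kMarkedForDeoptimizationBit + 1;
typedef BitField<bool, kMarkedForDeoptimizationBit, 1> MarkedForDeoptimizationField;
typedef BitField<bool, kIsTurbofannedBit, 1> IsTurbofannedField;
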
@@ -5594,11 +5576,6 @@ class CompilationInfo;
 class DependentCode: public FixedArray {
  public:
   enum DependencyGroup {
-    // Group of IC stubs that weakly embed this map and depend on being
-    // invalidated when the map is garbage collected. Dependent IC stubs form
-    // a linked list. This group stores only the head of the list. This means
-    // that the number_of_entries(kWeakICGroup) is 0 or 1.
-    kWeakICGroup,
     // Group of code that weakly embed this map and depend on being
     // deoptimized when the map is garbage collected.
     kWeakCodeGroup,
@@ -5659,7 +5636,6 @@
   bool MarkCodeForDeoptimization(Isolate* isolate,
                                  DependentCode::DependencyGroup group);
-  void AddToDependentICList(Handle<Code> stub);
 
   // The following low-level accessors should only be used by this class
   // and the mark compact collector.
@@ -6278,8 +6254,6 @@
   static void AddDependentCode(Handle<Map> map,
                                DependentCode::DependencyGroup group,
                                Handle<Code> code);
-  static void AddDependentIC(Handle<Map> map,
-                             Handle<Code> stub);
 
   bool IsMapInArrayPrototypeChain();