Use weak cells in dependent code.

BUG=
Review URL: https://codereview.chromium.org/871253005
Cr-Commit-Position: refs/heads/master@{#26614}
commit b79b985988 (parent 9159c419e8)
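The common thread of the hunks below: entries in DependentCode arrays and keys in Heap::weak_object_to_code_table stop being raw Code/object pointers and become WeakCell wrappers, so the mark-compact collector can drop dead code by clearing cells instead of rewriting every dependent-code array. A minimal standalone sketch of the weak-cell idea, using std::weak_ptr as a stand-in (illustrative only, not the V8 classes):

```cpp
#include <cassert>
#include <memory>

struct Code { int id; };  // stand-in for a compiled code object

// Stand-in for v8::internal::WeakCell: holds its value without keeping it
// alive; reads as "cleared" once the value has been collected.
class WeakCell {
 public:
  explicit WeakCell(const std::shared_ptr<Code>& code) : value_(code) {}
  bool cleared() const { return value_.expired(); }
  std::shared_ptr<Code> value() const { return value_.lock(); }
 private:
  std::weak_ptr<Code> value_;
};

int main() {
  auto code = std::make_shared<Code>(Code{42});
  WeakCell cell(code);
  assert(!cell.cleared());
  code.reset();            // "GC" frees the code object
  assert(cell.cleared());  // the cell observes the death; no dangling pointer
}
```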
@@ -6253,7 +6253,7 @@ class Internals {
   static const int kNullValueRootIndex = 7;
   static const int kTrueValueRootIndex = 8;
   static const int kFalseValueRootIndex = 9;
-  static const int kEmptyStringRootIndex = 154;
+  static const int kEmptyStringRootIndex = 155;

   // The external allocation limit should be below 256 MB on all architectures
   // to avoid that resource-constrained embedders run low on memory.
@@ -206,6 +206,16 @@ CompilationInfo::~CompilationInfo() {


 void CompilationInfo::CommitDependencies(Handle<Code> code) {
+  bool has_dependencies = false;
+  for (int i = 0; i < DependentCode::kGroupCount; i++) {
+    has_dependencies |=
+        dependencies_[i] != NULL && dependencies_[i]->length() > 0;
+  }
+  // Avoid creating a weak cell for code with no dependencies.
+  if (!has_dependencies) return;
+
+  AllowDeferredHandleDereference get_object_wrapper;
+  Handle<WeakCell> cell = Code::WeakCellFor(code);
   for (int i = 0; i < DependentCode::kGroupCount; i++) {
     ZoneList<Handle<HeapObject> >* group_objects = dependencies_[i];
     if (group_objects == NULL) continue;
@@ -215,7 +225,7 @@ void CompilationInfo::CommitDependencies(Handle<Code> code) {
         static_cast<DependentCode::DependencyGroup>(i);
     DependentCode* dependent_code =
         DependentCode::ForObject(group_objects->at(j), group);
-    dependent_code->UpdateToFinishedCode(group, this, *code);
+    dependent_code->UpdateToFinishedCode(group, *object_wrapper(), *cell);
   }
   dependencies_[i] = NULL;  // Zone-allocated, no need to delete.
 }
@@ -223,6 +233,7 @@ void CompilationInfo::CommitDependencies(Handle<Code> code) {


 void CompilationInfo::RollbackDependencies() {
+  AllowDeferredHandleDereference get_object_wrapper;
   // Unregister from all dependent maps if not yet committed.
   for (int i = 0; i < DependentCode::kGroupCount; i++) {
     ZoneList<Handle<HeapObject> >* group_objects = dependencies_[i];
@@ -232,7 +243,7 @@ void CompilationInfo::RollbackDependencies() {
         static_cast<DependentCode::DependencyGroup>(i);
     DependentCode* dependent_code =
         DependentCode::ForObject(group_objects->at(j), group);
-    dependent_code->RemoveCompilationInfo(group, this);
+    dependent_code->RemoveCompilationInfo(group, *object_wrapper());
   }
   dependencies_[i] = NULL;  // Zone-allocated, no need to delete.
 }
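The compiler.cc hunks above fix the bookkeeping protocol: while a function compiles, each dependency-group entry holds the CompilationInfo's Foreign wrapper; CommitDependencies swaps that wrapper for a WeakCell of the finished code, and RollbackDependencies removes it if compilation aborts. A hedged, self-contained sketch of that state machine (hypothetical simplified types, not V8's; requires C++20 for std::erase):

```cpp
#include <cassert>
#include <variant>
#include <vector>

struct CompilationInfo {};  // placeholder recorded while compiling
struct CodeWeakCell {};     // weak handle to the finished code

using Entry = std::variant<CompilationInfo*, CodeWeakCell*>;

// One dependency group: placeholders go in during compilation and are either
// committed (swapped for a weak cell) or rolled back (erased).
struct DependencyGroup {
  std::vector<Entry> entries;

  void Commit(CompilationInfo* info, CodeWeakCell* cell) {
    for (auto& e : entries)
      if (e == Entry{info}) { e = cell; return; }  // cf. UpdateToFinishedCode
  }
  void Rollback(CompilationInfo* info) {           // cf. RemoveCompilationInfo
    std::erase(entries, Entry{info});
  }
};

int main() {
  CompilationInfo info;
  CodeWeakCell cell;
  DependencyGroup group;
  group.entries.push_back(&info);  // recorded during compilation
  group.Commit(&info, &cell);      // compilation succeeded
  assert(group.entries.front() == Entry{&cell});
}
```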
@@ -135,9 +135,6 @@ Heap::Heap()
       full_codegen_bytes_generated_(0),
       crankshaft_codegen_bytes_generated_(0),
       gcs_since_last_deopt_(0),
-#ifdef VERIFY_HEAP
-      no_weak_object_verification_scope_depth_(0),
-#endif
       allocation_sites_scratchpad_length_(0),
       promotion_queue_(this),
       configured_(false),
@@ -3169,6 +3166,10 @@ void Heap::CreateInitialObjects() {

   set_detached_contexts(empty_fixed_array());

+  set_weak_object_to_code_table(
+      *WeakHashTable::New(isolate(), 16, USE_DEFAULT_MINIMUM_CAPACITY,
+                          TENURED));
+
   Handle<SeededNumberDictionary> slow_element_dictionary =
       SeededNumberDictionary::New(isolate(), 0, TENURED);
   slow_element_dictionary->set_requires_slow_elements();
@@ -5600,7 +5601,6 @@ bool Heap::CreateHeapObjects() {
   set_array_buffers_list(undefined_value());
   set_new_array_buffer_views_list(undefined_value());
   set_allocation_sites_list(undefined_value());
-  weak_object_to_code_table_ = undefined_value();
   return true;
 }

@@ -5775,41 +5775,25 @@ void Heap::RemoveGCEpilogueCallback(v8::Isolate::GCEpilogueCallback callback) {


 // TODO(ishell): Find a better place for this.
-void Heap::AddWeakObjectToCodeDependency(Handle<Object> obj,
+void Heap::AddWeakObjectToCodeDependency(Handle<HeapObject> obj,
                                          Handle<DependentCode> dep) {
   DCHECK(!InNewSpace(*obj));
-  DCHECK(!InNewSpace(*dep));
-  // This handle scope keeps the table handle local to this function, which
-  // allows us to safely skip write barriers in table update operations.
-  HandleScope scope(isolate());
-  Handle<WeakHashTable> table(WeakHashTable::cast(weak_object_to_code_table_),
-                              isolate());
+  Handle<WeakHashTable> table(weak_object_to_code_table(), isolate());
   table = WeakHashTable::Put(table, obj, dep);

-  if (ShouldZapGarbage() && weak_object_to_code_table_ != *table) {
-    WeakHashTable::cast(weak_object_to_code_table_)->Zap(the_hole_value());
-  }
-  set_weak_object_to_code_table(*table);
-  DCHECK_EQ(*dep, table->Lookup(obj));
+  if (*table != weak_object_to_code_table())
+    set_weak_object_to_code_table(*table);
+  DCHECK_EQ(*dep, LookupWeakObjectToCodeDependency(obj));
 }


-DependentCode* Heap::LookupWeakObjectToCodeDependency(Handle<Object> obj) {
-  Object* dep = WeakHashTable::cast(weak_object_to_code_table_)->Lookup(obj);
+DependentCode* Heap::LookupWeakObjectToCodeDependency(Handle<HeapObject> obj) {
+  Object* dep = weak_object_to_code_table()->Lookup(obj);
   if (dep->IsDependentCode()) return DependentCode::cast(dep);
   return DependentCode::cast(empty_fixed_array());
 }


-void Heap::EnsureWeakObjectToCodeTable() {
-  if (!weak_object_to_code_table()->IsHashTable()) {
-    set_weak_object_to_code_table(
-        *WeakHashTable::New(isolate(), 16, USE_DEFAULT_MINIMUM_CAPACITY,
-                            TENURED));
-  }
-}
-
-
 void Heap::FatalProcessOutOfMemory(const char* location, bool take_snapshot) {
   v8::internal::V8::FatalProcessOutOfMemory(location, take_snapshot);
 }

@@ -182,7 +182,8 @@ namespace internal {
   V(FixedArray, allocation_sites_scratchpad, AllocationSitesScratchpad) \
   V(FixedArray, microtask_queue, MicrotaskQueue)                        \
   V(FixedArray, keyed_load_dummy_vector, KeyedLoadDummyVector)          \
-  V(FixedArray, detached_contexts, DetachedContexts)
+  V(FixedArray, detached_contexts, DetachedContexts)                    \
+  V(WeakHashTable, weak_object_to_code_table, WeakObjectToCodeTable)

 // Entries in this list are limited to Smis and are not visited during GC.
 #define SMI_ROOT_LIST(V) \
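For orientation: entries in heap.h's STRONG_ROOT_LIST expand, via X-macros, into typed accessors and root-table slots. Registering weak_object_to_code_table here is what replaces the ad-hoc weak_object_to_code_table_ field with a proper strong root, and what bumps kEmptyStringRootIndex from 154 to 155 in the include/v8.h hunk at the top. A rough, runnable sketch of the expansion (simplified; the real ROOT_ACCESSOR/RootListIndex machinery in heap.h has more pieces):

```cpp
#include <cstdio>

// Each V(type, name, CamelName) entry yields an enum slot (and, in V8, a
// typed accessor). Appending an entry shifts every later root index, which
// is why kEmptyStringRootIndex moved.
#define MY_ROOT_LIST(V)                             \
  V(int, detached_contexts, DetachedContexts)       \
  V(int, weak_object_to_code_table, WeakObjectToCodeTable)

#define ROOT_INDEX(type, name, CamelName) k##CamelName##RootIndex,
enum RootListIndex { MY_ROOT_LIST(ROOT_INDEX) kRootListLength };

int main() {
  std::printf("%d roots, weak table at index %d\n", kRootListLength,
              kWeakObjectToCodeTableRootIndex);
}
```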
@@ -873,8 +874,6 @@ class Heap {
   // Used in CreateAllocationSiteStub and the (de)serializer.
   Object** allocation_sites_list_address() { return &allocation_sites_list_; }

-  Object* weak_object_to_code_table() { return weak_object_to_code_table_; }
-
   void set_encountered_weak_collections(Object* weak_collection) {
     encountered_weak_collections_ = weak_collection;
   }
@@ -979,11 +978,6 @@ class Heap {
 #ifdef VERIFY_HEAP
   // Verify the heap is in its normal state before or after a GC.
   void Verify();
-
-  bool weak_embedded_objects_verification_enabled() {
-    return no_weak_object_verification_scope_depth_ == 0;
-  }
-
 #endif

 #ifdef DEBUG
@@ -1439,16 +1433,10 @@ class Heap {
     Heap* heap_;
   };

-  void AddWeakObjectToCodeDependency(Handle<Object> obj,
+  void AddWeakObjectToCodeDependency(Handle<HeapObject> obj,
                                      Handle<DependentCode> dep);

-  DependentCode* LookupWeakObjectToCodeDependency(Handle<Object> obj);
-
-  void InitializeWeakObjectToCodeTable() {
-    set_weak_object_to_code_table(undefined_value());
-  }
-
-  void EnsureWeakObjectToCodeTable();
+  DependentCode* LookupWeakObjectToCodeDependency(Handle<HeapObject> obj);

   static void FatalProcessOutOfMemory(const char* location,
                                       bool take_snapshot = false);
@@ -1645,11 +1633,6 @@ class Heap {
   // array buffer.
   Object* new_array_buffer_views_list_;

-  // WeakHashTable that maps objects embedded in optimized code to dependent
-  // code list. It is initialized lazily and contains the undefined_value at
-  // start.
-  Object* weak_object_to_code_table_;
-
   // List of encountered weak collections (JSWeakMap and JSWeakSet) during
   // marking. It is initialized during marking, destroyed after marking and
   // contains Smi(0) while marking is not active.
@@ -2067,15 +2050,6 @@ class Heap {

   void ClearObjectStats(bool clear_last_time_stats = false);

-  void set_weak_object_to_code_table(Object* value) {
-    DCHECK(!InNewSpace(value));
-    weak_object_to_code_table_ = value;
-  }
-
-  Object** weak_object_to_code_table_address() {
-    return &weak_object_to_code_table_;
-  }
-
   inline void UpdateAllocationsHash(HeapObject* object);
   inline void UpdateAllocationsHash(uint32_t value);
   inline void PrintAlloctionsHash();
@@ -2127,10 +2101,6 @@ class Heap {
   // deoptimization triggered by garbage collection.
   int gcs_since_last_deopt_;

-#ifdef VERIFY_HEAP
-  int no_weak_object_verification_scope_depth_;
-#endif
-
   static const int kAllocationSiteScratchpadSize = 256;
   int allocation_sites_scratchpad_length_;

@@ -552,8 +552,6 @@ void IncrementalMarking::StartMarking(CompactionFlag flag) {
   IncrementalMarkingRootMarkingVisitor visitor(this);
   heap_->IterateStrongRoots(&visitor, VISIT_ONLY_STRONG);

-  heap_->mark_compact_collector()->MarkWeakObjectToCodeTable();
-
   // Ready to start incremental marking.
   if (FLAG_trace_incremental_marking) {
     PrintF("[IncrementalMarking] Running\n");
|
@ -301,10 +301,13 @@ void MarkCompactCollector::CollectGarbage() {
|
||||
MarkLiveObjects();
|
||||
DCHECK(heap_->incremental_marking()->IsStopped());
|
||||
|
||||
if (FLAG_collect_maps) ClearNonLiveReferences();
|
||||
|
||||
// ClearNonLiveReferences can deoptimize code in dependent code arrays.
|
||||
// Process weak cells before so that weak cells in dependent code
|
||||
// arrays are cleared or contain only live code objects.
|
||||
ProcessAndClearWeakCells();
|
||||
|
||||
if (FLAG_collect_maps) ClearNonLiveReferences();
|
||||
|
||||
ClearWeakCollections();
|
||||
|
||||
heap_->set_encountered_weak_cells(Smi::FromInt(0));
|
||||
@@ -318,9 +321,7 @@ void MarkCompactCollector::CollectGarbage() {
   SweepSpaces();

 #ifdef VERIFY_HEAP
-  if (heap()->weak_embedded_objects_verification_enabled()) {
-    VerifyWeakEmbeddedObjectsInCode();
-  }
+  VerifyWeakEmbeddedObjectsInCode();
   if (FLAG_collect_maps && FLAG_omit_map_checks_for_leaf_maps) {
     VerifyOmittedMapChecks();
   }
@@ -1958,8 +1959,6 @@ void MarkCompactCollector::MarkRoots(RootMarkingVisitor* visitor) {
   // Handle the string table specially.
   MarkStringTable(visitor);

-  MarkWeakObjectToCodeTable();
-
   // There may be overflowed objects in the heap. Visit them now.
   while (marking_deque_.overflowed()) {
     RefillMarkingDeque();
@@ -2000,16 +1999,6 @@ void MarkCompactCollector::MarkImplicitRefGroups() {
 }


-void MarkCompactCollector::MarkWeakObjectToCodeTable() {
-  HeapObject* weak_object_to_code_table =
-      HeapObject::cast(heap()->weak_object_to_code_table());
-  if (!IsMarked(weak_object_to_code_table)) {
-    MarkBit mark = Marking::MarkBitFrom(weak_object_to_code_table);
-    SetMark(weak_object_to_code_table, mark);
-  }
-}
-
-
 // Mark all objects reachable from the objects on the marking stack.
 // Before: the marking stack contains zero or more heap object pointers.
 // After: the marking stack is empty, and all objects reachable from the
@@ -2328,67 +2317,29 @@ void MarkCompactCollector::ClearNonLiveReferences() {
     ClearNonLivePrototypeTransitions(map);
     ClearNonLiveMapTransitions(map, map_mark);

-    if (map_mark.Get()) {
-      ClearNonLiveDependentCode(map->dependent_code());
-    } else {
-      ClearDependentCode(map->dependent_code());
+    if (!map_mark.Get()) {
+      have_code_to_deoptimize_ |=
+          map->dependent_code()->MarkCodeForDeoptimization(
+              isolate(), DependentCode::kWeakCodeGroup);
       map->set_dependent_code(DependentCode::cast(heap()->empty_fixed_array()));
     }
   }

-  // Iterate over property cell space, removing dependent code that is not
-  // otherwise kept alive by strong references.
-  HeapObjectIterator cell_iterator(heap_->property_cell_space());
-  for (HeapObject* cell = cell_iterator.Next(); cell != NULL;
-       cell = cell_iterator.Next()) {
-    if (IsMarked(cell)) {
-      ClearNonLiveDependentCode(PropertyCell::cast(cell)->dependent_code());
-    }
-  }
-
-  // Iterate over allocation sites, removing dependent code that is not
-  // otherwise kept alive by strong references.
-  Object* undefined = heap()->undefined_value();
-  for (Object* site = heap()->allocation_sites_list(); site != undefined;
-       site = AllocationSite::cast(site)->weak_next()) {
-    if (IsMarked(site)) {
-      ClearNonLiveDependentCode(AllocationSite::cast(site)->dependent_code());
-    }
-  }
-
-  if (heap_->weak_object_to_code_table()->IsHashTable()) {
-    WeakHashTable* table =
-        WeakHashTable::cast(heap_->weak_object_to_code_table());
-    uint32_t capacity = table->Capacity();
-    for (uint32_t i = 0; i < capacity; i++) {
-      uint32_t key_index = table->EntryToIndex(i);
-      Object* key = table->get(key_index);
-      if (!table->IsKey(key)) continue;
-      uint32_t value_index = table->EntryToValueIndex(i);
-      Object* value = table->get(value_index);
-      if (key->IsCell() && !IsMarked(key)) {
-        Cell* cell = Cell::cast(key);
-        Object* object = cell->value();
-        if (IsMarked(object)) {
-          MarkBit mark = Marking::MarkBitFrom(cell);
-          SetMark(cell, mark);
-          Object** value_slot = HeapObject::RawField(cell, Cell::kValueOffset);
-          RecordSlot(value_slot, value_slot, *value_slot);
-        }
-      }
-      if (IsMarked(key)) {
-        if (!IsMarked(value)) {
-          HeapObject* obj = HeapObject::cast(value);
-          MarkBit mark = Marking::MarkBitFrom(obj);
-          SetMark(obj, mark);
-        }
-        ClearNonLiveDependentCode(DependentCode::cast(value));
-      } else {
-        ClearDependentCode(DependentCode::cast(value));
-        table->set(key_index, heap_->the_hole_value());
-        table->set(value_index, heap_->the_hole_value());
-        table->ElementRemoved();
-      }
+  WeakHashTable* table = heap_->weak_object_to_code_table();
+  uint32_t capacity = table->Capacity();
+  for (uint32_t i = 0; i < capacity; i++) {
+    uint32_t key_index = table->EntryToIndex(i);
+    Object* key = table->get(key_index);
+    if (!table->IsKey(key)) continue;
+    uint32_t value_index = table->EntryToValueIndex(i);
+    Object* value = table->get(value_index);
+    if (WeakCell::cast(key)->cleared()) {
+      have_code_to_deoptimize_ |=
+          DependentCode::cast(value)->MarkCodeForDeoptimization(
+              isolate(), DependentCode::kWeakCodeGroup);
+      table->set(key_index, heap_->the_hole_value());
+      table->set(value_index, heap_->the_hole_value());
+      table->ElementRemoved();
     }
   }
 }
@@ -2560,70 +2511,6 @@ void MarkCompactCollector::TrimEnumCache(Map* map,
 }


-void MarkCompactCollector::ClearDependentCode(DependentCode* entries) {
-  DisallowHeapAllocation no_allocation;
-  DependentCode::GroupStartIndexes starts(entries);
-  int number_of_entries = starts.number_of_entries();
-  if (number_of_entries == 0) return;
-  int g = DependentCode::kWeakCodeGroup;
-  for (int i = starts.at(g); i < starts.at(g + 1); i++) {
-    // If the entry is compilation info then the map must be alive,
-    // and ClearDependentCode shouldn't be called.
-    DCHECK(entries->is_code_at(i));
-    Code* code = entries->code_at(i);
-    if (IsMarked(code) && !code->marked_for_deoptimization()) {
-      DependentCode::SetMarkedForDeoptimization(
-          code, static_cast<DependentCode::DependencyGroup>(g));
-      code->InvalidateEmbeddedObjects();
-      have_code_to_deoptimize_ = true;
-    }
-  }
-  for (int i = 0; i < number_of_entries; i++) {
-    entries->clear_at(i);
-  }
-}
-
-
-int MarkCompactCollector::ClearNonLiveDependentCodeInGroup(
-    DependentCode* entries, int group, int start, int end, int new_start) {
-  int survived = 0;
-  for (int i = start; i < end; i++) {
-    Object* obj = entries->object_at(i);
-    DCHECK(obj->IsCode() || IsMarked(obj));
-    if (IsMarked(obj) &&
-        (!obj->IsCode() || !WillBeDeoptimized(Code::cast(obj)))) {
-      if (new_start + survived != i) {
-        entries->set_object_at(new_start + survived, obj);
-      }
-      Object** slot = entries->slot_at(new_start + survived);
-      RecordSlot(slot, slot, obj);
-      survived++;
-    }
-  }
-  entries->set_number_of_entries(
-      static_cast<DependentCode::DependencyGroup>(group), survived);
-  return survived;
-}
-
-
-void MarkCompactCollector::ClearNonLiveDependentCode(DependentCode* entries) {
-  DisallowHeapAllocation no_allocation;
-  DependentCode::GroupStartIndexes starts(entries);
-  int number_of_entries = starts.number_of_entries();
-  if (number_of_entries == 0) return;
-  int new_number_of_entries = 0;
-  // Go through all groups, remove dead codes and compact.
-  for (int g = 0; g < DependentCode::kGroupCount; g++) {
-    int survived = ClearNonLiveDependentCodeInGroup(
-        entries, g, starts.at(g), starts.at(g + 1), new_number_of_entries);
-    new_number_of_entries += survived;
-  }
-  for (int i = new_number_of_entries; i < number_of_entries; i++) {
-    entries->clear_at(i);
-  }
-}
-
-
 void MarkCompactCollector::ProcessWeakCollections() {
   GCTracer::Scope gc_scope(heap()->tracer(),
                            GCTracer::Scope::MC_WEAKCOLLECTION_PROCESS);
@@ -2699,10 +2586,31 @@ void MarkCompactCollector::ProcessAndClearWeakCells() {
     // cannot be a Smi here.
     HeapObject* value = HeapObject::cast(weak_cell->value());
     if (!MarkCompactCollector::IsMarked(value)) {
-      weak_cell->clear();
+      // Cells for new-space objects embedded in optimized code are wrapped in
+      // WeakCell and put into Heap::weak_object_to_code_table.
+      // Such cells do not have any strong references but we want to keep them
+      // alive as long as the cell value is alive.
+      // TODO(ulan): remove this once we remove Heap::weak_object_to_code_table.
+      if (value->IsCell()) {
+        Object* cell_value = Cell::cast(value)->value();
+        if (cell_value->IsHeapObject() &&
+            MarkCompactCollector::IsMarked(HeapObject::cast(cell_value))) {
+          // Resurrect the cell.
+          MarkBit mark = Marking::MarkBitFrom(value);
+          SetMark(value, mark);
+          Object** slot = HeapObject::RawField(value, Cell::kValueOffset);
+          RecordSlot(slot, slot, *slot);
+          slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset);
+          RecordSlot(slot, slot, *slot);
+        } else {
+          weak_cell->clear();
+        }
+      } else {
+        weak_cell->clear();
+      }
     } else {
       Object** slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset);
-      heap()->mark_compact_collector()->RecordSlot(slot, slot, value);
+      RecordSlot(slot, slot, *slot);
     }
     weak_cell_obj = weak_cell->next();
     weak_cell->set_next(undefined, SKIP_WRITE_BARRIER);
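The hunk above adds one special case to the weak-cell sweep: a WeakCell pointing to a Cell is kept alive as long as the Cell's own value is marked, because such cells wrap new-space objects referenced from optimized code. A standalone sketch of the basic sweep rule that the special case sits on top of (simplified booleans instead of mark bits; hypothetical types):

```cpp
#include <cassert>
#include <vector>

struct HeapValue { bool marked; };
struct WeakCellEntry { HeapValue* value; bool cleared; };

// Simplified analogue of ProcessAndClearWeakCells: a dead value clears the
// cell; a live value keeps it (V8 additionally records the value slot so the
// pointer gets updated during evacuation).
void SweepWeakCells(std::vector<WeakCellEntry>& cells) {
  for (auto& cell : cells)
    if (!cell.value->marked) cell.cleared = true;
}

int main() {
  HeapValue live{true}, dead{false};
  std::vector<WeakCellEntry> cells = {{&live, false}, {&dead, false}};
  SweepWeakCells(cells);
  assert(!cells[0].cleared && cells[1].cleared);
}
```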
@@ -3548,13 +3456,6 @@ void MarkCompactCollector::EvacuateNewSpaceAndCandidates() {
   }

   heap_->string_table()->Iterate(&updating_visitor);
-  updating_visitor.VisitPointer(heap_->weak_object_to_code_table_address());
-  if (heap_->weak_object_to_code_table()->IsHashTable()) {
-    WeakHashTable* table =
-        WeakHashTable::cast(heap_->weak_object_to_code_table());
-    table->Iterate(&updating_visitor);
-    table->Rehash(heap_->isolate()->factory()->undefined_value());
-  }

   // Update pointers from external string table.
   heap_->UpdateReferencesInExternalStringTable(
@@ -3576,6 +3477,10 @@ void MarkCompactCollector::EvacuateNewSpaceAndCandidates() {

   slots_buffer_allocator_.DeallocateChain(&migration_slots_buffer_);
   DCHECK(migration_slots_buffer_ == NULL);
+
+  // The hashing of weak_object_to_code_table is no longer valid.
+  heap()->weak_object_to_code_table()->Rehash(
+      heap()->isolate()->factory()->undefined_value());
 }

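Why the Rehash call above is needed: WeakHashTableShape hashes keys by the address of the referenced object (see the objects-inl.h hunks further down), and a compacting GC moves objects, so every stored bucket position goes stale after evacuation. A sketch of the address-based hash, matching the shape code below (standalone, illustrative):

```cpp
#include <cstdint>
#include <cstdio>

// The table's hash is just the low 32 bits of the key object's address.
// It is valid only until the next moving GC, hence the post-evacuation Rehash.
uint32_t AddressHash(const void* object) {
  intptr_t hash = reinterpret_cast<intptr_t>(object);
  return static_cast<uint32_t>(hash & 0xFFFFFFFF);
}

int main() {
  int dummy;
  std::printf("%u\n", AddressHash(&dummy));  // differs from run to run
}
```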
@@ -656,10 +656,6 @@ class MarkCompactCollector {

   bool evacuation() const { return evacuation_; }

-  // Mark the global table which maps weak objects to dependent code without
-  // marking its contents.
-  void MarkWeakObjectToCodeTable();
-
   // Special case for processing weak references in a full collection. We need
   // to artificially keep AllocationSites alive for a time.
   void MarkAllocationSite(AllocationSite* site);
@@ -818,11 +814,6 @@ class MarkCompactCollector {
                                 int number_of_own_descriptors);
   void TrimEnumCache(Map* map, DescriptorArray* descriptors);

-  void ClearDependentCode(DependentCode* dependent_code);
-  void ClearNonLiveDependentCode(DependentCode* dependent_code);
-  int ClearNonLiveDependentCodeInGroup(DependentCode* dependent_code, int group,
-                                       int start, int end, int new_start);
-
   // Mark all values associated with reachable keys in weak collections
   // encountered so far. This might push new object or even new weak maps onto
   // the marking stack.
@@ -326,19 +326,6 @@ void StaticMarkingVisitor<StaticVisitor>::VisitPropertyCell(
     Map* map, HeapObject* object) {
   Heap* heap = map->GetHeap();

-  Object** slot =
-      HeapObject::RawField(object, PropertyCell::kDependentCodeOffset);
-  if (FLAG_collect_maps) {
-    // Mark property cell dependent codes array but do not push it onto marking
-    // stack, this will make references from it weak. We will clean dead
-    // codes when we iterate over property cells in ClearNonLiveReferences.
-    HeapObject* obj = HeapObject::cast(*slot);
-    heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
-    StaticVisitor::MarkObjectWithoutPush(heap, obj);
-  } else {
-    StaticVisitor::VisitPointer(heap, slot);
-  }
-
   StaticVisitor::VisitPointers(
       heap,
       HeapObject::RawField(object, PropertyCell::kPointerFieldsBeginOffset),
@@ -367,20 +354,6 @@ void StaticMarkingVisitor<StaticVisitor>::VisitAllocationSite(
     Map* map, HeapObject* object) {
   Heap* heap = map->GetHeap();

-  Object** slot =
-      HeapObject::RawField(object, AllocationSite::kDependentCodeOffset);
-  if (FLAG_collect_maps) {
-    // Mark allocation site dependent codes array but do not push it onto
-    // marking stack, this will make references from it weak. We will clean
-    // dead codes when we iterate over allocation sites in
-    // ClearNonLiveReferences.
-    HeapObject* obj = HeapObject::cast(*slot);
-    heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
-    StaticVisitor::MarkObjectWithoutPush(heap, obj);
-  } else {
-    StaticVisitor::VisitPointer(heap, slot);
-  }
-
   StaticVisitor::VisitPointers(
       heap,
       HeapObject::RawField(object, AllocationSite::kPointerFieldsBeginOffset),
@@ -643,14 +616,6 @@ void StaticMarkingVisitor<StaticVisitor>::MarkMapContents(Heap* heap,
                                descriptors->GetDescriptorEndSlot(end));
   }

-  // Mark prototype dependent codes array but do not push it onto marking
-  // stack, this will make references from it weak. We will clean dead
-  // codes when we iterate over maps in ClearNonLiveTransitions.
-  Object** slot = HeapObject::RawField(map, Map::kDependentCodeOffset);
-  HeapObject* obj = HeapObject::cast(*slot);
-  heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
-  StaticVisitor::MarkObjectWithoutPush(heap, obj);
-
   // Mark the pointer fields of the Map. Since the transitions array has
   // been marked already, it is fine that one of these fields contains a
   // pointer to it.
@@ -414,12 +414,12 @@ Representation LChunk::LookupLiteralRepresentation(


 static void AddWeakObjectToCodeDependency(Isolate* isolate,
-                                          Handle<Object> object,
+                                          Handle<HeapObject> object,
                                           Handle<Code> code) {
+  Handle<WeakCell> cell = Code::WeakCellFor(code);
   Heap* heap = isolate->heap();
-  heap->EnsureWeakObjectToCodeTable();
   Handle<DependentCode> dep(heap->LookupWeakObjectToCodeDependency(object));
-  dep = DependentCode::Insert(dep, DependentCode::kWeakCodeGroup, code);
+  dep = DependentCode::InsertWeakCode(dep, DependentCode::kWeakCodeGroup, cell);
   heap->AddWeakObjectToCodeDependency(object, dep);
 }

@@ -462,6 +462,9 @@ void LChunk::RegisterWeakObjectsInOptimizedCode(Handle<Code> code) const {


 void LChunk::CommitDependencies(Handle<Code> code) const {
+  if (!code->is_optimized_code()) return;
+  HandleScope scope(isolate());
+
   for (MapSet::const_iterator it = deprecation_dependencies_.begin(),
        iend = deprecation_dependencies_.end(); it != iend; ++it) {
     Handle<Map> map = *it;
@@ -479,7 +482,7 @@ void LChunk::CommitDependencies(Handle<Code> code) const {
   }

   info_->CommitDependencies(code);
-  if (code->is_optimized_code()) RegisterWeakObjectsInOptimizedCode(code);
+  RegisterWeakObjectsInOptimizedCode(code);
 }


@@ -677,6 +677,7 @@ void Code::CodeVerify() {

 void Code::VerifyEmbeddedObjectsDependency() {
   if (!CanContainWeakObjects()) return;
+  WeakCell* cell = CachedWeakCell();
   DisallowHeapAllocation no_gc;
   Isolate* isolate = GetIsolate();
   HandleScope scope(isolate);
@@ -687,13 +688,13 @@ void Code::VerifyEmbeddedObjectsDependency() {
     if (obj->IsMap()) {
       Map* map = Map::cast(obj);
       CHECK(map->dependent_code()->Contains(DependentCode::kWeakCodeGroup,
-                                            this));
+                                            cell));
     } else if (obj->IsJSObject()) {
-      Object* raw_table = GetIsolate()->heap()->weak_object_to_code_table();
-      WeakHashTable* table = WeakHashTable::cast(raw_table);
-      Handle<Object> key_obj(obj, isolate);
-      CHECK(DependentCode::cast(table->Lookup(key_obj))->Contains(
-          DependentCode::kWeakCodeGroup, this));
+      WeakHashTable* table =
+          GetIsolate()->heap()->weak_object_to_code_table();
+      Handle<HeapObject> key_obj(HeapObject::cast(obj), isolate);
+      CHECK(DependentCode::cast(table->Lookup(key_obj))
+                ->Contains(DependentCode::kWeakCodeGroup, cell));
     }
   }
 }
@@ -4766,21 +4766,6 @@ void DependentCode::set_number_of_entries(DependencyGroup group, int value) {
 }


-bool DependentCode::is_code_at(int i) {
-  return get(kCodesStartIndex + i)->IsCode();
-}
-
-Code* DependentCode::code_at(int i) {
-  return Code::cast(get(kCodesStartIndex + i));
-}
-
-
-CompilationInfo* DependentCode::compilation_info_at(int i) {
-  return reinterpret_cast<CompilationInfo*>(
-      Foreign::cast(get(kCodesStartIndex + i))->foreign_address());
-}
-
-
 void DependentCode::set_object_at(int i, Object* object) {
   set(kCodesStartIndex + i, object);
 }
@@ -4791,11 +4776,6 @@ Object* DependentCode::object_at(int i) {
 }


-Object** DependentCode::slot_at(int i) {
-  return RawFieldOfElementAt(kCodesStartIndex + i);
-}
-
-
 void DependentCode::clear_at(int i) {
   set_undefined(kCodesStartIndex + i);
 }
@@ -7228,13 +7208,18 @@ Handle<ObjectHashTable> ObjectHashTable::Shrink(

 template <int entrysize>
 bool WeakHashTableShape<entrysize>::IsMatch(Handle<Object> key, Object* other) {
-  return key->SameValue(other);
+  if (other->IsWeakCell()) other = WeakCell::cast(other)->value();
+  return key->IsWeakCell() ? WeakCell::cast(*key)->value() == other
+                           : *key == other;
 }


 template <int entrysize>
 uint32_t WeakHashTableShape<entrysize>::Hash(Handle<Object> key) {
-  intptr_t hash = reinterpret_cast<intptr_t>(*key);
+  intptr_t hash =
+      key->IsWeakCell()
+          ? reinterpret_cast<intptr_t>(WeakCell::cast(*key)->value())
+          : reinterpret_cast<intptr_t>(*key);
   return (uint32_t)(hash & 0xFFFFFFFF);
 }

@@ -7242,6 +7227,7 @@ uint32_t WeakHashTableShape<entrysize>::Hash(Handle<Object> key) {
 template <int entrysize>
 uint32_t WeakHashTableShape<entrysize>::HashForObject(Handle<Object> key,
                                                       Object* other) {
+  if (other->IsWeakCell()) other = WeakCell::cast(other)->value();
   intptr_t hash = reinterpret_cast<intptr_t>(other);
   return (uint32_t)(hash & 0xFFFFFFFF);
 }
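The shape changes above make lookups transparent to the wrapping: stored keys are WeakCells, probe keys are raw objects, and both match and hash unwrap a WeakCell before comparing. A standalone sketch of that unwrap-both-sides rule (simplified types, not the templated V8 shape):

```cpp
#include <cassert>

struct Object {
  bool is_weak_cell = false;
  Object* value = nullptr;  // wrapped object, when is_weak_cell is true
};

Object* Unwrap(Object* o) { return o->is_weak_cell ? o->value : o; }

// Matching rule: compare the underlying objects, whichever side is wrapped.
bool IsMatch(Object* key, Object* other) {
  return Unwrap(key) == Unwrap(other);
}

int main() {
  Object target;
  Object cell{true, &target};
  assert(IsMatch(&target, &cell));  // a raw probe finds the wrapped key
}
```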
src/objects.cc
@@ -11884,9 +11884,9 @@ void Map::ZapPrototypeTransitions() {
 void Map::AddDependentCompilationInfo(Handle<Map> map,
                                       DependentCode::DependencyGroup group,
                                       CompilationInfo* info) {
-  Handle<DependentCode> codes =
-      DependentCode::Insert(handle(map->dependent_code(), info->isolate()),
-                            group, info->object_wrapper());
+  Handle<DependentCode> codes = DependentCode::InsertCompilationInfo(
+      handle(map->dependent_code(), info->isolate()), group,
+      info->object_wrapper());
   if (*codes != map->dependent_code()) map->set_dependent_code(*codes);
   info->dependencies(group)->Add(map, info->zone());
 }
@@ -11896,8 +11896,9 @@ void Map::AddDependentCompilationInfo(Handle<Map> map,
 void Map::AddDependentCode(Handle<Map> map,
                            DependentCode::DependencyGroup group,
                            Handle<Code> code) {
-  Handle<DependentCode> codes = DependentCode::Insert(
-      Handle<DependentCode>(map->dependent_code()), group, code);
+  Handle<WeakCell> cell = Code::WeakCellFor(code);
+  Handle<DependentCode> codes = DependentCode::InsertWeakCode(
+      Handle<DependentCode>(map->dependent_code()), group, cell);
   if (*codes != map->dependent_code()) map->set_dependent_code(*codes);
 }

@@ -11929,6 +11930,20 @@ DependentCode* DependentCode::ForObject(Handle<HeapObject> object,
 }


+Handle<DependentCode> DependentCode::InsertCompilationInfo(
+    Handle<DependentCode> entries, DependencyGroup group,
+    Handle<Foreign> info) {
+  return Insert(entries, group, info);
+}
+
+
+Handle<DependentCode> DependentCode::InsertWeakCode(
+    Handle<DependentCode> entries, DependencyGroup group,
+    Handle<WeakCell> code_cell) {
+  return Insert(entries, group, code_cell);
+}
+
+
 Handle<DependentCode> DependentCode::Insert(Handle<DependentCode> entries,
                                             DependencyGroup group,
                                             Handle<Object> object) {
@@ -11941,27 +11956,13 @@ Handle<DependentCode> DependentCode::Insert(Handle<DependentCode> entries,
     if (entries->object_at(i) == *object) return entries;
   }
   if (entries->length() < kCodesStartIndex + number_of_entries + 1) {
-    int capacity = kCodesStartIndex + number_of_entries + 1;
-    if (capacity > 5) capacity = capacity * 5 / 4;
-    Handle<DependentCode> new_entries = Handle<DependentCode>::cast(
-        FixedArray::CopySize(entries, capacity, TENURED));
-    // The number of codes can change after GC.
+    entries = EnsureSpace(entries);
+    // The number of codes can change after Compact and GC.
     starts.Recompute(*entries);
     start = starts.at(group);
     end = starts.at(group + 1);
-    number_of_entries = starts.number_of_entries();
-    for (int i = 0; i < number_of_entries; i++) {
-      entries->clear_at(i);
-    }
-    // If the old fixed array was empty, we need to reset counters of the
-    // new array.
-    if (number_of_entries == 0) {
-      for (int g = 0; g < kGroupCount; g++) {
-        new_entries->set_number_of_entries(static_cast<DependencyGroup>(g), 0);
-      }
-    }
-    entries = new_entries;
   }
+
   entries->ExtendGroup(group);
   entries->set_object_at(end, *object);
   entries->set_number_of_entries(group, end + 1 - start);
@@ -11969,42 +11970,82 @@ Handle<DependentCode> DependentCode::Insert(Handle<DependentCode> entries,
 }


-void DependentCode::UpdateToFinishedCode(DependencyGroup group,
-                                         CompilationInfo* info,
-                                         Code* code) {
+Handle<DependentCode> DependentCode::EnsureSpace(
+    Handle<DependentCode> entries) {
+  if (entries->length() == 0) {
+    entries = Handle<DependentCode>::cast(
+        FixedArray::CopySize(entries, kCodesStartIndex + 1, TENURED));
+    for (int g = 0; g < kGroupCount; g++) {
+      entries->set_number_of_entries(static_cast<DependencyGroup>(g), 0);
+    }
+    return entries;
+  }
+  if (entries->Compact()) return entries;
+  GroupStartIndexes starts(*entries);
+  int capacity =
+      kCodesStartIndex + DependentCode::Grow(starts.number_of_entries());
+  return Handle<DependentCode>::cast(
+      FixedArray::CopySize(entries, capacity, TENURED));
+}
+
+
+bool DependentCode::Compact() {
+  GroupStartIndexes starts(this);
+  int n = 0;
+  for (int g = 0; g < kGroupCount; g++) {
+    int start = starts.at(g);
+    int end = starts.at(g + 1);
+    int count = 0;
+    DCHECK(start >= n);
+    for (int i = start; i < end; i++) {
+      Object* obj = object_at(i);
+      if (!obj->IsWeakCell() || !WeakCell::cast(obj)->cleared()) {
+        if (i != n + count) {
+          copy(i, n + count);
+        }
+        count++;
+      }
+    }
+    if (count != end - start) {
+      set_number_of_entries(static_cast<DependencyGroup>(g), count);
+    }
+    n += count;
+  }
+  return n < starts.number_of_entries();
+}
+
+
+void DependentCode::UpdateToFinishedCode(DependencyGroup group, Foreign* info,
+                                         WeakCell* code_cell) {
   DisallowHeapAllocation no_gc;
-  AllowDeferredHandleDereference get_object_wrapper;
-  Foreign* info_wrapper = *info->object_wrapper();
   GroupStartIndexes starts(this);
   int start = starts.at(group);
   int end = starts.at(group + 1);
   for (int i = start; i < end; i++) {
-    if (object_at(i) == info_wrapper) {
-      set_object_at(i, code);
+    if (object_at(i) == info) {
+      set_object_at(i, code_cell);
       break;
     }
   }

 #ifdef DEBUG
   for (int i = start; i < end; i++) {
-    DCHECK(is_code_at(i) || compilation_info_at(i) != info);
+    DCHECK(object_at(i) != info);
   }
 #endif
 }


 void DependentCode::RemoveCompilationInfo(DependentCode::DependencyGroup group,
-                                          CompilationInfo* info) {
+                                          Foreign* info) {
   DisallowHeapAllocation no_allocation;
-  AllowDeferredHandleDereference get_object_wrapper;
-  Foreign* info_wrapper = *info->object_wrapper();
   GroupStartIndexes starts(this);
   int start = starts.at(group);
   int end = starts.at(group + 1);
   // Find compilation info wrapper.
   int info_pos = -1;
   for (int i = start; i < end; i++) {
-    if (object_at(i) == info_wrapper) {
+    if (object_at(i) == info) {
       info_pos = i;
       break;
     }
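A design note on EnsureSpace above: before copying into a larger array, it first tries Compact(), which squeezes out cleared weak cells in place; only if nothing was reclaimed does the array actually grow. A standalone sketch of that compaction loop (simplified to a vector of optional entries; hypothetical types):

```cpp
#include <cassert>
#include <optional>
#include <vector>

// Simplified analogue of DependentCode::Compact(): pack live entries left,
// dropping cleared weak cells, and report whether anything was reclaimed.
// Arrays whose dependent code died get reused instead of growing.
bool Compact(std::vector<std::optional<int>>& entries) {
  size_t n = 0;
  for (auto& e : entries)
    if (e.has_value()) entries[n++] = e;
  bool reclaimed = n < entries.size();
  entries.resize(n);  // drop the cleared tail
  return reclaimed;
}

int main() {
  std::vector<std::optional<int>> deps = {1, std::nullopt, 3};
  assert(Compact(deps) && deps.size() == 2);
}
```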
@@ -12025,18 +12066,18 @@ void DependentCode::RemoveCompilationInfo(DependentCode::DependencyGroup group,

 #ifdef DEBUG
   for (int i = start; i < end - 1; i++) {
-    DCHECK(is_code_at(i) || compilation_info_at(i) != info);
+    DCHECK(object_at(i) != info);
   }
 #endif
 }


-bool DependentCode::Contains(DependencyGroup group, Code* code) {
+bool DependentCode::Contains(DependencyGroup group, WeakCell* code_cell) {
   GroupStartIndexes starts(this);
   int start = starts.at(group);
   int end = starts.at(group + 1);
   for (int i = start; i < end; i++) {
-    if (object_at(i) == code) return true;
+    if (object_at(i) == code_cell) return true;
   }
   return false;
 }
@@ -12054,15 +12095,24 @@ bool DependentCode::MarkCodeForDeoptimization(

   // Mark all the code that needs to be deoptimized.
   bool marked = false;
+  bool invalidate_embedded_objects = group == kWeakCodeGroup;
   for (int i = start; i < end; i++) {
-    if (is_code_at(i)) {
-      Code* code = code_at(i);
+    Object* obj = object_at(i);
+    if (obj->IsWeakCell()) {
+      WeakCell* cell = WeakCell::cast(obj);
+      if (cell->cleared()) continue;
+      Code* code = Code::cast(cell->value());
       if (!code->marked_for_deoptimization()) {
         SetMarkedForDeoptimization(code, group);
+        if (invalidate_embedded_objects) {
           code->InvalidateEmbeddedObjects();
+        }
         marked = true;
       }
     } else {
-      CompilationInfo* info = compilation_info_at(i);
+      DCHECK(obj->IsForeign());
+      CompilationInfo* info = reinterpret_cast<CompilationInfo*>(
+          Foreign::cast(obj)->foreign_address());
       info->AbortDueToDependencyChange();
     }
   }
@@ -12086,7 +12136,6 @@ void DependentCode::DeoptimizeDependentCodeGroup(
   DCHECK(AllowCodeDependencyChange::IsAllowed());
   DisallowHeapAllocation no_allocation_scope;
   bool marked = MarkCodeForDeoptimization(isolate, group);
-
   if (marked) Deoptimizer::DeoptimizeMarkedCode(isolate);
 }

@@ -13167,7 +13216,7 @@ void AllocationSite::AddDependentCompilationInfo(
     CompilationInfo* info) {
   Handle<DependentCode> dep(site->dependent_code());
   Handle<DependentCode> codes =
-      DependentCode::Insert(dep, group, info->object_wrapper());
+      DependentCode::InsertCompilationInfo(dep, group, info->object_wrapper());
   if (*codes != site->dependent_code()) site->set_dependent_code(*codes);
   info->dependencies(group)->Add(Handle<HeapObject>(*site), info->zone());
 }
@@ -15980,7 +16029,7 @@ void ObjectHashTable::RemoveEntry(int entry) {
 }


-Object* WeakHashTable::Lookup(Handle<Object> key) {
+Object* WeakHashTable::Lookup(Handle<HeapObject> key) {
   DisallowHeapAllocation no_gc;
   DCHECK(IsKey(*key));
   int entry = FindEntry(key);
@@ -15990,36 +16039,31 @@ Object* WeakHashTable::Lookup(Handle<Object> key) {


 Handle<WeakHashTable> WeakHashTable::Put(Handle<WeakHashTable> table,
-                                         Handle<Object> key,
-                                         Handle<Object> value) {
+                                         Handle<HeapObject> key,
+                                         Handle<HeapObject> value) {
   DCHECK(table->IsKey(*key));
   int entry = table->FindEntry(key);
   // Key is already in table, just overwrite value.
   if (entry != kNotFound) {
-    // TODO(ulan): Skipping write barrier is a temporary solution to avoid
-    // memory leaks. Remove this once we have special visitor for weak fixed
-    // arrays.
-    table->set(EntryToValueIndex(entry), *value, SKIP_WRITE_BARRIER);
+    table->set(EntryToValueIndex(entry), *value);
     return table;
   }

+  Handle<WeakCell> key_cell = key->GetIsolate()->factory()->NewWeakCell(key);
+
   // Check whether the hash table should be extended.
   table = EnsureCapacity(table, 1, key, TENURED);
-  table->AddEntry(table->FindInsertionEntry(table->Hash(key)), key, value);
+  table->AddEntry(table->FindInsertionEntry(table->Hash(key)), key_cell, value);
   return table;
 }


-void WeakHashTable::AddEntry(int entry,
-                             Handle<Object> key,
-                             Handle<Object> value) {
+void WeakHashTable::AddEntry(int entry, Handle<WeakCell> key_cell,
+                             Handle<HeapObject> value) {
   DisallowHeapAllocation no_allocation;
-  // TODO(ulan): Skipping write barrier is a temporary solution to avoid
-  // memory leaks. Remove this once we have special visitor for weak fixed
-  // arrays.
-  set(EntryToIndex(entry), *key, SKIP_WRITE_BARRIER);
-  set(EntryToValueIndex(entry), *value, SKIP_WRITE_BARRIER);
+  set(EntryToIndex(entry), *key_cell);
+  set(EntryToValueIndex(entry), *value);
   ElementAdded();
 }

@@ -16962,10 +17006,9 @@ Handle<Object> PropertyCell::SetValueInferType(Handle<PropertyCell> cell,
 // static
 void PropertyCell::AddDependentCompilationInfo(Handle<PropertyCell> cell,
                                                CompilationInfo* info) {
-  Handle<DependentCode> codes =
-      DependentCode::Insert(handle(cell->dependent_code(), info->isolate()),
-                            DependentCode::kPropertyCellChangedGroup,
-                            info->object_wrapper());
+  Handle<DependentCode> codes = DependentCode::InsertCompilationInfo(
+      handle(cell->dependent_code(), info->isolate()),
+      DependentCode::kPropertyCellChangedGroup, info->object_wrapper());
   if (*codes != cell->dependent_code()) cell->set_dependent_code(*codes);
   info->dependencies(DependentCode::kPropertyCellChangedGroup)->Add(
       cell, info->zone());
@@ -850,6 +850,7 @@ class ConsString;
 class DictionaryElementsAccessor;
 class ElementsAccessor;
 class FixedArrayBase;
+class FunctionLiteral;
 class GlobalObject;
 class LayoutDescriptor;
 class LookupIterator;
@@ -857,7 +858,6 @@ class ObjectVisitor;
 class StringStream;
 class TypeFeedbackVector;
 class WeakCell;
-class FunctionLiteral;

 // We cannot just say "class HeapType;" if it is created from a template... =8-?
 template<class> class TypeImpl;
@@ -4049,9 +4049,9 @@ class WeakHashTableShape : public BaseShape<Handle<Object> > {
 };


-// WeakHashTable maps keys that are arbitrary objects to object values.
-// It is used for the global weak hash table that maps objects
-// embedded in optimized code to dependent code lists.
+// WeakHashTable maps keys that are arbitrary heap objects to heap object
+// values. The table wraps the keys in weak cells and store values directly.
+// Thus it references keys weakly and values strongly.
 class WeakHashTable: public HashTable<WeakHashTable,
                                       WeakHashTableShape<2>,
                                       Handle<Object> > {
@@ -4062,27 +4062,18 @@ class WeakHashTable: public HashTable<WeakHashTable,

   // Looks up the value associated with the given key. The hole value is
   // returned in case the key is not present.
-  Object* Lookup(Handle<Object> key);
+  Object* Lookup(Handle<HeapObject> key);

   // Adds (or overwrites) the value associated with the given key. Mapping a
   // key to the hole value causes removal of the whole entry.
   MUST_USE_RESULT static Handle<WeakHashTable> Put(Handle<WeakHashTable> table,
-                                                   Handle<Object> key,
-                                                   Handle<Object> value);
-
-  // This function is called when heap verification is turned on.
-  void Zap(Object* value) {
-    int capacity = Capacity();
-    for (int i = 0; i < capacity; i++) {
-      set(EntryToIndex(i), value);
-      set(EntryToValueIndex(i), value);
-    }
-  }
+                                                   Handle<HeapObject> key,
+                                                   Handle<HeapObject> value);

  private:
   friend class MarkCompactCollector;

-  void AddEntry(int entry, Handle<Object> key, Handle<Object> value);
+  void AddEntry(int entry, Handle<WeakCell> key, Handle<HeapObject> value);

   // Returns the index to the value of an entry.
   static inline int EntryToValueIndex(int entry) {
@@ -5543,9 +5534,9 @@ class CompilationInfo;
 //
 // The first n elements are Smis, each of them specifies the number of codes
 // in the corresponding group. The subsequent elements contain grouped code
-// objects. The suffix of the array can be filled with the undefined value if
-// the number of codes is less than the length of the array. The order of the
-// code objects within a group is not preserved.
+// objects in weak cells. The suffix of the array can be filled with the
+// undefined value if the number of codes is less than the length of the
+// array. The order of the code objects within a group is not preserved.
 //
 // All code indexes used in the class are counted starting from the first
 // code object of the first group. In other words, code index 0 corresponds
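A worked example of the layout described in the comment above may help (group counts invented for illustration, assuming kGroupCount == 3):

```cpp
// DependentCode is a FixedArray. With kGroupCount == 3 and per-group entry
// counts {2, 0, 1}, the array looks like:
//
//   index 0..2 : Smis 2, 0, 1           (entry count for each group)
//   index 3, 4 : group 0 entries        (WeakCells, or Foreign wrappers of
//                                        CompilationInfo while compiling)
//   index 5    : group 2's single entry
//   index 6..  : undefined filler up to the array's allocated length
//
// "Code index 0" therefore means array index kCodesStartIndex (== kGroupCount).
```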
@@ -5599,15 +5590,21 @@ class DependentCode: public FixedArray {
     int start_indexes_[kGroupCount + 1];
   };

-  bool Contains(DependencyGroup group, Code* code);
-  static Handle<DependentCode> Insert(Handle<DependentCode> entries,
-                                      DependencyGroup group,
-                                      Handle<Object> object);
-  void UpdateToFinishedCode(DependencyGroup group,
-                            CompilationInfo* info,
-                            Code* code);
+  bool Contains(DependencyGroup group, WeakCell* code_cell);
+
+  static Handle<DependentCode> InsertCompilationInfo(
+      Handle<DependentCode> entries, DependencyGroup group,
+      Handle<Foreign> info);
+
+  static Handle<DependentCode> InsertWeakCode(Handle<DependentCode> entries,
+                                              DependencyGroup group,
+                                              Handle<WeakCell> code_cell);
+
+  void UpdateToFinishedCode(DependencyGroup group, Foreign* info,
+                            WeakCell* code_cell);
+
   void RemoveCompilationInfo(DependentCode::DependencyGroup group,
-                             CompilationInfo* info);
+                             Foreign* info);

   void DeoptimizeDependentCodeGroup(Isolate* isolate,
                                     DependentCode::DependencyGroup group);
@@ -5619,12 +5616,8 @@ class DependentCode: public FixedArray {
   // and the mark compact collector.
   inline int number_of_entries(DependencyGroup group);
   inline void set_number_of_entries(DependencyGroup group, int value);
-  inline bool is_code_at(int i);
-  inline Code* code_at(int i);
-  inline CompilationInfo* compilation_info_at(int i);
-  inline void set_object_at(int i, Object* object);
-  inline Object** slot_at(int i);
   inline Object* object_at(int i);
+  inline void set_object_at(int i, Object* object);
   inline void clear_at(int i);
   inline void copy(int from, int to);
   DECLARE_CAST(DependentCode)
@@ -5636,9 +5629,20 @@ class DependentCode: public FixedArray {
   static void SetMarkedForDeoptimization(Code* code, DependencyGroup group);

  private:
+  static Handle<DependentCode> Insert(Handle<DependentCode> entries,
+                                      DependencyGroup group,
+                                      Handle<Object> object);
+  static Handle<DependentCode> EnsureSpace(Handle<DependentCode> entries);
   // Make a room at the end of the given group by moving out the first
   // code objects of the subsequent groups.
   inline void ExtendGroup(DependencyGroup group);
+  // Compact by removing cleared weak cells and return true if there was
+  // any cleared weak cell.
+  bool Compact();
+  static int Grow(int number_of_entries) {
+    if (number_of_entries < 5) return number_of_entries + 1;
+    return number_of_entries * 5 / 4;
+  }
   static const int kCodesStartIndex = kGroupCount;
 };

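The growth policy introduced by Grow() above is additive below five entries, then geometric at a 5/4 ratio (integer division). A tiny standalone demo of how capacities evolve:

```cpp
#include <cstdio>

// Same arithmetic as DependentCode::Grow above.
static int Grow(int number_of_entries) {
  if (number_of_entries < 5) return number_of_entries + 1;
  return number_of_entries * 5 / 4;
}

int main() {
  for (int n = 1; n <= 12; n = Grow(n)) std::printf("%d ", n);
  std::printf("\n");  // prints: 1 2 3 4 5 6 7 8 10 12
}
```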
@@ -8473,7 +8477,7 @@ class AllocationSite: public Struct {
   // During mark compact we need to take special care for the dependent code
   // field.
   static const int kPointerFieldsBeginOffset = kTransitionInfoOffset;
-  static const int kPointerFieldsEndOffset = kDependentCodeOffset;
+  static const int kPointerFieldsEndOffset = kWeakNextOffset;

   // For other visitors, use the fixed body descriptor below.
   typedef FixedBodyDescriptor<HeapObject::kHeaderSize,
@@ -9741,7 +9745,7 @@ class PropertyCell: public Cell {
   static const int kSize = kDependentCodeOffset + kPointerSize;

   static const int kPointerFieldsBeginOffset = kValueOffset;
-  static const int kPointerFieldsEndOffset = kDependentCodeOffset;
+  static const int kPointerFieldsEndOffset = kSize;

   typedef FixedBodyDescriptor<kValueOffset,
                               kSize,
|
@ -682,8 +682,6 @@ void Deserializer::Deserialize(Isolate* isolate) {
|
||||
isolate_->heap()->undefined_value());
|
||||
}
|
||||
|
||||
isolate_->heap()->InitializeWeakObjectToCodeTable();
|
||||
|
||||
// Update data pointers to the external strings containing natives sources.
|
||||
for (int i = 0; i < Natives::GetBuiltinsCount(); i++) {
|
||||
Object* source = isolate_->heap()->natives_source_cache()->get(i);
|
||||
|
@ -4068,8 +4068,9 @@ TEST(EnsureAllocationSiteDependentCodesProcessed) {
|
||||
DependentCode::GroupStartIndexes starts(site->dependent_code());
|
||||
CHECK_GE(starts.number_of_entries(), 1);
|
||||
int index = starts.at(DependentCode::kAllocationSiteTransitionChangedGroup);
|
||||
CHECK(site->dependent_code()->is_code_at(index));
|
||||
Code* function_bar = site->dependent_code()->code_at(index);
|
||||
CHECK(site->dependent_code()->object_at(index)->IsWeakCell());
|
||||
Code* function_bar = Code::cast(
|
||||
WeakCell::cast(site->dependent_code()->object_at(index))->value());
|
||||
Handle<JSFunction> bar_handle =
|
||||
v8::Utils::OpenHandle(
|
||||
*v8::Handle<v8::Function>::Cast(
|
||||
@@ -4087,7 +4088,8 @@ TEST(EnsureAllocationSiteDependentCodesProcessed) {
   // longer referred to by dependent_code().
   DependentCode::GroupStartIndexes starts(site->dependent_code());
   int index = starts.at(DependentCode::kAllocationSiteTransitionChangedGroup);
-  CHECK(!(site->dependent_code()->is_code_at(index)));
+  CHECK(site->dependent_code()->object_at(index)->IsWeakCell() &&
+        WeakCell::cast(site->dependent_code()->object_at(index))->cleared());
 }
