Revert "[runtime][gc] Drop AllocationSite::weak_next field for sub-literals"

This reverts commit 380dba0a5c.

Reason for revert: Fails gc-stress: https://ci.chromium.org/p/v8/builders/luci.v8.ci/V8%20Mac64%20GC%20Stress/1471

Original change's description:
> [runtime][gc] Drop AllocationSite::weak_next field for sub-literals
> 
> Use an AllocationSite without the weak_next field for all allocations in a
> nested literal except for the root. The nested_site field is sufficient to
> link all the allocation sites of a nested literal; only the root is added to
> the heap's allocation sites list for the GC to traverse.
> 
> Change-Id: I946e63292c6d168197cd2a087f697c73cc431272
> Reviewed-on: https://chromium-review.googlesource.com/1101323
> Commit-Queue: Chandan Reddy <chandanreddy@google.com>
> Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
> Cr-Commit-Position: refs/heads/master@{#53813}
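
For context, here is a minimal standalone sketch of the two linking schemes the quoted description switches between. The Site struct, raw pointers, and ForeachSite helper are illustrative stand-ins, not V8's actual tagged-pointer types or APIs: under the original change only the root site of a literal sits on the heap-global weak_next list and sub-literal sites hang off it through nested_site; this revert puts every site back on the global list.

    // Sketch only (not V8 code): allocation sites linked through a global
    // weak_next list plus per-root nested_site chains.
    #include <cstdio>
    #include <functional>

    struct Site {
      Site* weak_next = nullptr;    // link in the heap-global list
      Site* nested_site = nullptr;  // link to the next sub-literal site
      int id = 0;
    };

    // Rough equivalent of the ForeachAllocationSite helper this revert
    // removes: walk the global list and, for each root site, also walk its
    // nested_site chain so sub-literal sites are visited too.
    void ForeachSite(Site* list, const std::function<void(Site*)>& visit) {
      for (Site* cur = list; cur != nullptr; cur = cur->weak_next) {
        visit(cur);
        for (Site* nested = cur->nested_site; nested != nullptr;
             nested = nested->nested_site) {
          visit(nested);
        }
      }
    }

    int main() {
      // One root site with two nested sub-literal sites, e.g. [[1], [2]].
      Site nested2{nullptr, nullptr, 3};
      Site nested1{nullptr, &nested2, 2};
      Site root{nullptr, &nested1, 1};
      Site* heap_list = &root;  // only the root is on the global list
      ForeachSite(heap_list, [](Site* s) { std::printf("site %d\n", s->id); });
      return 0;
    }

After the revert, nested sites are on the weak_next list as well, so the plain weak_next loops restored in the diff below are sufficient.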

TBR=ulan@chromium.org,cbruni@chromium.org,chandanreddy@google.com

Change-Id: Icc87027f14f917da3033db256c2535e08e2a4a34
No-Presubmit: true
No-Tree-Checks: true
No-Try: true
Reviewed-on: https://chromium-review.googlesource.com/1105159
Reviewed-by: Clemens Hammacher <clemensh@chromium.org>
Commit-Queue: Clemens Hammacher <clemensh@chromium.org>
Cr-Commit-Position: refs/heads/master@{#53815}
Authored by Clemens Hammacher on 2018-06-19 07:53:58 +00:00; committed by Commit Bot
parent 33f45e32fc
commit 0181cf0b31
11 changed files with 135 additions and 176 deletions


@@ -9394,7 +9394,7 @@ TNode<IntPtrT> CodeStubAssembler::PageFromAddress(TNode<IntPtrT> address) {
TNode<AllocationSite> CodeStubAssembler::CreateAllocationSiteInFeedbackVector(
SloppyTNode<FeedbackVector> feedback_vector, TNode<Smi> slot) {
TNode<IntPtrT> size = IntPtrConstant(AllocationSite::kSizeWithWeakNext);
TNode<IntPtrT> size = IntPtrConstant(AllocationSite::kSize);
Node* site = Allocate(size, CodeStubAssembler::kPretenured);
StoreMapNoWriteBarrier(site, Heap::kAllocationSiteWithWeakNextMapRootIndex);
// Should match AllocationSite::Initialize.


@@ -140,6 +140,7 @@ void Factory::InitializeAllocationMemento(AllocationMemento* memento,
AllocationSite* allocation_site) {
memento->set_map_after_allocation(*allocation_memento_map(),
SKIP_WRITE_BARRIER);
DCHECK(allocation_site->map() == *allocation_site_map());
memento->set_allocation_site(allocation_site, SKIP_WRITE_BARRIER);
if (FLAG_allocation_site_pretenuring) {
allocation_site->IncrementMementoCreateCount();
@@ -1750,18 +1751,15 @@ Handle<TransitionArray> Factory::NewTransitionArray(int number_of_transitions,
return array;
}
Handle<AllocationSite> Factory::NewAllocationSite(bool with_weak_next) {
Handle<Map> map = with_weak_next ? allocation_site_map()
: allocation_site_without_weaknext_map();
Handle<AllocationSite> Factory::NewAllocationSite() {
Handle<Map> map = allocation_site_map();
Handle<AllocationSite> site(AllocationSite::cast(New(map, TENURED)),
isolate());
site->Initialize();
if (with_weak_next) {
// Link the site
site->set_weak_next(isolate()->heap()->allocation_sites_list());
isolate()->heap()->set_allocation_sites_list(*site);
}
// Link the site
site->set_weak_next(isolate()->heap()->allocation_sites_list());
isolate()->heap()->set_allocation_sites_list(*site);
return site;
}
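
As a rough illustration of the factory change being reverted here, a simplified stand-in (SimpleHeap, NewSite, and the bool parameter are hypothetical, mirroring the removed Factory::NewAllocationSite(bool with_weak_next)): the flag decided whether a freshly created site was threaded onto the heap-global list.

    // Sketch only: pre-revert behaviour in miniature. Root sites
    // (with_weak_next == true) are prepended to the heap-global list;
    // nested sub-literal sites are left off it.
    struct Site {
      Site* weak_next = nullptr;
    };

    struct SimpleHeap {
      Site* allocation_sites_list = nullptr;
    };

    Site* NewSite(SimpleHeap* heap, bool with_weak_next) {
      Site* site = new Site();  // V8 allocates these in the old generation
      if (with_weak_next) {
        site->weak_next = heap->allocation_sites_list;
        heap->allocation_sites_list = site;
      }
      return site;
    }

With the revert applied the parameter is gone and every new site is linked, as the restored lines above show.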


@ -454,7 +454,7 @@ class V8_EXPORT_PRIVATE Factory {
int slack = 0);
// Allocate a tenured AllocationSite. Its payload is null.
Handle<AllocationSite> NewAllocationSite(bool with_weak_next);
Handle<AllocationSite> NewAllocationSite();
// Allocates and initializes a new Map.
Handle<Map> NewMap(InstanceType type, int instance_size,


@@ -870,16 +870,17 @@ void Heap::ProcessPretenuringFeedback() {
// Step 2: Deopt maybe tenured allocation sites if necessary.
bool deopt_maybe_tenured = DeoptMaybeTenuredAllocationSites();
if (deopt_maybe_tenured) {
ForeachAllocationSite(
allocation_sites_list(),
[&allocation_sites, &trigger_deoptimization](AllocationSite* site) {
DCHECK(site->IsAllocationSite());
allocation_sites++;
if (site->IsMaybeTenure()) {
site->set_deopt_dependent_code(true);
trigger_deoptimization = true;
}
});
Object* list_element = allocation_sites_list();
while (list_element->IsAllocationSite()) {
site = AllocationSite::cast(list_element);
DCHECK(site->IsAllocationSite());
allocation_sites++;
if (site->IsMaybeTenure()) {
site->set_deopt_dependent_code(true);
trigger_deoptimization = true;
}
list_element = site->weak_next();
}
}
if (trigger_deoptimization) {
@@ -918,15 +919,16 @@ void Heap::InvalidateCodeDeoptimizationData(Code* code) {
void Heap::DeoptMarkedAllocationSites() {
// TODO(hpayer): If iterating over the allocation sites list becomes a
// performance issue, use a cache data structure in heap instead.
ForeachAllocationSite(allocation_sites_list(), [this](AllocationSite* site) {
Object* list_element = allocation_sites_list();
while (list_element->IsAllocationSite()) {
AllocationSite* site = AllocationSite::cast(list_element);
if (site->deopt_dependent_code()) {
site->dependent_code()->MarkCodeForDeoptimization(
isolate_, DependentCode::kAllocationSiteTenuringChangedGroup);
site->set_deopt_dependent_code(false);
}
});
list_element = site->weak_next();
}
Deoptimizer::DeoptimizeMarkedCode(isolate_);
}
@@ -2456,37 +2458,20 @@ void Heap::ProcessWeakListRoots(WeakObjectRetainer* retainer) {
set_allocation_sites_list(retainer->RetainAs(allocation_sites_list()));
}
void Heap::ForeachAllocationSite(Object* list,
std::function<void(AllocationSite*)> visitor) {
DisallowHeapAllocation disallow_heap_allocation;
Object* current = list;
while (current->IsAllocationSite()) {
AllocationSite* site = AllocationSite::cast(current);
visitor(site);
Object* current_nested = site->nested_site();
while (current_nested->IsAllocationSite()) {
AllocationSite* nested_site = AllocationSite::cast(current_nested);
visitor(nested_site);
current_nested = nested_site->nested_site();
}
current = site->weak_next();
}
}
void Heap::ResetAllAllocationSitesDependentCode(PretenureFlag flag) {
DisallowHeapAllocation no_allocation_scope;
Object* cur = allocation_sites_list();
bool marked = false;
ForeachAllocationSite(allocation_sites_list(),
[&marked, flag, this](AllocationSite* site) {
if (site->GetPretenureMode() == flag) {
site->ResetPretenureDecision();
site->set_deopt_dependent_code(true);
marked = true;
RemoveAllocationSitePretenuringFeedback(site);
return;
}
});
while (cur->IsAllocationSite()) {
AllocationSite* casted = AllocationSite::cast(cur);
if (casted->GetPretenureMode() == flag) {
casted->ResetPretenureDecision();
casted->set_deopt_dependent_code(true);
marked = true;
RemoveAllocationSitePretenuringFeedback(casted);
}
cur = casted->weak_next();
}
if (marked) isolate_->stack_guard()->RequestDeoptMarkedAllocationSites();
}
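
The hunks above restore the same flat iteration style in several places; as a small standalone sketch (Site and its fields are placeholders, not V8 types), a plain weak_next walk is enough once every site is back on the global list:

    // Sketch only: post-revert iteration needs no recursion into nested_site,
    // because nested sites sit on the global weak_next list themselves.
    struct Site {
      Site* weak_next = nullptr;
      bool deopt_dependent_code = false;
    };

    int CountSitesMarkedForDeopt(Site* allocation_sites_list) {
      int count = 0;
      for (Site* cur = allocation_sites_list; cur != nullptr;
           cur = cur->weak_next) {
        if (cur->deopt_dependent_code) count++;
      }
      return count;
    }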

View File

@@ -805,11 +805,6 @@ class Heap {
// Used in CreateAllocationSiteStub and the (de)serializer.
Object** allocation_sites_list_address() { return &allocation_sites_list_; }
// Traverse all the allocation sites [nested_site and weak_next] in the list
// and call the visitor for each of them.
void ForeachAllocationSite(Object* list,
std::function<void(AllocationSite*)> visitor);
// Number of mark-sweeps.
int ms_count() const { return ms_count_; }


@@ -149,30 +149,6 @@ class JSFunction::BodyDescriptor final : public BodyDescriptorBase {
}
};
template <bool includeWeakNext>
class AllocationSite::BodyDescriptorImpl final : public BodyDescriptorBase {
public:
static bool IsValidSlot(Map* map, HeapObject* obj, int offset) {
return offset >= AllocationSite::kStartOffset && offset < GetEndOffset(map);
}
template <typename ObjectVisitor>
static inline void IterateBody(Map* map, HeapObject* obj, int object_size,
ObjectVisitor* v) {
IteratePointers(obj, AllocationSite::kStartOffset, GetEndOffset(map), v);
}
static inline int SizeOf(Map* map, HeapObject* object) {
return map->instance_size();
}
private:
static inline int GetEndOffset(Map* map) {
return includeWeakNext ? map->instance_size()
: AllocationSite::kSizeWithoutWeakNext;
}
};
class JSArrayBuffer::BodyDescriptor final : public BodyDescriptorBase {
public:
STATIC_ASSERT(kByteLengthOffset + kPointerSize == kBackingStoreOffset);


@@ -1140,10 +1140,6 @@ FixedArrayBase* JSObject::elements() const {
return static_cast<FixedArrayBase*>(array);
}
bool AllocationSite::HasWeakNext() const {
return map() == GetHeap()->allocation_site_map();
}
void AllocationSite::Initialize() {
set_transition_info_or_boilerplate(Smi::kZero);
SetElementsKind(GetInitialFastElementsKind());
@@ -2434,8 +2430,7 @@ SMI_ACCESSORS(AllocationSite, pretenure_create_count,
kPretenureCreateCountOffset)
ACCESSORS(AllocationSite, dependent_code, DependentCode,
kDependentCodeOffset)
ACCESSORS_CHECKED(AllocationSite, weak_next, Object, kWeakNextOffset,
HasWeakNext())
ACCESSORS(AllocationSite, weak_next, Object, kWeakNextOffset)
ACCESSORS(AllocationMemento, allocation_site, Object, kAllocationSiteOffset)
SMI_ACCESSORS(StackFrameInfo, line_number, kLineNumberIndex)


@@ -3921,9 +3921,6 @@ class AllocationSite: public Struct {
inline void Initialize();
// Checks whether the allocation site contains the weak_next field.
inline bool HasWeakNext() const;
// This method is expensive, it should only be called for reporting.
bool IsNested();
@@ -4012,17 +4009,23 @@ DEFINE_FIELD_OFFSET_CONSTANTS(HeapObject::kHeaderSize, ALLOCATION_SITE_FIELDS)
DEFINE_FIELD_OFFSET_CONSTANTS(HeapObject::kHeaderSize, ALLOCATION_SITE_FIELDS)
static const int kStartOffset = HeapObject::kHeaderSize;
// Need kSize to satisfy the Struct macro generation machinery.
static const int kSize = kSizeWithWeakNext;
template <bool includeWeakNext>
class BodyDescriptorImpl;
// During mark compact we need to take special care for the dependent code
// field.
static const int kPointerFieldsBeginOffset =
kTransitionInfoOrBoilerplateOffset;
static const int kPointerFieldsEndOffset = kWeakNextOffset;
// BodyDescriptor is used to traverse all the fields including weak_next
typedef BodyDescriptorImpl<true> BodyDescriptor;
// Ignores weakness.
typedef FixedBodyDescriptor<HeapObject::kHeaderSize, kSize, kSize>
BodyDescriptor;
// BodyDescriptorWeak is used to traverse all the pointers
// except for weak_next
typedef BodyDescriptorImpl<false> BodyDescriptorWeak;
// Respects weakness.
typedef FixedBodyDescriptor<kPointerFieldsBeginOffset,
kPointerFieldsEndOffset, kSize>
BodyDescriptorWeak;
private:
inline bool PretenuringDecisionMade() const;
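
The two FixedBodyDescriptor typedefs restored above differ only in the pointer-field range they cover. A small illustrative sketch (the offsets and the IteratePointers helper are made up, not V8's layout) of how ending the range before weak_next keeps that slot out of the strongly visited fields, leaving the weak list to be processed separately:

    // Sketch only: visit pointer slots in a fixed [start, end) byte range.
    // A descriptor whose range ends before the weak_next offset never visits
    // that slot strongly.
    #include <cstdio>

    constexpr int kHeaderSize = 8;               // hypothetical layout
    constexpr int kPointerFieldsEndOffset = 40;  // ends right before weak_next
    constexpr int kSize = 48;                    // includes weak_next

    template <int kStart, int kEnd>
    void IteratePointers(const char* who) {
      for (int offset = kStart; offset < kEnd;
           offset += static_cast<int>(sizeof(void*))) {
        std::printf("%s visits slot at offset %d\n", who, offset);
      }
    }

    int main() {
      IteratePointers<kHeaderSize, kSize>("BodyDescriptor (all fields)");
      IteratePointers<kHeaderSize, kPointerFieldsEndOffset>(
          "BodyDescriptorWeak (skips weak_next)");
      return 0;
    }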


@@ -1316,6 +1316,10 @@ void V8HeapExplorer::ExtractAllocationSiteReferences(int entry,
TagObject(site->dependent_code(), "(dependent code)");
SetInternalReference(site, entry, "dependent_code", site->dependent_code(),
AllocationSite::kDependentCodeOffset);
// Do not visit weak_next as it is not visited by the ObjectVisitor,
// and we are not interested in the weak_next field here.
STATIC_ASSERT(AllocationSite::kWeakNextOffset >=
AllocationSite::kPointerFieldsEndOffset);
}
class JSArrayBufferDataEntryAllocator : public HeapEntriesAllocator {
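
The restored STATIC_ASSERT encodes the invariant the comment above states; the same idea in a free-standing sketch with made-up offsets:

    // Sketch only: compile-time check that the weak_next slot lies outside
    // the strongly visited pointer-field range, so skipping it here is safe.
    constexpr int kPointerFieldsEndOffset = 40;  // hypothetical
    constexpr int kWeakNextOffset = 40;          // hypothetical
    static_assert(kWeakNextOffset >= kPointerFieldsEndOffset,
                  "weak_next must not be among the strongly visited fields");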


@@ -240,22 +240,19 @@ class AllocationSiteCreationContext : public AllocationSiteContext {
if (top().is_null()) {
// We are creating the top level AllocationSite as opposed to a nested
// AllocationSite.
InitializeTraversal(isolate()->factory()->NewAllocationSite(true));
InitializeTraversal(isolate()->factory()->NewAllocationSite());
scope_site = Handle<AllocationSite>(*top(), isolate());
if (FLAG_trace_creation_allocation_sites) {
PrintF("*** Creating top level %s AllocationSite %p\n", "Fat",
PrintF("*** Creating top level AllocationSite %p\n",
static_cast<void*>(*scope_site));
}
} else {
DCHECK(!current().is_null());
scope_site = isolate()->factory()->NewAllocationSite(false);
scope_site = isolate()->factory()->NewAllocationSite();
if (FLAG_trace_creation_allocation_sites) {
PrintF(
"*** Creating nested %s AllocationSite (top, current, new) (%p, "
"%p, "
"%p)\n",
"Slim", static_cast<void*>(*top()), static_cast<void*>(*current()),
static_cast<void*>(*scope_site));
PrintF("Creating nested site (top, current, new) (%p, %p, %p)\n",
static_cast<void*>(*top()), static_cast<void*>(*current()),
static_cast<void*>(*scope_site));
}
current()->set_nested_site(*scope_site);
update_current_site(*scope_site);
@@ -273,7 +270,7 @@ class AllocationSiteCreationContext : public AllocationSiteContext {
PrintF("*** Setting AllocationSite %p transition_info %p\n",
static_cast<void*>(*scope_site), static_cast<void*>(*object));
} else {
PrintF("*** Setting AllocationSite (%p, %p) transition_info %p\n",
PrintF("Setting AllocationSite (%p, %p) transition_info %p\n",
static_cast<void*>(*top()), static_cast<void*>(*scope_site),
static_cast<void*>(*object));
}


@@ -3489,30 +3489,13 @@ TEST(DisableInlineAllocation) {
static int AllocationSitesCount(Heap* heap) {
int count = 0;
for (Object* site = heap->allocation_sites_list();
site->IsAllocationSite();) {
AllocationSite* cur = AllocationSite::cast(site);
CHECK(cur->HasWeakNext());
site = cur->weak_next();
!(site->IsUndefined(heap->isolate()));
site = AllocationSite::cast(site)->weak_next()) {
count++;
}
return count;
}
static int SlimAllocationSiteCount(Heap* heap) {
int count = 0;
for (Object* weak_list = heap->allocation_sites_list();
weak_list->IsAllocationSite();) {
AllocationSite* weak_cur = AllocationSite::cast(weak_list);
for (Object* site = weak_cur->nested_site(); site->IsAllocationSite();) {
AllocationSite* cur = AllocationSite::cast(site);
CHECK(!cur->HasWeakNext());
site = cur->nested_site();
count++;
}
weak_list = weak_cur->weak_next();
}
return count;
}
TEST(EnsureAllocationSiteDependentCodesProcessed) {
if (FLAG_always_opt || !FLAG_opt) return;
@@ -3582,21 +3565,6 @@ TEST(EnsureAllocationSiteDependentCodesProcessed) {
WeakCell::cast(site->dependent_code()->object_at(0))->cleared());
}
void CheckNumberOfAllocations(Heap* heap, const char* source,
int expected_full_alloc,
int expected_slim_alloc) {
int prev_fat_alloc_count = AllocationSitesCount(heap);
int prev_slim_alloc_count = SlimAllocationSiteCount(heap);
CompileRun(source);
int fat_alloc_sites = AllocationSitesCount(heap) - prev_fat_alloc_count;
int slim_alloc_sites = SlimAllocationSiteCount(heap) - prev_slim_alloc_count;
CHECK_EQ(expected_full_alloc, fat_alloc_sites);
CHECK_EQ(expected_slim_alloc, slim_alloc_sites);
}
TEST(AllocationSiteCreation) {
FLAG_always_opt = false;
CcTest::InitializeVM();
@@ -3604,52 +3572,90 @@ TEST(AllocationSiteCreation) {
Heap* heap = isolate->heap();
HandleScope scope(isolate);
// Array literals.
CheckNumberOfAllocations(heap, "(function f1() { return []; })()", 1, 0);
CheckNumberOfAllocations(heap, "(function f2() { return [1, 2]; })()", 1, 0);
CheckNumberOfAllocations(heap, "(function f3() { return [[1], [2]]; })()", 1,
2);
int prev_count = 0;
int count = 0;
CheckNumberOfAllocations(heap,
"(function f4() { "
"return [0, [1, 1.1, 1.2, "
"], 1.5, [2.1, 2.2], 3];"
"})()",
1, 2);
// Array literals.
prev_count = AllocationSitesCount(heap);
CompileRun("(function f1() { return []; })()");
count = AllocationSitesCount(heap);
CHECK_EQ(1, count - prev_count);
prev_count = count;
CompileRun("(function f2() { return [1, 2]; })()");
count = AllocationSitesCount(heap);
CHECK_EQ(1, count - prev_count);
prev_count = count;
CompileRun("(function f3() { return [[1], [2]]; })()");
count = AllocationSitesCount(heap);
CHECK_EQ(3, count - prev_count);
prev_count = count;
CompileRun(
"(function f4() { "
"return [0, [1, 1.1, 1.2, "
"], 1.5, [2.1, 2.2], 3];"
"})()");
count = AllocationSitesCount(heap);
CHECK_EQ(3, count - prev_count);
// Object literals have lazy AllocationSites
CheckNumberOfAllocations(heap, "function f5() { return {}; }; f5(); ", 0, 0);
prev_count = AllocationSitesCount(heap);
CompileRun("function f5() { return {}; }; f5(); ");
count = AllocationSitesCount(heap);
CHECK_EQ(0, count - prev_count);
// No AllocationSites are created for the empty object literal.
for (int i = 0; i < 5; i++) {
CheckNumberOfAllocations(heap, "f5(); ", 0, 0);
prev_count = AllocationSitesCount(heap);
CompileRun("f5(); ");
count = AllocationSitesCount(heap);
CHECK_EQ(0, count - prev_count);
}
CheckNumberOfAllocations(heap, "function f6() { return {a:1}; }; f6(); ", 0,
0);
prev_count = AllocationSitesCount(heap);
CompileRun("function f6() { return {a:1}; }; f6(); ");
count = AllocationSitesCount(heap);
CHECK_EQ(0, count - prev_count);
prev_count = AllocationSitesCount(heap);
CompileRun("f6(); ");
count = AllocationSitesCount(heap);
CHECK_EQ(1, count - prev_count);
CheckNumberOfAllocations(heap, "f6(); ", 1, 0);
prev_count = AllocationSitesCount(heap);
CompileRun("function f7() { return {a:1, b:2}; }; f7(); ");
count = AllocationSitesCount(heap);
CHECK_EQ(0, count - prev_count);
prev_count = AllocationSitesCount(heap);
CompileRun("f7(); ");
count = AllocationSitesCount(heap);
CHECK_EQ(1, count - prev_count);
CheckNumberOfAllocations(heap, "function f7() { return {a:1, b:2}; }; f7(); ",
0, 0);
CheckNumberOfAllocations(heap, "f7(); ", 1, 0);
CheckNumberOfAllocations(heap,
"function f8() {"
"return {a:{}, b:{ a:2, c:{ d:{f:{}}} } }; "
"}; f8(); ",
0, 0);
CheckNumberOfAllocations(heap, "f8(); ", 1, 5);
prev_count = AllocationSitesCount(heap);
CompileRun(
"function f8() {"
"return {a:{}, b:{ a:2, c:{ d:{f:{}}} } }; "
"}; f8(); ");
count = AllocationSitesCount(heap);
CHECK_EQ(0, count - prev_count);
prev_count = AllocationSitesCount(heap);
CompileRun("f8(); ");
count = AllocationSitesCount(heap);
CHECK_EQ(6, count - prev_count);
// We currently eagerly create allocation sites if there are sub-arrays.
CheckNumberOfAllocations(heap,
"function f9() {"
"return {a:[1, 2, 3], b:{ a:2, c:{ d:{f:[]} } }}; "
"}; f9(); ",
1, 5);
prev_count = AllocationSitesCount(heap);
CompileRun(
"function f9() {"
"return {a:[1, 2, 3], b:{ a:2, c:{ d:{f:[]} } }}; "
"}; f9(); ");
count = AllocationSitesCount(heap);
CHECK_EQ(6, count - prev_count);
prev_count = AllocationSitesCount(heap);
CompileRun("f9(); ");
count = AllocationSitesCount(heap);
// No new AllocationSites created on the second invocation.
CheckNumberOfAllocations(heap, "f9(); ", 0, 0);
CHECK_EQ(0, count - prev_count);
}
TEST(CellsInOptimizedCodeAreWeak) {