[literals] Reuse InitializeAllocationMemento in FastCloneShallowObject

- Update InitializeAllocationMemento to use the newer CSA helpers
- Fix AllocateJSArray to create AllocationMementos for empty arrays as well

Bug: v8:6211
Change-Id: I8731b04cdd500b877a54dee67f00f2899d91d86d
Reviewed-on: https://chromium-review.googlesource.com/566810
Commit-Queue: Camillo Bruni <cbruni@chromium.org>
Reviewed-by: Igor Sheludko <ishell@chromium.org>
Cr-Commit-Position: refs/heads/master@{#46594}
Author: Camillo Bruni <cbruni@chromium.org>
Date:   2017-07-11 15:41:06 +02:00
Committed by: Commit Bot
parent c7e8f23754
commit 6b34496034
3 changed files with 15 additions and 24 deletions
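
For context (not part of the patch): an AllocationMemento is a small heap struct written immediately behind a freshly allocated object. It points back at the literal's AllocationSite, which lets the GC count how many allocations from that site survive and decide whether to pretenure the site into old space. A hedged JavaScript sketch, with hypothetical names, of code that exercises both changed paths:

    // Illustrative only. Each call clones the object-literal boilerplate
    // (the FastCloneShallowObject path) and also allocates an empty array
    // literal, which previously received no AllocationMemento.
    function makeEntry(key) {
      return { key: key, values: [] };
    }
    const entries = [];
    for (let i = 0; i < 100000; i++) entries.push(makeEntry(i));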

@@ -619,7 +619,8 @@ Node* ConstructorBuiltinsAssembler::EmitFastCloneShallowObject(
   STATIC_ASSERT(JSObject::kMaxInstanceSize < kMaxRegularHeapObjectSize);
   Node* instance_size = TimesPointerSize(LoadMapInstanceSize(boilerplate_map));
   Node* allocation_size = instance_size;
-  if (FLAG_allocation_site_pretenuring) {
+  bool needs_allocation_memento = FLAG_allocation_site_pretenuring;
+  if (needs_allocation_memento) {
     // Prepare for inner-allocating the AllocationMemento.
     allocation_size =
         IntPtrAdd(instance_size, IntPtrConstant(AllocationMemento::kSize));
@@ -638,18 +639,8 @@ Node* ConstructorBuiltinsAssembler::EmitFastCloneShallowObject(
   // Initialize the AllocationMemento before potential GCs due to heap number
   // allocation when copying the in-object properties.
-  if (FLAG_allocation_site_pretenuring) {
-    Comment("Initialize AllocationMemento");
-    Node* memento = InnerAllocate(copy, instance_size);
-    StoreMapNoWriteBarrier(memento, Heap::kAllocationMementoMapRootIndex);
-    StoreObjectFieldNoWriteBarrier(
-        memento, AllocationMemento::kAllocationSiteOffset, allocation_site);
-    Node* memento_create_count = LoadObjectField(
-        allocation_site, AllocationSite::kPretenureCreateCountOffset);
-    memento_create_count = SmiAdd(memento_create_count, SmiConstant(1));
-    StoreObjectFieldNoWriteBarrier(allocation_site,
-                                   AllocationSite::kPretenureCreateCountOffset,
-                                   memento_create_count);
+  if (needs_allocation_memento) {
+    InitializeAllocationMemento(copy, instance_size, allocation_site);
   }
   {
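
Net effect in this builtin: the hand-rolled memento setup (map store, site-pointer store, pretenure-count bump) collapses into a single InitializeAllocationMemento call, and the flag check is captured once in needs_allocation_memento so the extra allocation size and the memento write cannot drift apart. A minimal sketch, assuming d8 and its --trace-pretenuring flag (output format varies by version), of how the resulting site feedback can be observed:

    // Run as: d8 --trace-pretenuring clone.js
    // The object literal below is cloned from a boilerplate on each call;
    // its memento bumps the site's pretenure create count every time.
    function makePoint(x, y) {
      return { x: x, y: y };
    }
    for (let i = 0; i < 100000; i++) makePoint(i, i);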

@@ -2284,7 +2284,8 @@ Node* CodeStubAssembler::AllocateUninitializedJSArray(ElementsKind kind,
                        Heap::kEmptyFixedArrayRootIndex);
   if (allocation_site != nullptr) {
-    InitializeAllocationMemento(array, JSArray::kSize, allocation_site);
+    InitializeAllocationMemento(array, IntPtrConstant(JSArray::kSize),
+                                allocation_site);
   }
   return array;
 }
@@ -2302,7 +2303,7 @@ Node* CodeStubAssembler::AllocateJSArray(ElementsKind kind, Node* array_map,
     // Array is empty. Use the shared empty fixed array instead of allocating a
     // new one.
     array = AllocateUninitializedJSArrayWithoutElements(kind, array_map, length,
-                                                        nullptr);
+                                                        allocation_site);
     StoreObjectFieldRoot(array, JSArray::kElementsOffset,
                          Heap::kEmptyFixedArrayRootIndex);
   } else {
@@ -2769,16 +2770,14 @@ Node* CodeStubAssembler::GrowElementsCapacity(
   return new_elements;
 }
 
-void CodeStubAssembler::InitializeAllocationMemento(Node* base_allocation,
-                                                    int base_allocation_size,
+void CodeStubAssembler::InitializeAllocationMemento(Node* base,
+                                                    Node* base_allocation_size,
                                                     Node* allocation_site) {
+  Comment("[Initialize AllocationMemento");
+  Node* memento = InnerAllocate(base, base_allocation_size);
+  StoreMapNoWriteBarrier(memento, Heap::kAllocationMementoMapRootIndex);
   StoreObjectFieldNoWriteBarrier(
-      base_allocation, AllocationMemento::kMapOffset + base_allocation_size,
-      HeapConstant(Handle<Map>(isolate()->heap()->allocation_memento_map())));
-  StoreObjectFieldNoWriteBarrier(
-      base_allocation,
-      AllocationMemento::kAllocationSiteOffset + base_allocation_size,
-      allocation_site);
+      memento, AllocationMemento::kAllocationSiteOffset, allocation_site);
   if (FLAG_allocation_site_pretenuring) {
     Node* count = LoadObjectField(allocation_site,
                                   AllocationSite::kPretenureCreateCountOffset);
@@ -2787,6 +2786,7 @@ void CodeStubAssembler::InitializeAllocationMemento(Node* base_allocation,
                                    AllocationSite::kPretenureCreateCountOffset,
                                    incremented_count);
   }
+  Comment("]");
 }
 
 Node* CodeStubAssembler::TryTaggedToFloat64(Node* value,
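
Two things changed here: AllocateJSArray now threads allocation_site through for the empty-array case instead of passing nullptr, and InitializeAllocationMemento inner-allocates the memento itself at base + base_allocation_size (now a Node*, so callers can pass computed sizes) instead of writing through int offsets into a caller-reserved region. A hedged JavaScript sketch of the behavior the first fix enables:

    // Before this patch, every [] below was allocated without a memento,
    // so its AllocationSite collected no pretenuring feedback; with the
    // fix, the empty-array site counts its allocations like any other.
    const buckets = [];
    for (let i = 0; i < 100000; i++) buckets.push([]);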

@@ -742,7 +742,7 @@ class V8_EXPORT_PRIVATE CodeStubAssembler : public compiler::CodeAssembler {
   // Allocation site manipulation
   void InitializeAllocationMemento(Node* base_allocation,
-                                   int base_allocation_size,
+                                   Node* base_allocation_size,
                                    Node* allocation_site);
   Node* TryTaggedToFloat64(Node* value, Label* if_valueisnotnumber);