Migrate FastCloneShallowObjectStub to TurboFan

BUG=

Committed: https://crrev.com/4c2b04542f263b2679194f9fb75672ebbe72b924
Cr-Commit-Position: refs/heads/master@{#35330}

Committed: https://crrev.com/7fdfdc12d4e4291348112ace4278a827f57f2eb9
Cr-Commit-Position: refs/heads/master@{#35494}

Review URL: https://codereview.chromium.org/1838283003

Cr-Commit-Position: refs/heads/master@{#35563}
verwaest 2016-04-18 01:11:44 -07:00 committed by Commit bot
parent e96c143171
commit 5325379982
15 changed files with 105 additions and 89 deletions

View File

@@ -549,79 +549,6 @@ Handle<Code> FastCloneShallowArrayStub::GenerateCode() {
}
template <>
HValue* CodeStubGraphBuilder<FastCloneShallowObjectStub>::BuildCodeStub() {
HValue* undefined = graph()->GetConstantUndefined();
HValue* closure = GetParameter(0);
HValue* literal_index = GetParameter(1);
HValue* literals_array = Add<HLoadNamedField>(
closure, nullptr, HObjectAccess::ForLiteralsPointer());
HInstruction* allocation_site = Add<HLoadKeyed>(
literals_array, literal_index, nullptr, nullptr, FAST_ELEMENTS,
NEVER_RETURN_HOLE, LiteralsArray::kOffsetToFirstLiteral - kHeapObjectTag);
IfBuilder checker(this);
checker.IfNot<HCompareObjectEqAndBranch, HValue*>(allocation_site,
undefined);
checker.And();
HObjectAccess access = HObjectAccess::ForAllocationSiteOffset(
AllocationSite::kTransitionInfoOffset);
HInstruction* boilerplate =
Add<HLoadNamedField>(allocation_site, nullptr, access);
int length = casted_stub()->length();
if (length == 0) {
// Empty objects have some slack added to them.
length = JSObject::kInitialGlobalObjectUnusedPropertiesCount;
}
int size = JSObject::kHeaderSize + length * kPointerSize;
int object_size = size;
if (FLAG_allocation_site_pretenuring) {
size += AllocationMemento::kSize;
}
HValue* boilerplate_map =
Add<HLoadNamedField>(boilerplate, nullptr, HObjectAccess::ForMap());
HValue* boilerplate_size = Add<HLoadNamedField>(
boilerplate_map, nullptr, HObjectAccess::ForMapInstanceSize());
HValue* size_in_words = Add<HConstant>(object_size >> kPointerSizeLog2);
checker.If<HCompareNumericAndBranch>(boilerplate_size,
size_in_words, Token::EQ);
checker.Then();
HValue* size_in_bytes = Add<HConstant>(size);
HInstruction* object = Add<HAllocate>(size_in_bytes, HType::JSObject(),
NOT_TENURED, JS_OBJECT_TYPE);
for (int i = 0; i < object_size; i += kPointerSize) {
HObjectAccess access = HObjectAccess::ForObservableJSObjectOffset(i);
Add<HStoreNamedField>(object, access,
Add<HLoadNamedField>(boilerplate, nullptr, access));
}
DCHECK(FLAG_allocation_site_pretenuring || (size == object_size));
if (FLAG_allocation_site_pretenuring) {
BuildCreateAllocationMemento(
object, Add<HConstant>(object_size), allocation_site);
}
environment()->Push(object);
checker.ElseDeopt(Deoptimizer::kUninitializedBoilerplateInFastClone);
checker.End();
return environment()->Pop();
}
Handle<Code> FastCloneShallowObjectStub::GenerateCode() {
return DoGenerateCode(this);
}
template <>
HValue* CodeStubGraphBuilder<CreateAllocationSiteStub>::BuildCodeStub() {
// This stub is performance sensitive, the generated code must be tuned

View File

@@ -3623,6 +3623,82 @@ void LoadIndexedInterceptorStub::GenerateAssembly(
slot, vector);
}
void FastCloneShallowObjectStub::GenerateAssembly(
compiler::CodeStubAssembler* assembler) const {
typedef compiler::CodeStubAssembler::Label Label;
typedef compiler::Node Node;
Label call_runtime(assembler);
Node* closure = assembler->Parameter(0);
Node* literals_index = assembler->Parameter(1);
Node* undefined = assembler->UndefinedConstant();
Node* literals_array =
assembler->LoadObjectField(closure, JSFunction::kLiteralsOffset);
Node* allocation_site = assembler->LoadFixedArrayElementSmiIndex(
literals_array, literals_index,
LiteralsArray::kFirstLiteralIndex * kPointerSize);
assembler->GotoIf(assembler->WordEqual(allocation_site, undefined),
&call_runtime);
Node* boilerplate = assembler->LoadObjectField(
allocation_site, AllocationSite::kTransitionInfoOffset);
int length = this->length();
if (length == 0) {
length = JSObject::kInitialGlobalObjectUnusedPropertiesCount;
}
int allocation_size = JSObject::kHeaderSize + length * kPointerSize;
int object_size = allocation_size;
if (FLAG_allocation_site_pretenuring) {
allocation_size += AllocationMemento::kSize;
}
Node* boilerplate_map = assembler->LoadMap(boilerplate);
Node* instance_size = assembler->LoadMapInstanceSize(boilerplate_map);
Node* size_in_words =
assembler->Int32Constant(object_size >> kPointerSizeLog2);
assembler->GotoUnless(assembler->Word32Equal(instance_size, size_in_words),
&call_runtime);
Node* copy = assembler->Allocate(allocation_size);
for (int i = 0; i < object_size; i += kPointerSize) {
// The Allocate above guarantees that the copy lies in new space. This
// allows us to skip write barriers. This is necessary since we may also be
// copying unboxed doubles.
Node* field =
assembler->LoadObjectField(boilerplate, i, MachineType::IntPtr());
assembler->StoreObjectFieldNoWriteBarrier(
copy, i, field, MachineType::PointerRepresentation());
}
if (FLAG_allocation_site_pretenuring) {
Node* memento = assembler->InnerAllocate(copy, object_size);
assembler->StoreObjectFieldNoWriteBarrier(
memento, HeapObject::kMapOffset,
assembler->LoadRoot(Heap::kAllocationMementoMapRootIndex));
assembler->StoreObjectFieldNoWriteBarrier(
memento, AllocationMemento::kAllocationSiteOffset, allocation_site);
Node* memento_create_count = assembler->LoadObjectField(
allocation_site, AllocationSite::kPretenureCreateCountOffset);
memento_create_count = assembler->SmiAdd(
memento_create_count, assembler->SmiConstant(Smi::FromInt(1)));
assembler->StoreObjectFieldNoWriteBarrier(
allocation_site, AllocationSite::kPretenureCreateCountOffset,
memento_create_count);
}
// TODO(verwaest): Allocate and fill in double boxes.
assembler->Return(copy);
assembler->Bind(&call_runtime);
Node* constant_properties = assembler->Parameter(2);
Node* flags = assembler->Parameter(3);
Node* context = assembler->Parameter(4);
assembler->TailCallRuntime(Runtime::kCreateObjectLiteral, context, closure,
literals_index, constant_properties, flags);
}
template<class StateType>
void HydrogenCodeStub::TraceTransition(StateType from, StateType to) {
// Note: Although a no-op transition is semantically OK, it is hinting at a
@@ -3765,14 +3841,6 @@ void FastCloneShallowArrayStub::InitializeDescriptor(
}
void FastCloneShallowObjectStub::InitializeDescriptor(
CodeStubDescriptor* descriptor) {
FastCloneShallowObjectDescriptor call_descriptor(isolate());
descriptor->Initialize(
Runtime::FunctionForId(Runtime::kCreateObjectLiteral)->entry);
}
void CreateAllocationSiteStub::InitializeDescriptor(CodeStubDescriptor* d) {}
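
For readers unfamiliar with the CodeStubAssembler, the fast path emitted by the new GenerateAssembly above amounts to: check that the boilerplate's instance size matches the size the stub was specialized for, allocate a block of that size, copy every field as a raw machine word (no write barrier, no per-field interpretation, which also keeps unboxed doubles intact), and otherwise tail-call Runtime::kCreateObjectLiteral. The following is a minimal standalone C++ sketch of that shape only; it uses no V8 types, and names such as Boilerplate and CloneShallow are invented for the illustration.

// clone_sketch.cc -- illustrative only, not V8 code.
#include <cstdint>
#include <cstdlib>
#include <cstring>

// Hypothetical stand-in for a boilerplate object: a size tag followed by a
// fixed number of fields that are copied as raw machine words, the same way
// the stub copies tagged pointers and unboxed doubles alike.
struct Boilerplate {
  int field_count;      // plays the role of the map's instance size check
  uintptr_t fields[6];  // kMaximumClonedProperties in the stub is also 6
};

// Fast path: clone only when the object has exactly the size the stub was
// specialized for; otherwise return nullptr so the caller takes the slow
// path (the stub itself tail-calls Runtime::kCreateObjectLiteral instead).
Boilerplate* CloneShallow(const Boilerplate* boilerplate, int expected_fields) {
  if (boilerplate->field_count != expected_fields) return nullptr;
  Boilerplate* copy =
      static_cast<Boilerplate*>(std::malloc(sizeof(Boilerplate)));
  if (copy == nullptr) return nullptr;
  // Raw word-by-word copy, mirroring the LoadObjectField /
  // StoreObjectFieldNoWriteBarrier loop: a fresh allocation needs no write
  // barrier, and copying bit patterns keeps unboxed doubles intact.
  std::memcpy(copy, boilerplate, sizeof(Boilerplate));
  return copy;
}

The real stub additionally bumps the AllocationSite's pretenure create count and may append an AllocationMemento behind the copy, as the pretenuring branch above shows.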

View File

@@ -70,7 +70,6 @@ namespace internal {
V(FastArrayPush) \
V(FastCloneRegExp) \
V(FastCloneShallowArray) \
V(FastCloneShallowObject) \
V(FastNewClosure) \
V(FastNewContext) \
V(FastNewObject) \
@@ -122,6 +121,7 @@ namespace internal {
V(BitwiseAnd) \
V(BitwiseOr) \
V(BitwiseXor) \
V(FastCloneShallowObject) \
V(LessThan) \
V(LessThanOrEqual) \
V(GreaterThan) \
@@ -1106,26 +1106,25 @@ class FastCloneShallowArrayStub : public HydrogenCodeStub {
DEFINE_HYDROGEN_CODE_STUB(FastCloneShallowArray, HydrogenCodeStub);
};
class FastCloneShallowObjectStub : public HydrogenCodeStub {
class FastCloneShallowObjectStub : public TurboFanCodeStub {
public:
// Maximum number of properties in copied object.
static const int kMaximumClonedProperties = 6;
FastCloneShallowObjectStub(Isolate* isolate, int length)
: HydrogenCodeStub(isolate) {
: TurboFanCodeStub(isolate) {
DCHECK_GE(length, 0);
DCHECK_LE(length, kMaximumClonedProperties);
set_sub_minor_key(LengthBits::encode(length));
minor_key_ = LengthBits::encode(length);
}
int length() const { return LengthBits::decode(sub_minor_key()); }
int length() const { return LengthBits::decode(minor_key_); }
private:
class LengthBits : public BitField<int, 0, 4> {};
DEFINE_CALL_INTERFACE_DESCRIPTOR(FastCloneShallowObject);
DEFINE_HYDROGEN_CODE_STUB(FastCloneShallowObject, HydrogenCodeStub);
DEFINE_TURBOFAN_CODE_STUB(FastCloneShallowObject, TurboFanCodeStub);
};
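
The stub's property count is packed into minor_key_ through LengthBits, a 4-bit BitField starting at bit 0. Below is a rough standalone sketch of that encode/decode pattern, assuming the usual shift-and-mask scheme; the real v8::internal::BitField adds more static checks.

#include <cassert>
#include <cstdint>

// Minimal shift-and-mask bit field in the shape of BitField<int, shift, size>.
template <int kShift, int kSize>
struct BitFieldSketch {
  static const uint32_t kMask = ((1u << kSize) - 1) << kShift;
  static uint32_t encode(int value) {
    assert(value >= 0 && value < (1 << kSize));
    return static_cast<uint32_t>(value) << kShift;
  }
  static int decode(uint32_t key) {
    return static_cast<int>((key & kMask) >> kShift);
  }
};

// LengthBits above is BitField<int, 0, 4>, so lengths 0..15 fit; the stub
// further caps the value at kMaximumClonedProperties (6).
using LengthBitsSketch = BitFieldSketch<0, 4>;

int main() {
  uint32_t minor_key = LengthBitsSketch::encode(6);
  assert(LengthBitsSketch::decode(minor_key) == 6);
}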

View File

@@ -523,6 +523,11 @@ Node* CodeStubAssembler::LoadFixedArrayElementInt32Index(
return Load(MachineType::AnyTagged(), object, offset);
}
Node* CodeStubAssembler::LoadMapInstanceSize(Node* map) {
return Load(MachineType::Uint8(), map,
IntPtrConstant(Map::kInstanceSizeOffset - kHeapObjectTag));
}
Node* CodeStubAssembler::LoadFixedArrayElementSmiIndex(Node* object,
Node* smi_index,
int additional_offset) {
@@ -704,6 +709,10 @@ Node* CodeStubAssembler::Allocate(int size_in_bytes, AllocationFlags flags) {
limit_address);
}
Node* CodeStubAssembler::InnerAllocate(Node* previous, int offset) {
return IntPtrAdd(previous, IntPtrConstant(offset));
}
Node* CodeStubAssembler::AllocateHeapNumber() {
Node* result = Allocate(HeapNumber::kSize, kNone);
StoreMapNoWriteBarrier(result, HeapNumberMapConstant());
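
InnerAllocate above is just previous + offset: it hands out an interior pointer into space that the enclosing Allocate call already reserved, which is how the stub places the AllocationMemento directly behind the cloned object when pretenuring is enabled. A hedged standalone sketch of that layout trick, with made-up names and plain malloc instead of the V8 heap:

#include <cstddef>
#include <cstdlib>

// A stand-in memento: it just records the allocation site, like
// AllocationMemento's kAllocationSiteOffset field.
struct MementoSketch {
  void* allocation_site;
};

struct CloneAndMemento {
  void* object;            // first object_size bytes of the block
  MementoSketch* memento;  // lives immediately behind the object
};

// One allocation covers object + memento, mirroring Allocate(allocation_size)
// followed by InnerAllocate(copy, object_size): the memento is an interior
// pointer into the same block, not a second allocation.
CloneAndMemento AllocateWithMemento(std::size_t object_size, void* site) {
  CloneAndMemento result = {nullptr, nullptr};
  char* block =
      static_cast<char*>(std::malloc(object_size + sizeof(MementoSketch)));
  if (block == nullptr) return result;
  result.object = block;
  result.memento = reinterpret_cast<MementoSketch*>(block + object_size);
  result.memento->allocation_site = site;
  return result;
}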

View File

@@ -340,6 +340,8 @@ class CodeStubAssembler {
// Load the hash field of a name.
Node* LoadNameHash(Node* name);
// Load the instance size of a Map.
Node* LoadMapInstanceSize(Node* map);
// Load an array element from a FixedArray.
Node* LoadFixedArrayElementInt32Index(Node* object, Node* int32_index,

View File

@@ -1427,6 +1427,7 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
} else {
FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
__ CallStub(&stub);
__ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}
PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);

View File

@@ -1414,6 +1414,7 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
} else {
FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
__ CallStub(&stub);
__ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}
PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);

View File

@@ -1356,6 +1356,7 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
__ mov(edx, Immediate(Smi::FromInt(flags)));
FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
__ CallStub(&stub);
__ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
}
PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);

View File

@@ -1424,6 +1424,7 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
} else {
FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
__ CallStub(&stub);
__ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}
PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);

View File

@@ -1425,6 +1425,7 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
} else {
FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
__ CallStub(&stub);
__ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}
PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);

View File

@@ -1389,6 +1389,7 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
} else {
FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
__ CallStub(&stub);
__ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}
PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);

View File

@@ -1348,6 +1348,7 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
} else {
FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
__ CallStub(&stub);
__ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}
PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);

View File

@@ -1382,6 +1382,7 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
__ Move(rdx, Smi::FromInt(flags));
FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
__ CallStub(&stub);
__ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
}
PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);

View File

@@ -1348,6 +1348,7 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
__ mov(edx, Immediate(Smi::FromInt(flags)));
FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
__ CallStub(&stub);
__ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
}
PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);

View File

@@ -2187,7 +2187,9 @@ void HeapObject::HeapObjectShortPrint(std::ostream& os) { // NOLINT
} else if (IsFalse()) {
os << "<false>";
} else {
os << "<Odd Oddball>";
os << "<Odd Oddball: ";
os << Oddball::cast(this)->to_string()->ToCString().get();
os << ">";
}
break;
}