Revert of Migrate FastCloneShallowObjectStub to TurboFan (patchset #7 id:140001 of https://codereview.chromium.org/1838283003/ )

Reason for revert:
Makes some WebGL tests flaky: https://codereview.chromium.org/1866373002/

Bisection on the tryserver points to this CL as the culprit; unfortunately, the failure does not reproduce locally.

Original issue's description:
> Migrate FastCloneShallowObjectStub to TurboFan
>
> BUG=
>
> Committed: https://crrev.com/4c2b04542f263b2679194f9fb75672ebbe72b924
> Cr-Commit-Position: refs/heads/master@{#35330}

TBR=bmeurer@chromium.org,verwaest@chromium.org
# Not skipping CQ checks because original CL landed more than 1 day ago.
BUG=

Review URL: https://codereview.chromium.org/1875683002

Cr-Commit-Position: refs/heads/master@{#35353}
hablich 2016-04-08 04:50:45 -07:00 committed by Commit bot
parent f7aa8cc70e
commit 47cce8d6ed
15 changed files with 89 additions and 116 deletions


@@ -569,6 +569,79 @@ Handle<Code> FastCloneShallowArrayStub::GenerateCode() {
}

template <>
HValue* CodeStubGraphBuilder<FastCloneShallowObjectStub>::BuildCodeStub() {
  HValue* undefined = graph()->GetConstantUndefined();
  HValue* closure = GetParameter(0);
  HValue* literal_index = GetParameter(1);

  HValue* literals_array = Add<HLoadNamedField>(
      closure, nullptr, HObjectAccess::ForLiteralsPointer());

  HInstruction* allocation_site = Add<HLoadKeyed>(
      literals_array, literal_index, nullptr, nullptr, FAST_ELEMENTS,
      NEVER_RETURN_HOLE, LiteralsArray::kOffsetToFirstLiteral - kHeapObjectTag);

  IfBuilder checker(this);
  checker.IfNot<HCompareObjectEqAndBranch, HValue*>(allocation_site,
                                                    undefined);
  checker.And();

  HObjectAccess access = HObjectAccess::ForAllocationSiteOffset(
      AllocationSite::kTransitionInfoOffset);
  HInstruction* boilerplate =
      Add<HLoadNamedField>(allocation_site, nullptr, access);

  int length = casted_stub()->length();
  if (length == 0) {
    // Empty objects have some slack added to them.
    length = JSObject::kInitialGlobalObjectUnusedPropertiesCount;
  }
  int size = JSObject::kHeaderSize + length * kPointerSize;
  int object_size = size;
  if (FLAG_allocation_site_pretenuring) {
    size += AllocationMemento::kSize;
  }

  HValue* boilerplate_map =
      Add<HLoadNamedField>(boilerplate, nullptr, HObjectAccess::ForMap());
  HValue* boilerplate_size = Add<HLoadNamedField>(
      boilerplate_map, nullptr, HObjectAccess::ForMapInstanceSize());
  HValue* size_in_words = Add<HConstant>(object_size >> kPointerSizeLog2);
  checker.If<HCompareNumericAndBranch>(boilerplate_size, size_in_words,
                                       Token::EQ);
  checker.Then();

  HValue* size_in_bytes = Add<HConstant>(size);

  HInstruction* object = Add<HAllocate>(size_in_bytes, HType::JSObject(),
                                        NOT_TENURED, JS_OBJECT_TYPE);

  for (int i = 0; i < object_size; i += kPointerSize) {
    HObjectAccess access = HObjectAccess::ForObservableJSObjectOffset(i);
    Add<HStoreNamedField>(object, access,
                          Add<HLoadNamedField>(boilerplate, nullptr, access));
  }

  DCHECK(FLAG_allocation_site_pretenuring || (size == object_size));
  if (FLAG_allocation_site_pretenuring) {
    BuildCreateAllocationMemento(object, Add<HConstant>(object_size),
                                 allocation_site);
  }

  environment()->Push(object);
  checker.ElseDeopt(Deoptimizer::kUninitializedBoilerplateInFastClone);
  checker.End();
  return environment()->Pop();
}

Handle<Code> FastCloneShallowObjectStub::GenerateCode() {
  return DoGenerateCode(this);
}

template <>
HValue* CodeStubGraphBuilder<CreateAllocationSiteStub>::BuildCodeStub() {
  // This stub is performance sensitive, the generated code must be tuned
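The restored Hydrogen fast path only fires when the boilerplate's instance size, read off its map in words, matches the size implied by the literal's property count; otherwise the stub deopts to the runtime. A standalone sketch of that size computation, using hypothetical stand-ins for V8's layout constants (kHeaderSize, kPointerSize, and the empty-object slack):

#include <cstdio>

// Hypothetical stand-ins; not V8's real values.
constexpr int kHeaderSize = 3 * 8;    // map + properties + elements words
constexpr int kPointerSize = 8;
constexpr int kEmptyObjectSlack = 4;  // stands in for
                                      // kInitialGlobalObjectUnusedPropertiesCount

// Mirrors the stub: empty literals get slack, then the expected size is the
// header plus one pointer-sized slot per property.
int ExpectedCloneSize(int property_count) {
  int length = property_count == 0 ? kEmptyObjectSlack : property_count;
  return kHeaderSize + length * kPointerSize;
}

int main() {
  // The stub compares in words (instance size vs. object_size >>
  // kPointerSizeLog2); bytes are used here for simplicity.
  int boilerplate_instance_size = 80;  // assumed, in bytes
  bool fast_path_ok = boilerplate_instance_size == ExpectedCloneSize(7);
  std::printf("fast path ok: %d\n", fast_path_ok);
}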


@@ -3216,82 +3216,6 @@ void LoadIndexedInterceptorStub::GenerateAssembly(
                             slot, vector);
}

void FastCloneShallowObjectStub::GenerateAssembly(
    compiler::CodeStubAssembler* assembler) const {
  typedef compiler::CodeStubAssembler::Label Label;
  typedef compiler::Node Node;
  Label call_runtime(assembler);
  Node* closure = assembler->Parameter(0);
  Node* literals_index = assembler->Parameter(1);

  Node* undefined = assembler->UndefinedConstant();
  Node* literals_array =
      assembler->LoadObjectField(closure, JSFunction::kLiteralsOffset);
  Node* allocation_site = assembler->LoadFixedArrayElementSmiIndex(
      literals_array, literals_index,
      LiteralsArray::kFirstLiteralIndex * kPointerSize);
  assembler->GotoIf(assembler->WordEqual(allocation_site, undefined),
                    &call_runtime);

  Node* boilerplate = assembler->LoadObjectField(
      allocation_site, AllocationSite::kTransitionInfoOffset);

  int length = this->length();
  if (length == 0) {
    length = JSObject::kInitialGlobalObjectUnusedPropertiesCount;
  }
  int size = JSObject::kHeaderSize + length * kPointerSize;
  int object_size = size;
  if (FLAG_allocation_site_pretenuring) {
    size += AllocationMemento::kSize;
  }

  Node* boilerplate_map = assembler->LoadMap(boilerplate);
  Node* instance_size = assembler->LoadMapInstanceSize(boilerplate_map);
  Node* size_in_words =
      assembler->Int32Constant(object_size >> kPointerSizeLog2);
  assembler->GotoUnless(assembler->Word32Equal(instance_size, size_in_words),
                        &call_runtime);

  Node* copy = assembler->Allocate(size);

  for (int i = 0; i < size; i += kPointerSize) {
    // The Allocate above guarantees that the copy lies in new space. This
    // allows us to skip write barriers. This is necessary since we may also
    // be copying unboxed doubles.
    Node* field =
        assembler->LoadObjectField(boilerplate, i, MachineType::IntPtr());
    assembler->StoreObjectFieldNoWriteBarrier(
        copy, i, field, MachineType::PointerRepresentation());
  }

  if (FLAG_allocation_site_pretenuring) {
    Node* memento = assembler->InnerAllocate(copy, object_size);
    assembler->StoreObjectFieldNoWriteBarrier(
        memento, HeapObject::kMapOffset,
        assembler->LoadRoot(Heap::kAllocationMementoMapRootIndex));
    assembler->StoreObjectFieldNoWriteBarrier(
        memento, AllocationMemento::kAllocationSiteOffset, allocation_site);
    Node* memento_create_count = assembler->LoadObjectField(
        allocation_site, AllocationSite::kPretenureCreateCountOffset);
    memento_create_count = assembler->SmiAdd(
        memento_create_count, assembler->SmiConstant(Smi::FromInt(1)));
    assembler->StoreObjectFieldNoWriteBarrier(
        allocation_site, AllocationSite::kPretenureCreateCountOffset,
        memento_create_count);
  }

  // TODO(verwaest): Allocate and fill in double boxes.
  assembler->Return(copy);

  assembler->Bind(&call_runtime);
  Node* constant_properties = assembler->Parameter(2);
  Node* flags = assembler->Parameter(3);
  Node* context = assembler->Parameter(4);
  assembler->TailCallRuntime(Runtime::kCreateObjectLiteral, context, closure,
                             literals_index, constant_properties, flags);
}

template <class StateType>
void HydrogenCodeStub::TraceTransition(StateType from, StateType to) {
  // Note: Although a no-op transition is semantically OK, it is hinting at a

@@ -3438,6 +3362,14 @@ void FastCloneShallowArrayStub::InitializeDescriptor(
}

void FastCloneShallowObjectStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  FastCloneShallowObjectDescriptor call_descriptor(isolate());
  descriptor->Initialize(
      Runtime::FunctionForId(Runtime::kCreateObjectLiteral)->entry);
}

void CreateAllocationSiteStub::InitializeDescriptor(CodeStubDescriptor* d) {}
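The removed FastCloneShallowObjectStub::GenerateAssembly above copies the boilerplate field by field as raw words, and its comment explains why that is safe: the copy was just allocated in new space, so write barriers can be skipped, which also lets unboxed doubles travel as opaque words. A minimal sketch of that copy loop outside V8, treating both objects as plain word buffers (an assumed model, not real heap objects):

#include <cstddef>
#include <cstdint>

// Word-by-word copy in the spirit of the stub's LoadObjectField /
// StoreObjectFieldNoWriteBarrier loop. No write-barrier bookkeeping is
// needed because dst is assumed freshly allocated, and copying raw words
// moves unboxed doubles without reinterpreting them as tagged pointers.
void CopyObjectPayload(uintptr_t* dst, const uintptr_t* src,
                       size_t size_in_bytes) {
  for (size_t i = 0; i < size_in_bytes; i += sizeof(uintptr_t)) {
    dst[i / sizeof(uintptr_t)] = src[i / sizeof(uintptr_t)];
  }
}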


@@ -69,6 +69,7 @@ namespace internal {
  V(FastArrayPush)             \
  V(FastCloneRegExp)           \
  V(FastCloneShallowArray)     \
  V(FastCloneShallowObject)    \
  V(FastNewClosure)            \
  V(FastNewContext)            \
  V(FastNewObject)             \

@@ -115,7 +116,6 @@ namespace internal {
  V(BitwiseAnd)                \
  V(BitwiseOr)                 \
  V(BitwiseXor)                \
  V(FastCloneShallowObject)    \
  V(LessThan)                  \
  V(LessThanOrEqual)           \
  V(GreaterThan)               \

@@ -1055,25 +1055,26 @@ class FastCloneShallowArrayStub : public HydrogenCodeStub {
  DEFINE_HYDROGEN_CODE_STUB(FastCloneShallowArray, HydrogenCodeStub);
};

class FastCloneShallowObjectStub : public TurboFanCodeStub {
class FastCloneShallowObjectStub : public HydrogenCodeStub {
 public:
  // Maximum number of properties in copied object.
  static const int kMaximumClonedProperties = 6;

  FastCloneShallowObjectStub(Isolate* isolate, int length)
      : TurboFanCodeStub(isolate) {
      : HydrogenCodeStub(isolate) {
    DCHECK_GE(length, 0);
    DCHECK_LE(length, kMaximumClonedProperties);
    minor_key_ = LengthBits::encode(LengthBits::encode(length));
    set_sub_minor_key(LengthBits::encode(length));
  }

  int length() const { return LengthBits::decode(minor_key_); }
  int length() const { return LengthBits::decode(sub_minor_key()); }

 private:
  class LengthBits : public BitField<int, 0, 4> {};

  DEFINE_CALL_INTERFACE_DESCRIPTOR(FastCloneShallowObject);
  DEFINE_TURBOFAN_CODE_STUB(FastCloneShallowObject, TurboFanCodeStub);
  DEFINE_HYDROGEN_CODE_STUB(FastCloneShallowObject, HydrogenCodeStub);
};
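Both variants pack the property count into the same 4-bit BitField of the stub key; only the storage differs (minor_key_ directly for the TurboFan version, set_sub_minor_key() for the Hydrogen one). Note that the removed TurboFan line encodes twice, LengthBits::encode(LengthBits::encode(length)), which happens to be harmless only because the field starts at bit 0, where encode is idempotent. A self-contained sketch of the assumed shift-and-mask semantics:

#include <cassert>
#include <cstdint>

// Minimal stand-in for V8's BitField<int, 0, 4>: a 4-bit field at bit 0.
struct LengthBits {
  static constexpr uint32_t kShift = 0;
  static constexpr uint32_t kSize = 4;
  static constexpr uint32_t kMask = ((1u << kSize) - 1) << kShift;
  static uint32_t encode(int value) {
    return (static_cast<uint32_t>(value) << kShift) & kMask;
  }
  static int decode(uint32_t key) {
    return static_cast<int>((key & kMask) >> kShift);
  }
};

int main() {
  // kMaximumClonedProperties is 6, which fits comfortably in 4 bits.
  for (int length = 0; length <= 6; ++length) {
    assert(LengthBits::decode(LengthBits::encode(length)) == length);
    // With kShift == 0, double-encoding is a no-op, as in the removed line.
    assert(LengthBits::encode(LengthBits::encode(length)) ==
           LengthBits::encode(length));
  }
}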


@@ -436,12 +436,6 @@ Node* CodeStubAssembler::LoadObjectField(Node* object, int offset,
              IntPtrConstant(offset - kHeapObjectTag));
}

Node* CodeStubAssembler::StoreObjectFieldNoWriteBarrier(
    Node* object, int offset, Node* value, MachineRepresentation rep) {
  return StoreNoWriteBarrier(rep, object,
                             IntPtrConstant(offset - kHeapObjectTag), value);
}

Node* CodeStubAssembler::LoadHeapNumberValue(Node* object) {
  return Load(MachineType::Float64(), object,
              IntPtrConstant(HeapNumber::kValueOffset - kHeapObjectTag));

@@ -497,11 +491,6 @@ Node* CodeStubAssembler::LoadFixedArrayElementInt32Index(
  return Load(MachineType::AnyTagged(), object, offset);
}

Node* CodeStubAssembler::LoadMapInstanceSize(Node* map) {
  return Load(MachineType::Uint8(), map,
              IntPtrConstant(Map::kInstanceSizeOffset - kHeapObjectTag));
}

Node* CodeStubAssembler::LoadFixedArrayElementSmiIndex(Node* object,
                                                       Node* smi_index,
                                                       int additional_offset) {

@@ -671,10 +660,6 @@ Node* CodeStubAssembler::Allocate(int size_in_bytes, AllocationFlags flags) {
                   limit_address);
}

Node* CodeStubAssembler::InnerAllocate(Node* previous, int offset) {
  return IntPtrAdd(previous, IntPtrConstant(offset));
}

Node* CodeStubAssembler::AllocateHeapNumber() {
  Node* result = Allocate(HeapNumber::kSize, kNone);
  StoreMapNoWriteBarrier(result, HeapNumberMapConstant());
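InnerAllocate, removed here, is just a pointer bump: the stub's earlier Allocate already reserved room for the object plus its AllocationMemento, and InnerAllocate carves the memento out of the tail of that same reservation. A sketch over a plain byte buffer standing in for new space (an assumed model):

#include <cstdint>

// "previous" points at the start of an allocation already sized as
// object_size + memento_size; no new allocation happens here, we just
// hand back an interior pointer for the trailing memento.
uint8_t* InnerAllocate(uint8_t* previous, int offset) {
  return previous + offset;
}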


@@ -308,10 +308,6 @@ class CodeStubAssembler {
  // Load a field from an object on the heap.
  Node* LoadObjectField(Node* object, int offset,
                        MachineType rep = MachineType::AnyTagged());
  // Store a field to an object on the heap.
  Node* StoreObjectFieldNoWriteBarrier(
      Node* object, int offset, Node* value,
      MachineRepresentation rep = MachineRepresentation::kTagged);
  // Load the floating point value of a HeapNumber.
  Node* LoadHeapNumberValue(Node* object);
  // Store the floating point value of a HeapNumber.

@@ -331,8 +327,6 @@ class CodeStubAssembler {
  // Load the hash field of a name.
  Node* LoadNameHash(Node* name);
  // Load the instance size of a Map.
  Node* LoadMapInstanceSize(Node* map);
  // Load an array element from a FixedArray.
  Node* LoadFixedArrayElementInt32Index(Node* object, Node* int32_index,

@@ -347,7 +341,6 @@ class CodeStubAssembler {
  Node* AllocateHeapNumber();
  // Allocate a HeapNumber with a specific value.
  Node* AllocateHeapNumberWithValue(Node* value);
  Node* InnerAllocate(Node* previous, int offset);
  // Store an array element to a FixedArray.
  Node* StoreFixedArrayElementNoWriteBarrier(Node* object, Node* index,

@@ -1427,7 +1427,6 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  } else {
    FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
    __ CallStub(&stub);
    __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
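The same one-line restoration recurs in every full-codegen port that follows: after calling the stub, reload the context register (cp, esi, rsi, and so on) from its slot in the stack frame, presumably because the Hydrogen-generated stub does not guarantee to preserve it. In plain C++ terms, the restored instruction computes roughly the following (kContextOffset here is a placeholder, not V8's StandardFrameConstants value):

#include <cstdint>
#include <cstring>

constexpr int kContextOffset = -8;  // assumed frame-slot offset

// Roughly what "__ ldr(cp, MemOperand(fp, kContextOffset))" amounts to:
// read the saved context pointer back out of the current stack frame.
uintptr_t ReloadContext(const uint8_t* frame_pointer) {
  uintptr_t context;
  std::memcpy(&context, frame_pointer + kContextOffset, sizeof(context));
  return context;
}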


@@ -1414,7 +1414,6 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  } else {
    FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
    __ CallStub(&stub);
    __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);


@@ -1356,7 +1356,6 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
    __ mov(edx, Immediate(Smi::FromInt(flags)));
    FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
    __ CallStub(&stub);
    __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);


@@ -1424,7 +1424,6 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  } else {
    FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
    __ CallStub(&stub);
    __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);


@@ -1425,7 +1425,6 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  } else {
    FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
    __ CallStub(&stub);
    __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);


@@ -1389,7 +1389,6 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  } else {
    FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
    __ CallStub(&stub);
    __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);


@@ -1348,7 +1348,6 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  } else {
    FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
    __ CallStub(&stub);
    __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);


@@ -1382,7 +1382,6 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
    __ Move(rdx, Smi::FromInt(flags));
    FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
    __ CallStub(&stub);
    __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);


@@ -1348,7 +1348,6 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
    __ mov(edx, Immediate(Smi::FromInt(flags)));
    FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
    __ CallStub(&stub);
    __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);


@@ -2187,9 +2187,7 @@ void HeapObject::HeapObjectShortPrint(std::ostream& os) {  // NOLINT
      } else if (IsFalse()) {
        os << "<false>";
      } else {
        os << "<Odd Oddball: ";
        os << Oddball::cast(this)->to_string()->ToCString().get();
        os << ">";
        os << "<Odd Oddball>";
      }
      break;
    }
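Finally, objects.cc regains the verbose oddball printout, naming the oddball instead of emitting a generic tag. A tiny analogue of the restored branch, with a plain string standing in for the oddball's to_string() value (the sample value is assumed):

#include <iostream>
#include <string>

// Restored behavior: include the oddball's name in the short print.
void PrintOddOddball(std::ostream& os, const std::string& to_string) {
  os << "<Odd Oddball: " << to_string << ">";
}

int main() { PrintOddOddball(std::cout, "uninitialized"); }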