PPC: [stubs] Optimize LoadGlobalViaContextStub and StoreGlobalViaContextStub.

Port d6ee366d5c

Original commit message:
    This is the initial round of optimizations for the
    LoadGlobalViaContextStub and StoreGlobalViaContextStub, basically
    turning them into platform code stubs to avoid the Crankshaft overhead
    in the fast case, and making the runtime interface cheaper.

R=bmeurer@chromium.org, dstence@us.ibm.com, michael_dawson@ca.ibm.com
BUG=chromium:510694
LOG=n

Review URL: https://codereview.chromium.org/1261473002

Cr-Commit-Position: refs/heads/master@{#29867}
This commit is contained in:
mbrandy 2015-07-27 04:19:46 -07:00 committed by Commit bot
parent e01f34fae6
commit 4ad22295d4
5 changed files with 222 additions and 49 deletions

View File

@@ -1378,17 +1378,19 @@ void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
DCHECK(var->index() > 0); DCHECK(var->index() > 0);
DCHECK(var->IsStaticGlobalObjectProperty()); DCHECK(var->IsStaticGlobalObjectProperty());
// Each var occupies two slots in the context: for reads and writes. // Each var occupies two slots in the context: for reads and writes.
int slot_index = var->index(); const int slot = var->index();
int depth = scope()->ContextChainLength(var->scope()); const int depth = scope()->ContextChainLength(var->scope());
__ mov(LoadGlobalViaContextDescriptor::DepthRegister(), if (depth <= LoadGlobalViaContextStub::kMaximumDepth) {
Operand(Smi::FromInt(depth))); __ mov(LoadGlobalViaContextDescriptor::SlotRegister(), Operand(slot));
__ mov(LoadGlobalViaContextDescriptor::SlotRegister(), __ mov(LoadGlobalViaContextDescriptor::NameRegister(),
Operand(Smi::FromInt(slot_index))); Operand(var->name()));
__ mov(LoadGlobalViaContextDescriptor::NameRegister(), LoadGlobalViaContextStub stub(isolate(), depth);
Operand(var->name())); __ CallStub(&stub);
LoadGlobalViaContextStub stub(isolate(), depth); } else {
__ CallStub(&stub); __ Push(Smi::FromInt(slot));
__ Push(var->name());
__ CallRuntime(Runtime::kLoadGlobalViaContext, 2);
}
} else { } else {
__ LoadP(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand()); __ LoadP(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
__ mov(LoadDescriptor::NameRegister(), Operand(var->name())); __ mov(LoadDescriptor::NameRegister(), Operand(var->name()));
@@ -2714,18 +2716,24 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
DCHECK(var->index() > 0); DCHECK(var->index() > 0);
DCHECK(var->IsStaticGlobalObjectProperty()); DCHECK(var->IsStaticGlobalObjectProperty());
// Each var occupies two slots in the context: for reads and writes. // Each var occupies two slots in the context: for reads and writes.
int slot_index = var->index() + 1; const int slot = var->index() + 1;
int depth = scope()->ContextChainLength(var->scope()); const int depth = scope()->ContextChainLength(var->scope());
__ mov(StoreGlobalViaContextDescriptor::DepthRegister(), if (depth <= StoreGlobalViaContextStub::kMaximumDepth) {
Operand(Smi::FromInt(depth))); __ mov(StoreGlobalViaContextDescriptor::SlotRegister(), Operand(slot));
__ mov(StoreGlobalViaContextDescriptor::SlotRegister(), __ mov(StoreGlobalViaContextDescriptor::NameRegister(),
Operand(Smi::FromInt(slot_index))); Operand(var->name()));
__ mov(StoreGlobalViaContextDescriptor::NameRegister(), DCHECK(StoreGlobalViaContextDescriptor::ValueRegister().is(r3));
Operand(var->name())); StoreGlobalViaContextStub stub(isolate(), depth, language_mode());
DCHECK(StoreGlobalViaContextDescriptor::ValueRegister().is(r3)); __ CallStub(&stub);
StoreGlobalViaContextStub stub(isolate(), depth, language_mode()); } else {
__ CallStub(&stub); __ Push(Smi::FromInt(slot));
__ Push(var->name());
__ push(r3);
__ CallRuntime(is_strict(language_mode())
? Runtime::kStoreGlobalViaContext_Strict
: Runtime::kStoreGlobalViaContext_Sloppy,
3);
}
} else if (var->mode() == LET && op != Token::INIT_LET) { } else if (var->mode() == LET && op != Token::INIT_LET) {
// Non-initializing assignment to let variable needs a write barrier. // Non-initializing assignment to let variable needs a write barrier.
DCHECK(!var->IsLookupSlot()); DCHECK(!var->IsLookupSlot());

View File

@@ -4208,7 +4208,7 @@ void NameDictionaryLookupStub::GeneratePositiveLookup(
__ srwi(scratch2, scratch2, Operand(Name::kHashShift)); __ srwi(scratch2, scratch2, Operand(Name::kHashShift));
__ and_(scratch2, scratch1, scratch2); __ and_(scratch2, scratch1, scratch2);
// Scale the index by multiplying by the element size. // Scale the index by multiplying by the entry size.
STATIC_ASSERT(NameDictionary::kEntrySize == 3); STATIC_ASSERT(NameDictionary::kEntrySize == 3);
// scratch2 = scratch2 * 3. // scratch2 = scratch2 * 3.
__ ShiftLeftImm(ip, scratch2, Operand(1)); __ ShiftLeftImm(ip, scratch2, Operand(1));
@@ -5313,6 +5313,162 @@ void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
} }
// Platform (PPC) fast path for loading a global variable that lives in a
// script context: walks up the context chain, loads the variable's
// PropertyCell, and returns its value unless it is the_hole, in which case
// it tail-calls the runtime.
// Register assignments match LoadGlobalViaContextDescriptor for PPC
// (r5 = slot index as a raw untagged int, r6 = name); the result is
// produced in r3. depth() is a compile-time parameter of the stub, so the
// context-chain walk below is unrolled at stub-generation time.
void LoadGlobalViaContextStub::Generate(MacroAssembler* masm) {
Register context = cp;
Register result = r3;
Register slot = r5;
Register name = r6;
Label slow_case;
// Go up the context chain to the script context.
// (Each iteration emits one load; `result` doubles as the context cursor.)
for (int i = 0; i < depth(); ++i) {
__ LoadP(result, ContextOperand(context, Context::PREVIOUS_INDEX));
context = result;
}
// Load the PropertyCell value at the specified slot.
// The slot register holds an untagged index, so scale by the pointer size
// to form the context-slot offset, then dereference the cell.
__ ShiftLeftImm(result, slot, Operand(kPointerSizeLog2));
__ add(result, context, result);
__ LoadP(result, ContextOperand(result));
__ LoadP(result, FieldMemOperand(result, PropertyCell::kValueOffset));
// If the result is not the_hole, return. Otherwise, handle in the runtime.
__ CompareRoot(result, Heap::kTheHoleValueRootIndex);
__ Ret(ne);
// Fallback to runtime.
// `slow_case` is only reached by falling through from the check above; the
// runtime entry expects a Smi slot, hence the SmiTag before the push.
__ bind(&slow_case);
__ SmiTag(slot);
__ Push(slot, name);
__ TailCallRuntime(Runtime::kLoadGlobalViaContext, 2, 1);
}
// Platform (PPC) fast path for storing to a global variable that lives in a
// script context. Handles the common PropertyCell states inline (kMutable
// stores, stores of an identical value, and kConstantType stores where the
// old and new value are both smis or share a map) and tail-calls the
// language-mode-specific runtime function for everything else.
// Register assignments match StoreGlobalViaContextDescriptor for PPC
// (r3 = value, r5 = slot index as a raw untagged int, r6 = name);
// r4 and r7-r10 are used as temporaries.
void StoreGlobalViaContextStub::Generate(MacroAssembler* masm) {
Register value = r3;
Register slot = r5;
Register name = r6;
Register cell = r4;
Register cell_details = r7;
Register cell_value = r8;
Register cell_value_map = r9;
Register scratch = r10;
Register context = cp;
// context_temp aliases `cell` (r4); that is safe because the cell is only
// loaded after the context walk is done.
Register context_temp = cell;
Label fast_heapobject_case, fast_smi_case, slow_case;
if (FLAG_debug_code) {
// the_hole is never stored through this stub (see the debug check below
// on the constant-cell path, which relies on this invariant).
__ CompareRoot(value, Heap::kTheHoleValueRootIndex);
__ Check(ne, kUnexpectedValue);
__ AssertName(name);
}
// Go up the context chain to the script context.
// depth() is a compile-time stub parameter, so this loop is unrolled.
for (int i = 0; i < depth(); i++) {
__ LoadP(context_temp, ContextOperand(context, Context::PREVIOUS_INDEX));
context = context_temp;
}
// Load the PropertyCell at the specified slot.
// `slot` is untagged, so scale it by the pointer size first.
__ ShiftLeftImm(cell, slot, Operand(kPointerSizeLog2));
__ add(cell, context, cell);
__ LoadP(cell, ContextOperand(cell));
// Load PropertyDetails for the cell (actually only the cell_type and kind).
// Mask away every other PropertyDetails field so the cmpi's below can
// compare against fully-encoded (cell_type | kind) constants.
__ LoadP(cell_details, FieldMemOperand(cell, PropertyCell::kDetailsOffset));
__ SmiUntag(cell_details);
__ andi(cell_details, cell_details,
Operand(PropertyDetails::PropertyCellTypeField::kMask |
PropertyDetails::KindField::kMask));
// Check if PropertyCell holds mutable data.
Label not_mutable_data;
__ cmpi(cell_details, Operand(PropertyDetails::PropertyCellTypeField::encode(
PropertyCellType::kMutable) |
PropertyDetails::KindField::encode(kData)));
__ bne(&not_mutable_data);
__ JumpIfSmi(value, &fast_smi_case);
__ bind(&fast_heapobject_case);
__ StoreP(value, FieldMemOperand(cell, PropertyCell::kValueOffset), r0);
// RecordWriteField clobbers the value register, so we copy it before the
// call. r7 (cell_details) is dead by this point on every path that
// reaches here, so it is reused for the copy.
__ mr(r7, value);
__ RecordWriteField(cell, PropertyCell::kValueOffset, r7, scratch,
kLRHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
OMIT_SMI_CHECK);
__ Ret();
__ bind(&not_mutable_data);
// Check if PropertyCell value matches the new value (relevant for Constant,
// ConstantType and Undefined cells).
Label not_same_value;
__ LoadP(cell_value, FieldMemOperand(cell, PropertyCell::kValueOffset));
__ cmp(cell_value, value);
__ bne(&not_same_value);
if (FLAG_debug_code) {
Label done;
// This can only be true for Constant, ConstantType and Undefined cells,
// because we never store the_hole via this stub.
__ cmpi(cell_details,
Operand(PropertyDetails::PropertyCellTypeField::encode(
PropertyCellType::kConstant) |
PropertyDetails::KindField::encode(kData)));
__ beq(&done);
__ cmpi(cell_details,
Operand(PropertyDetails::PropertyCellTypeField::encode(
PropertyCellType::kConstantType) |
PropertyDetails::KindField::encode(kData)));
__ beq(&done);
__ cmpi(cell_details,
Operand(PropertyDetails::PropertyCellTypeField::encode(
PropertyCellType::kUndefined) |
PropertyDetails::KindField::encode(kData)));
__ Check(eq, kUnexpectedValue);
__ bind(&done);
}
// Storing the same value back: nothing to do, no write barrier needed.
__ Ret();
__ bind(&not_same_value);
// Check if PropertyCell contains data with constant type.
// Any other remaining cell type must be handled in the runtime.
__ cmpi(cell_details, Operand(PropertyDetails::PropertyCellTypeField::encode(
PropertyCellType::kConstantType) |
PropertyDetails::KindField::encode(kData)));
__ bne(&slow_case);
// Now either both old and new values must be smis or both must be heap
// objects with same map.
Label value_is_heap_object;
__ JumpIfNotSmi(value, &value_is_heap_object);
__ JumpIfNotSmi(cell_value, &slow_case);
// Old and new values are smis, no need for a write barrier here.
__ bind(&fast_smi_case);
__ StoreP(value, FieldMemOperand(cell, PropertyCell::kValueOffset), r0);
__ Ret();
__ bind(&value_is_heap_object);
__ JumpIfSmi(cell_value, &slow_case);
__ LoadP(cell_value_map, FieldMemOperand(cell_value, HeapObject::kMapOffset));
__ LoadP(scratch, FieldMemOperand(value, HeapObject::kMapOffset));
__ cmp(cell_value_map, scratch);
__ beq(&fast_heapobject_case);
// Fallback to runtime.
// The runtime entries expect a Smi slot, hence the SmiTag before pushing.
__ bind(&slow_case);
__ SmiTag(slot);
__ Push(slot, name, value);
__ TailCallRuntime(is_strict(language_mode())
? Runtime::kStoreGlobalViaContext_Strict
: Runtime::kStoreGlobalViaContext_Sloppy,
3, 1);
}
static int AddressOffset(ExternalReference ref0, ExternalReference ref1) { static int AddressOffset(ExternalReference ref0, ExternalReference ref1) {
return ref0.address() - ref1.address(); return ref0.address() - ref1.address();
} }

View File

@@ -36,12 +36,10 @@ const Register VectorStoreICDescriptor::VectorRegister() { return r6; }
const Register StoreTransitionDescriptor::MapRegister() { return r6; } const Register StoreTransitionDescriptor::MapRegister() { return r6; }
const Register LoadGlobalViaContextDescriptor::DepthRegister() { return r4; }
const Register LoadGlobalViaContextDescriptor::SlotRegister() { return r5; } const Register LoadGlobalViaContextDescriptor::SlotRegister() { return r5; }
const Register LoadGlobalViaContextDescriptor::NameRegister() { return r6; } const Register LoadGlobalViaContextDescriptor::NameRegister() { return r6; }
const Register StoreGlobalViaContextDescriptor::DepthRegister() { return r4; }
const Register StoreGlobalViaContextDescriptor::SlotRegister() { return r5; } const Register StoreGlobalViaContextDescriptor::SlotRegister() { return r5; }
const Register StoreGlobalViaContextDescriptor::NameRegister() { return r6; } const Register StoreGlobalViaContextDescriptor::NameRegister() { return r6; }
const Register StoreGlobalViaContextDescriptor::ValueRegister() { return r3; } const Register StoreGlobalViaContextDescriptor::ValueRegister() { return r3; }

View File

@@ -3064,16 +3064,20 @@ void LCodeGen::DoLoadGlobalViaContext(LLoadGlobalViaContext* instr) {
DCHECK(ToRegister(instr->context()).is(cp)); DCHECK(ToRegister(instr->context()).is(cp));
DCHECK(ToRegister(instr->result()).is(r3)); DCHECK(ToRegister(instr->result()).is(r3));
__ mov(LoadGlobalViaContextDescriptor::DepthRegister(), int const slot = instr->slot_index();
Operand(Smi::FromInt(instr->depth()))); int const depth = instr->depth();
__ mov(LoadGlobalViaContextDescriptor::SlotRegister(), if (depth <= LoadGlobalViaContextStub::kMaximumDepth) {
Operand(Smi::FromInt(instr->slot_index()))); __ mov(LoadGlobalViaContextDescriptor::SlotRegister(), Operand(slot));
__ mov(LoadGlobalViaContextDescriptor::NameRegister(), __ mov(LoadGlobalViaContextDescriptor::NameRegister(),
Operand(instr->name())); Operand(instr->name()));
Handle<Code> stub =
Handle<Code> stub = CodeFactory::LoadGlobalViaContext(isolate(), depth).code();
CodeFactory::LoadGlobalViaContext(isolate(), instr->depth()).code(); CallCode(stub, RelocInfo::CODE_TARGET, instr);
CallCode(stub, RelocInfo::CODE_TARGET, instr); } else {
__ Push(Smi::FromInt(slot));
__ Push(instr->name());
__ CallRuntime(Runtime::kLoadGlobalViaContext, 2);
}
} }
@@ -4485,17 +4489,24 @@ void LCodeGen::DoStoreGlobalViaContext(LStoreGlobalViaContext* instr) {
DCHECK(ToRegister(instr->value()) DCHECK(ToRegister(instr->value())
.is(StoreGlobalViaContextDescriptor::ValueRegister())); .is(StoreGlobalViaContextDescriptor::ValueRegister()));
__ mov(StoreGlobalViaContextDescriptor::DepthRegister(), int const slot = instr->slot_index();
Operand(Smi::FromInt(instr->depth()))); int const depth = instr->depth();
__ mov(StoreGlobalViaContextDescriptor::SlotRegister(), if (depth <= StoreGlobalViaContextStub::kMaximumDepth) {
Operand(Smi::FromInt(instr->slot_index()))); __ mov(StoreGlobalViaContextDescriptor::SlotRegister(), Operand(slot));
__ mov(StoreGlobalViaContextDescriptor::NameRegister(), __ mov(StoreGlobalViaContextDescriptor::NameRegister(),
Operand(instr->name())); Operand(instr->name()));
Handle<Code> stub = CodeFactory::StoreGlobalViaContext(
Handle<Code> stub = isolate(), depth, instr->language_mode()).code();
CodeFactory::StoreGlobalViaContext(isolate(), instr->depth(), CallCode(stub, RelocInfo::CODE_TARGET, instr);
instr->language_mode()).code(); } else {
CallCode(stub, RelocInfo::CODE_TARGET, instr); __ Push(Smi::FromInt(slot));
__ Push(instr->name());
__ push(StoreGlobalViaContextDescriptor::ValueRegister());
__ CallRuntime(is_strict(instr->language_mode())
? Runtime::kStoreGlobalViaContext_Strict
: Runtime::kStoreGlobalViaContext_Sloppy,
3);
}
} }

View File

@@ -213,7 +213,7 @@ class MacroAssembler : public Assembler {
// |object| is the object being stored into, |value| is the object being // |object| is the object being stored into, |value| is the object being
// stored. value and scratch registers are clobbered by the operation. // stored. value and scratch registers are clobbered by the operation.
// The offset is the offset from the start of the object, not the offset from // The offset is the offset from the start of the object, not the offset from
// the tagged HeapObject pointer. For use with FieldOperand(reg, off). // the tagged HeapObject pointer. For use with FieldMemOperand(reg, off).
void RecordWriteField( void RecordWriteField(
Register object, int offset, Register value, Register scratch, Register object, int offset, Register value, Register scratch,
LinkRegisterStatus lr_status, SaveFPRegsMode save_fp, LinkRegisterStatus lr_status, SaveFPRegsMode save_fp,
@@ -1549,7 +1549,7 @@ class CodePatcher {
// ----------------------------------------------------------------------------- // -----------------------------------------------------------------------------
// Static helper functions. // Static helper functions.
inline MemOperand ContextOperand(Register context, int index) { inline MemOperand ContextOperand(Register context, int index = 0) {
return MemOperand(context, Context::SlotOffset(index)); return MemOperand(context, Context::SlotOffset(index));
} }