[stubs] Optimize LoadGlobalViaContextStub and StoreGlobalViaContextStub.

This is the initial round of optimizations for
LoadGlobalViaContextStub and StoreGlobalViaContextStub: turn them into
platform code stubs to avoid the Crankshaft overhead in the fast case,
and make the runtime interface cheaper.

R=ishell@chromium.org
BUG=chromium:510694
LOG=n

Review URL: https://codereview.chromium.org/1238143002

Cr-Commit-Position: refs/heads/master@{#29834}
bmeurer authored 2015-07-24 00:16:46 -07:00, committed by Commit bot
parent 48d87b57c5
commit d6ee366d5c
36 changed files with 1383 additions and 567 deletions
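As a rough, self-contained C++ model of the fast path the new platform code stubs implement (a sketch only; the types and names below are stand-ins for V8's Context and PropertyCell internals, not the real classes):

// A minimal model of the LoadGlobalViaContextStub fast path. The_hole is
// modeled as nullptr; real V8 uses tagged pointers and a root-list compare.
#include <vector>

struct PropertyCellModel {
  void* value;  // nullptr stands in for the_hole
};

struct ContextModel {
  ContextModel* previous;                 // enclosing (previous) context
  std::vector<PropertyCellModel*> slots;  // script context slots
};

// 'depth' is baked into each stub instance, so the loop below is fully
// unrolled in the generated code; the slot index arrives untagged in a
// register instead of as a Smi.
void* LoadGlobalViaContextFastPath(ContextModel* context, int depth, int slot,
                                   bool* call_runtime) {
  for (int i = 0; i < depth; ++i) context = context->previous;
  void* value = context->slots[slot]->value;
  if (value != nullptr) {  // not the_hole: return without calling out
    *call_runtime = false;
    return value;
  }
  *call_runtime = true;    // the_hole: tail-call Runtime::kLoadGlobalViaContext
  return nullptr;
}

The store stub follows the same shape but dispatches on the PropertyCell's type before writing; the per-architecture diffs below show the generated fast paths.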

View File

@ -5057,6 +5057,160 @@ void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
}
void LoadGlobalViaContextStub::Generate(MacroAssembler* masm) {
Register context = cp;
Register result = r0;
Register slot = r2;
Register name = r3;
Label slow_case;
// Go up the context chain to the script context.
for (int i = 0; i < depth(); ++i) {
__ ldr(result, ContextOperand(context, Context::PREVIOUS_INDEX));
context = result;
}
// Load the PropertyCell value at the specified slot.
__ add(result, context, Operand(slot, LSL, kPointerSizeLog2));
__ ldr(result, ContextOperand(result));
__ ldr(result, FieldMemOperand(result, PropertyCell::kValueOffset));
// If the result is not the_hole, return. Otherwise, handle in the runtime.
__ CompareRoot(result, Heap::kTheHoleValueRootIndex);
__ Ret(ne);
// Fallback to runtime.
__ bind(&slow_case);
__ SmiTag(slot);
__ push(slot);
__ push(name);
__ TailCallRuntime(Runtime::kLoadGlobalViaContext, 2, 1);
}
void StoreGlobalViaContextStub::Generate(MacroAssembler* masm) {
Register value = r0;
Register slot = r2;
Register name = r3;
Register cell = r1;
Register cell_details = r4;
Register cell_value = r5;
Register cell_value_map = r6;
Register scratch = r9;
Register context = cp;
Register context_temp = cell;
Label fast_heapobject_case, fast_smi_case, slow_case;
if (FLAG_debug_code) {
__ CompareRoot(value, Heap::kTheHoleValueRootIndex);
__ Check(ne, kUnexpectedValue);
__ AssertName(name);
}
// Go up the context chain to the script context.
for (int i = 0; i < depth(); i++) {
__ ldr(context_temp, ContextOperand(context, Context::PREVIOUS_INDEX));
context = context_temp;
}
// Load the PropertyCell at the specified slot.
__ add(cell, context, Operand(slot, LSL, kPointerSizeLog2));
__ ldr(cell, ContextOperand(cell));
// Load PropertyDetails for the cell (actually only the cell_type and kind).
__ ldr(cell_details, FieldMemOperand(cell, PropertyCell::kDetailsOffset));
__ SmiUntag(cell_details);
__ and_(cell_details, cell_details,
Operand(PropertyDetails::PropertyCellTypeField::kMask |
PropertyDetails::KindField::kMask));
// Check if PropertyCell holds mutable data.
Label not_mutable_data;
__ cmp(cell_details, Operand(PropertyDetails::PropertyCellTypeField::encode(
PropertyCellType::kMutable) |
PropertyDetails::KindField::encode(kData)));
__ b(ne, &not_mutable_data);
__ JumpIfSmi(value, &fast_smi_case);
__ bind(&fast_heapobject_case);
__ str(value, FieldMemOperand(cell, PropertyCell::kValueOffset));
// RecordWriteField clobbers the value register, so we copy it before the
// call.
__ mov(r4, Operand(value));
__ RecordWriteField(cell, PropertyCell::kValueOffset, r4, scratch,
kLRHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
OMIT_SMI_CHECK);
__ Ret();
__ bind(&not_mutable_data);
// Check if PropertyCell value matches the new value (relevant for Constant,
// ConstantType and Undefined cells).
Label not_same_value;
__ ldr(cell_value, FieldMemOperand(cell, PropertyCell::kValueOffset));
__ cmp(cell_value, value);
__ b(ne, &not_same_value);
if (FLAG_debug_code) {
Label done;
// This can only be true for Constant, ConstantType and Undefined cells,
// because we never store the_hole via this stub.
__ cmp(cell_details, Operand(PropertyDetails::PropertyCellTypeField::encode(
PropertyCellType::kConstant) |
PropertyDetails::KindField::encode(kData)));
__ b(eq, &done);
__ cmp(cell_details, Operand(PropertyDetails::PropertyCellTypeField::encode(
PropertyCellType::kConstantType) |
PropertyDetails::KindField::encode(kData)));
__ b(eq, &done);
__ cmp(cell_details, Operand(PropertyDetails::PropertyCellTypeField::encode(
PropertyCellType::kUndefined) |
PropertyDetails::KindField::encode(kData)));
__ Check(eq, kUnexpectedValue);
__ bind(&done);
}
__ Ret();
__ bind(&not_same_value);
// Check if PropertyCell contains data with constant type.
__ cmp(cell_details, Operand(PropertyDetails::PropertyCellTypeField::encode(
PropertyCellType::kConstantType) |
PropertyDetails::KindField::encode(kData)));
__ b(ne, &slow_case);
// Now either both old and new values must be smis or both must be heap
// objects with same map.
Label value_is_heap_object;
__ JumpIfNotSmi(value, &value_is_heap_object);
__ JumpIfNotSmi(cell_value, &slow_case);
// Old and new values are smis, no need for a write barrier here.
__ bind(&fast_smi_case);
__ str(value, FieldMemOperand(cell, PropertyCell::kValueOffset));
__ Ret();
__ bind(&value_is_heap_object);
__ JumpIfSmi(cell_value, &slow_case);
__ ldr(cell_value_map, FieldMemOperand(cell_value, HeapObject::kMapOffset));
__ ldr(scratch, FieldMemOperand(value, HeapObject::kMapOffset));
__ cmp(cell_value_map, scratch);
__ b(eq, &fast_heapobject_case);
// Fallback to runtime.
__ bind(&slow_case);
__ SmiTag(slot);
__ push(slot);
__ push(name);
__ push(value);
__ TailCallRuntime(is_strict(language_mode())
? Runtime::kStoreGlobalViaContext_Strict
: Runtime::kStoreGlobalViaContext_Sloppy,
3, 1);
}
static int AddressOffset(ExternalReference ref0, ExternalReference ref1) {
return ref0.address() - ref1.address();
}
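
The ARM StoreGlobalViaContextStub::Generate above encodes a small decision tree over the PropertyCell's details. A condensed, self-contained C++ model of those decisions (a sketch with stand-in types; the real stub also checks the property kind and emits write barriers via RecordWriteField):

// Condensed model of StoreGlobalViaContextStub's fast-path decisions.
// CellTypeModel mirrors PropertyCellType; everything else is a stand-in.
enum class CellTypeModel { kUndefined, kConstant, kConstantType, kMutable };

struct CellModel {
  CellTypeModel type;
  void* value;            // current cell contents
  const void* value_map;  // stand-in for the map of 'value' (if heap object)
};

// Returns true if the store is handled inline; false means the stub
// tail-calls Runtime::kStoreGlobalViaContext_{Strict,Sloppy}.
bool StoreGlobalFastPath(CellModel* cell, void* new_value,
                         const void* new_value_map, bool new_value_is_smi,
                         bool cell_value_is_smi) {
  if (cell->type == CellTypeModel::kMutable) {
    cell->value = new_value;  // heap-object stores also emit a write barrier
    return true;
  }
  // Constant, ConstantType and Undefined cells: storing the same value again
  // is a no-op.
  if (cell->value == new_value) return true;
  if (cell->type == CellTypeModel::kConstantType) {
    // Either both old and new values are smis, or both are heap objects with
    // the same map; anything else goes to the runtime.
    if (new_value_is_smi && cell_value_is_smi) {
      cell->value = new_value;  // smi store: no write barrier needed
      return true;
    }
    if (!new_value_is_smi && !cell_value_is_smi &&
        cell->value_map == new_value_map) {
      cell->value = new_value;  // same map: store plus write barrier
      return true;
    }
  }
  return false;
}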

View File

@ -1415,17 +1415,19 @@ void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
DCHECK(var->index() > 0);
DCHECK(var->IsStaticGlobalObjectProperty());
// Each var occupies two slots in the context: for reads and writes.
int slot_index = var->index();
int depth = scope()->ContextChainLength(var->scope());
__ mov(LoadGlobalViaContextDescriptor::DepthRegister(),
Operand(Smi::FromInt(depth)));
__ mov(LoadGlobalViaContextDescriptor::SlotRegister(),
Operand(Smi::FromInt(slot_index)));
__ mov(LoadGlobalViaContextDescriptor::NameRegister(),
Operand(var->name()));
LoadGlobalViaContextStub stub(isolate(), depth);
__ CallStub(&stub);
const int slot = var->index();
const int depth = scope()->ContextChainLength(var->scope());
if (depth <= LoadGlobalViaContextStub::kMaximumDepth) {
__ mov(LoadGlobalViaContextDescriptor::SlotRegister(), Operand(slot));
__ mov(LoadGlobalViaContextDescriptor::NameRegister(),
Operand(var->name()));
LoadGlobalViaContextStub stub(isolate(), depth);
__ CallStub(&stub);
} else {
__ Push(Smi::FromInt(slot));
__ Push(var->name());
__ CallRuntime(Runtime::kLoadGlobalViaContext, 2);
}
} else {
__ ldr(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
__ mov(LoadDescriptor::NameRegister(), Operand(var->name()));
@ -2715,18 +2717,24 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
DCHECK(var->index() > 0);
DCHECK(var->IsStaticGlobalObjectProperty());
// Each var occupies two slots in the context: for reads and writes.
int slot_index = var->index() + 1;
int depth = scope()->ContextChainLength(var->scope());
__ mov(StoreGlobalViaContextDescriptor::DepthRegister(),
Operand(Smi::FromInt(depth)));
__ mov(StoreGlobalViaContextDescriptor::SlotRegister(),
Operand(Smi::FromInt(slot_index)));
__ mov(StoreGlobalViaContextDescriptor::NameRegister(),
Operand(var->name()));
DCHECK(StoreGlobalViaContextDescriptor::ValueRegister().is(r0));
StoreGlobalViaContextStub stub(isolate(), depth, language_mode());
__ CallStub(&stub);
const int slot = var->index() + 1;
const int depth = scope()->ContextChainLength(var->scope());
if (depth <= StoreGlobalViaContextStub::kMaximumDepth) {
__ mov(StoreGlobalViaContextDescriptor::SlotRegister(), Operand(slot));
__ mov(StoreGlobalViaContextDescriptor::NameRegister(),
Operand(var->name()));
DCHECK(StoreGlobalViaContextDescriptor::ValueRegister().is(r0));
StoreGlobalViaContextStub stub(isolate(), depth, language_mode());
__ CallStub(&stub);
} else {
__ Push(Smi::FromInt(slot));
__ Push(var->name());
__ push(r0);
__ CallRuntime(is_strict(language_mode())
? Runtime::kStoreGlobalViaContext_Strict
: Runtime::kStoreGlobalViaContext_Sloppy,
3);
}
} else if (var->mode() == LET && op != Token::INIT_LET) {
// Non-initializing assignment to let variable needs a write barrier.
DCHECK(!var->IsLookupSlot());

View File

@ -36,12 +36,10 @@ const Register VectorStoreICDescriptor::VectorRegister() { return r3; }
const Register StoreTransitionDescriptor::MapRegister() { return r3; }
const Register LoadGlobalViaContextDescriptor::DepthRegister() { return r1; }
const Register LoadGlobalViaContextDescriptor::SlotRegister() { return r2; }
const Register LoadGlobalViaContextDescriptor::NameRegister() { return r3; }
const Register StoreGlobalViaContextDescriptor::DepthRegister() { return r1; }
const Register StoreGlobalViaContextDescriptor::SlotRegister() { return r2; }
const Register StoreGlobalViaContextDescriptor::NameRegister() { return r3; }
const Register StoreGlobalViaContextDescriptor::ValueRegister() { return r0; }

View File

@ -2987,16 +2987,20 @@ void LCodeGen::DoLoadGlobalViaContext(LLoadGlobalViaContext* instr) {
DCHECK(ToRegister(instr->context()).is(cp));
DCHECK(ToRegister(instr->result()).is(r0));
__ mov(LoadGlobalViaContextDescriptor::DepthRegister(),
Operand(Smi::FromInt(instr->depth())));
__ mov(LoadGlobalViaContextDescriptor::SlotRegister(),
Operand(Smi::FromInt(instr->slot_index())));
__ mov(LoadGlobalViaContextDescriptor::NameRegister(),
Operand(instr->name()));
Handle<Code> stub =
CodeFactory::LoadGlobalViaContext(isolate(), instr->depth()).code();
CallCode(stub, RelocInfo::CODE_TARGET, instr);
int const slot = instr->slot_index();
int const depth = instr->depth();
if (depth <= LoadGlobalViaContextStub::kMaximumDepth) {
__ mov(LoadGlobalViaContextDescriptor::SlotRegister(), Operand(slot));
__ mov(LoadGlobalViaContextDescriptor::NameRegister(),
Operand(instr->name()));
Handle<Code> stub =
CodeFactory::LoadGlobalViaContext(isolate(), depth).code();
CallCode(stub, RelocInfo::CODE_TARGET, instr);
} else {
__ Push(Smi::FromInt(slot));
__ Push(instr->name());
__ CallRuntime(Runtime::kLoadGlobalViaContext, 2);
}
}
@ -4249,17 +4253,25 @@ void LCodeGen::DoStoreGlobalViaContext(LStoreGlobalViaContext* instr) {
DCHECK(ToRegister(instr->value())
.is(StoreGlobalViaContextDescriptor::ValueRegister()));
__ mov(StoreGlobalViaContextDescriptor::DepthRegister(),
Operand(Smi::FromInt(instr->depth())));
__ mov(StoreGlobalViaContextDescriptor::SlotRegister(),
Operand(Smi::FromInt(instr->slot_index())));
__ mov(StoreGlobalViaContextDescriptor::NameRegister(),
Operand(instr->name()));
Handle<Code> stub =
CodeFactory::StoreGlobalViaContext(isolate(), instr->depth(),
instr->language_mode()).code();
CallCode(stub, RelocInfo::CODE_TARGET, instr);
int const slot = instr->slot_index();
int const depth = instr->depth();
if (depth <= StoreGlobalViaContextStub::kMaximumDepth) {
__ mov(StoreGlobalViaContextDescriptor::SlotRegister(), Operand(slot));
__ mov(StoreGlobalViaContextDescriptor::NameRegister(),
Operand(instr->name()));
Handle<Code> stub = CodeFactory::StoreGlobalViaContext(
isolate(), depth, instr->language_mode())
.code();
CallCode(stub, RelocInfo::CODE_TARGET, instr);
} else {
__ Push(Smi::FromInt(slot));
__ Push(instr->name());
__ push(StoreGlobalViaContextDescriptor::ValueRegister());
__ CallRuntime(is_strict(instr->language_mode())
? Runtime::kStoreGlobalViaContext_Strict
: Runtime::kStoreGlobalViaContext_Sloppy,
3);
}
}

View File

@ -250,7 +250,7 @@ class MacroAssembler: public Assembler {
// |object| is the object being stored into, |value| is the object being
// stored. value and scratch registers are clobbered by the operation.
// The offset is the offset from the start of the object, not the offset from
// the tagged HeapObject pointer. For use with FieldOperand(reg, off).
// the tagged HeapObject pointer. For use with FieldMemOperand(reg, off).
void RecordWriteField(
Register object,
int offset,
@ -1532,7 +1532,7 @@ class CodePatcher {
// -----------------------------------------------------------------------------
// Static helper functions.
inline MemOperand ContextOperand(Register context, int index) {
inline MemOperand ContextOperand(Register context, int index = 0) {
return MemOperand(context, Context::SlotOffset(index));
}

View File

@ -5494,6 +5494,153 @@ void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
}
void LoadGlobalViaContextStub::Generate(MacroAssembler* masm) {
Register context = cp;
Register result = x0;
Register slot = x2;
Register name = x3;
Label slow_case;
// Go up the context chain to the script context.
for (int i = 0; i < depth(); ++i) {
__ Ldr(result, ContextMemOperand(context, Context::PREVIOUS_INDEX));
context = result;
}
// Load the PropertyCell value at the specified slot.
__ Add(result, context, Operand(slot, LSL, kPointerSizeLog2));
__ Ldr(result, ContextMemOperand(result));
__ Ldr(result, FieldMemOperand(result, PropertyCell::kValueOffset));
// If the result is not the_hole, return. Otherwise, handle in the runtime.
__ JumpIfRoot(result, Heap::kTheHoleValueRootIndex, &slow_case);
__ Ret();
// Fallback to runtime.
__ Bind(&slow_case);
__ SmiTag(slot);
__ Push(slot, name);
__ TailCallRuntime(Runtime::kLoadGlobalViaContext, 2, 1);
}
void StoreGlobalViaContextStub::Generate(MacroAssembler* masm) {
Register context = cp;
Register value = x0;
Register slot = x2;
Register name = x3;
Register context_temp = x10;
Register cell = x10;
Register cell_details = x11;
Register cell_value = x12;
Register cell_value_map = x13;
Register value_map = x14;
Label fast_heapobject_case, fast_smi_case, slow_case;
if (FLAG_debug_code) {
__ CompareRoot(value, Heap::kTheHoleValueRootIndex);
__ Check(ne, kUnexpectedValue);
__ AssertName(name);
}
// Go up the context chain to the script context.
for (int i = 0; i < depth(); i++) {
__ Ldr(context_temp, ContextMemOperand(context, Context::PREVIOUS_INDEX));
context = context_temp;
}
// Load the PropertyCell at the specified slot.
__ Add(cell, context, Operand(slot, LSL, kPointerSizeLog2));
__ Ldr(cell, ContextMemOperand(cell));
// Load PropertyDetails for the cell (actually only the cell_type and kind).
__ Ldr(cell_details,
UntagSmiFieldMemOperand(cell, PropertyCell::kDetailsOffset));
__ And(cell_details, cell_details,
PropertyDetails::PropertyCellTypeField::kMask |
PropertyDetails::KindField::kMask);
// Check if PropertyCell holds mutable data.
Label not_mutable_data;
__ Cmp(cell_details, PropertyDetails::PropertyCellTypeField::encode(
PropertyCellType::kMutable) |
PropertyDetails::KindField::encode(kData));
__ B(ne, &not_mutable_data);
__ JumpIfSmi(value, &fast_smi_case);
__ Bind(&fast_heapobject_case);
__ Str(value, FieldMemOperand(cell, PropertyCell::kValueOffset));
// RecordWriteField clobbers the value register, so we copy it before the
// call.
__ Mov(x11, value);
__ RecordWriteField(cell, PropertyCell::kValueOffset, x11, x12,
kLRHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
OMIT_SMI_CHECK);
__ Ret();
__ Bind(&not_mutable_data);
// Check if PropertyCell value matches the new value (relevant for Constant,
// ConstantType and Undefined cells).
Label not_same_value;
__ Ldr(cell_value, FieldMemOperand(cell, PropertyCell::kValueOffset));
__ Cmp(cell_value, value);
__ B(ne, &not_same_value);
if (FLAG_debug_code) {
Label done;
// This can only be true for Constant, ConstantType and Undefined cells,
// because we never store the_hole via this stub.
__ Cmp(cell_details, PropertyDetails::PropertyCellTypeField::encode(
PropertyCellType::kConstant) |
PropertyDetails::KindField::encode(kData));
__ B(eq, &done);
__ Cmp(cell_details, PropertyDetails::PropertyCellTypeField::encode(
PropertyCellType::kConstantType) |
PropertyDetails::KindField::encode(kData));
__ B(eq, &done);
__ Cmp(cell_details, PropertyDetails::PropertyCellTypeField::encode(
PropertyCellType::kUndefined) |
PropertyDetails::KindField::encode(kData));
__ Check(eq, kUnexpectedValue);
__ Bind(&done);
}
__ Ret();
__ Bind(&not_same_value);
// Check if PropertyCell contains data with constant type.
__ Cmp(cell_details, PropertyDetails::PropertyCellTypeField::encode(
PropertyCellType::kConstantType) |
PropertyDetails::KindField::encode(kData));
__ B(ne, &slow_case);
// Now either both old and new values must be smis or both must be heap
// objects with same map.
Label value_is_heap_object;
__ JumpIfNotSmi(value, &value_is_heap_object);
__ JumpIfNotSmi(cell_value, &slow_case);
// Old and new values are smis, no need for a write barrier here.
__ Bind(&fast_smi_case);
__ Str(value, FieldMemOperand(cell, PropertyCell::kValueOffset));
__ Ret();
__ Bind(&value_is_heap_object);
__ JumpIfSmi(cell_value, &slow_case);
__ Ldr(cell_value_map, FieldMemOperand(cell_value, HeapObject::kMapOffset));
__ Ldr(value_map, FieldMemOperand(value, HeapObject::kMapOffset));
__ Cmp(cell_value_map, value_map);
__ B(eq, &fast_heapobject_case);
// Fall back to the runtime.
__ Bind(&slow_case);
__ SmiTag(slot);
__ Push(slot, name, value);
__ TailCallRuntime(is_strict(language_mode())
? Runtime::kStoreGlobalViaContext_Strict
: Runtime::kStoreGlobalViaContext_Sloppy,
3, 1);
}
// The number of register that CallApiFunctionAndReturn will need to save on
// the stack. The space for these registers need to be allocated in the
// ExitFrame before calling CallApiFunctionAndReturn.

View File

@ -1398,17 +1398,18 @@ void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
DCHECK(var->index() > 0);
DCHECK(var->IsStaticGlobalObjectProperty());
// Each var occupies two slots in the context: for reads and writes.
int slot_index = var->index();
int depth = scope()->ContextChainLength(var->scope());
__ Mov(LoadGlobalViaContextDescriptor::DepthRegister(),
Operand(Smi::FromInt(depth)));
__ Mov(LoadGlobalViaContextDescriptor::SlotRegister(),
Operand(Smi::FromInt(slot_index)));
__ Mov(LoadGlobalViaContextDescriptor::NameRegister(),
Operand(var->name()));
LoadGlobalViaContextStub stub(isolate(), depth);
__ CallStub(&stub);
int const slot = var->index();
int const depth = scope()->ContextChainLength(var->scope());
if (depth <= LoadGlobalViaContextStub::kMaximumDepth) {
__ Mov(LoadGlobalViaContextDescriptor::SlotRegister(), slot);
__ Mov(LoadGlobalViaContextDescriptor::NameRegister(), var->name());
LoadGlobalViaContextStub stub(isolate(), depth);
__ CallStub(&stub);
} else {
__ Push(Smi::FromInt(slot));
__ Push(var->name());
__ CallRuntime(Runtime::kLoadGlobalViaContext, 2);
}
} else {
__ Ldr(LoadDescriptor::ReceiverRegister(), GlobalObjectMemOperand());
__ Mov(LoadDescriptor::NameRegister(), Operand(var->name()));
@ -2401,18 +2402,23 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
DCHECK(var->index() > 0);
DCHECK(var->IsStaticGlobalObjectProperty());
// Each var occupies two slots in the context: for reads and writes.
int slot_index = var->index() + 1;
int depth = scope()->ContextChainLength(var->scope());
__ Mov(StoreGlobalViaContextDescriptor::DepthRegister(),
Operand(Smi::FromInt(depth)));
__ Mov(StoreGlobalViaContextDescriptor::SlotRegister(),
Operand(Smi::FromInt(slot_index)));
__ Mov(StoreGlobalViaContextDescriptor::NameRegister(),
Operand(var->name()));
DCHECK(StoreGlobalViaContextDescriptor::ValueRegister().is(x0));
StoreGlobalViaContextStub stub(isolate(), depth, language_mode());
__ CallStub(&stub);
int const slot = var->index() + 1;
int const depth = scope()->ContextChainLength(var->scope());
if (depth <= StoreGlobalViaContextStub::kMaximumDepth) {
__ Mov(StoreGlobalViaContextDescriptor::SlotRegister(), slot);
__ Mov(StoreGlobalViaContextDescriptor::NameRegister(), var->name());
DCHECK(StoreGlobalViaContextDescriptor::ValueRegister().is(x0));
StoreGlobalViaContextStub stub(isolate(), depth, language_mode());
__ CallStub(&stub);
} else {
__ Push(Smi::FromInt(slot));
__ Push(var->name());
__ Push(x0);
__ CallRuntime(is_strict(language_mode())
? Runtime::kStoreGlobalViaContext_Strict
: Runtime::kStoreGlobalViaContext_Sloppy,
3);
}
} else if (var->mode() == LET && op != Token::INIT_LET) {
// Non-initializing assignment to let variable needs a write barrier.
DCHECK(!var->IsLookupSlot());

View File

@ -36,12 +36,10 @@ const Register VectorStoreICDescriptor::VectorRegister() { return x3; }
const Register StoreTransitionDescriptor::MapRegister() { return x3; }
const Register LoadGlobalViaContextDescriptor::DepthRegister() { return x1; }
const Register LoadGlobalViaContextDescriptor::SlotRegister() { return x2; }
const Register LoadGlobalViaContextDescriptor::NameRegister() { return x3; }
const Register StoreGlobalViaContextDescriptor::DepthRegister() { return x1; }
const Register StoreGlobalViaContextDescriptor::SlotRegister() { return x2; }
const Register StoreGlobalViaContextDescriptor::NameRegister() { return x3; }
const Register StoreGlobalViaContextDescriptor::ValueRegister() { return x0; }

View File

@ -3384,16 +3384,20 @@ void LCodeGen::DoLoadGlobalViaContext(LLoadGlobalViaContext* instr) {
DCHECK(ToRegister(instr->context()).is(cp));
DCHECK(ToRegister(instr->result()).is(x0));
__ Mov(LoadGlobalViaContextDescriptor::DepthRegister(),
Operand(Smi::FromInt(instr->depth())));
__ Mov(LoadGlobalViaContextDescriptor::SlotRegister(),
Operand(Smi::FromInt(instr->slot_index())));
__ Mov(LoadGlobalViaContextDescriptor::NameRegister(),
Operand(instr->name()));
Handle<Code> stub =
CodeFactory::LoadGlobalViaContext(isolate(), instr->depth()).code();
CallCode(stub, RelocInfo::CODE_TARGET, instr);
int const slot = instr->slot_index();
int const depth = instr->depth();
if (depth <= LoadGlobalViaContextStub::kMaximumDepth) {
__ Mov(LoadGlobalViaContextDescriptor::SlotRegister(), Operand(slot));
__ Mov(LoadGlobalViaContextDescriptor::NameRegister(),
Operand(instr->name()));
Handle<Code> stub =
CodeFactory::LoadGlobalViaContext(isolate(), depth).code();
CallCode(stub, RelocInfo::CODE_TARGET, instr);
} else {
__ Push(Smi::FromInt(slot));
__ Push(instr->name());
__ CallRuntime(Runtime::kLoadGlobalViaContext, 2);
}
}
@ -5540,17 +5544,25 @@ void LCodeGen::DoStoreGlobalViaContext(LStoreGlobalViaContext* instr) {
DCHECK(ToRegister(instr->value())
.is(StoreGlobalViaContextDescriptor::ValueRegister()));
__ Mov(StoreGlobalViaContextDescriptor::DepthRegister(),
Operand(Smi::FromInt(instr->depth())));
__ Mov(StoreGlobalViaContextDescriptor::SlotRegister(),
Operand(Smi::FromInt(instr->slot_index())));
__ Mov(StoreGlobalViaContextDescriptor::NameRegister(),
Operand(instr->name()));
Handle<Code> stub =
CodeFactory::StoreGlobalViaContext(isolate(), instr->depth(),
instr->language_mode()).code();
CallCode(stub, RelocInfo::CODE_TARGET, instr);
int const slot = instr->slot_index();
int const depth = instr->depth();
if (depth <= StoreGlobalViaContextStub::kMaximumDepth) {
__ Mov(StoreGlobalViaContextDescriptor::SlotRegister(), Operand(slot));
__ Mov(StoreGlobalViaContextDescriptor::NameRegister(),
Operand(instr->name()));
Handle<Code> stub = CodeFactory::StoreGlobalViaContext(
isolate(), depth, instr->language_mode())
.code();
CallCode(stub, RelocInfo::CODE_TARGET, instr);
} else {
__ Push(Smi::FromInt(slot));
__ Push(instr->name());
__ Push(StoreGlobalViaContextDescriptor::ValueRegister());
__ CallRuntime(is_strict(instr->language_mode())
? Runtime::kStoreGlobalViaContext_Strict
: Runtime::kStoreGlobalViaContext_Sloppy,
3);
}
}

View File

@ -1769,7 +1769,7 @@ class MacroAssembler : public Assembler {
// |object| is the object being stored into, |value| is the object being
// stored. value and scratch registers are clobbered by the operation.
// The offset is the offset from the start of the object, not the offset from
// the tagged HeapObject pointer. For use with FieldOperand(reg, off).
// the tagged HeapObject pointer. For use with FieldMemOperand(reg, off).
void RecordWriteField(
Register object,
int offset,
@ -2233,7 +2233,7 @@ class UseScratchRegisterScope {
};
inline MemOperand ContextMemOperand(Register context, int index) {
inline MemOperand ContextMemOperand(Register context, int index = 0) {
return MemOperand(context, Context::SlotOffset(index));
}

View File

@ -1622,220 +1622,6 @@ Handle<Code> StoreGlobalStub::GenerateCode() {
}
template <>
HValue* CodeStubGraphBuilder<LoadGlobalViaContextStub>::BuildCodeStub() {
LoadGlobalViaContextStub* stub = casted_stub();
int depth_value = stub->depth();
HValue* depth = GetParameter(0);
HValue* slot_index = GetParameter(1);
HValue* name = GetParameter(2);
// Choose between dynamic or static context script fetching versions.
depth = depth_value < LoadGlobalViaContextStub::kDynamicDepth
? nullptr
: AddUncasted<HForceRepresentation>(depth, Representation::Smi());
slot_index =
AddUncasted<HForceRepresentation>(slot_index, Representation::Smi());
HValue* script_context = BuildGetParentContext(depth, depth_value);
HValue* cell =
Add<HLoadKeyed>(script_context, slot_index, nullptr, FAST_ELEMENTS);
HValue* value = Add<HLoadNamedField>(cell, nullptr,
HObjectAccess::ForPropertyCellValue());
IfBuilder builder(this);
HValue* hole_value = graph()->GetConstantHole();
builder.IfNot<HCompareObjectEqAndBranch, HValue*>(value, hole_value);
builder.Then();
{ Push(value); }
builder.Else();
{
Add<HPushArguments>(script_context, slot_index, name);
Push(Add<HCallRuntime>(
isolate()->factory()->empty_string(),
Runtime::FunctionForId(Runtime::kLoadGlobalViaContext), 3));
}
builder.End();
return Pop();
}
Handle<Code> LoadGlobalViaContextStub::GenerateCode() {
return DoGenerateCode(this);
}
template <>
HValue* CodeStubGraphBuilder<StoreGlobalViaContextStub>::BuildCodeStub() {
StoreGlobalViaContextStub* stub = casted_stub();
int depth_value = stub->depth();
HValue* depth = GetParameter(0);
HValue* slot_index = GetParameter(1);
HValue* name = GetParameter(2);
HValue* value = GetParameter(3);
// Choose between dynamic or static context script fetching versions.
depth = depth_value < StoreGlobalViaContextStub::kDynamicDepth
? nullptr
: AddUncasted<HForceRepresentation>(depth, Representation::Smi());
slot_index =
AddUncasted<HForceRepresentation>(slot_index, Representation::Smi());
HValue* script_context = BuildGetParentContext(depth, depth_value);
HValue* cell =
Add<HLoadKeyed>(script_context, slot_index, nullptr, FAST_ELEMENTS);
// Fast case that requires storing to cell.
HIfContinuation if_fast_store_continuation(graph()->CreateBasicBlock(),
graph()->CreateBasicBlock());
// Fast case that does not require storing to cell.
HIfContinuation if_fast_no_store_continuation(graph()->CreateBasicBlock(),
graph()->CreateBasicBlock());
// This stub does the same as StoreGlobalStub but in a dynamic manner.
HValue* cell_contents = Add<HLoadNamedField>(
cell, nullptr, HObjectAccess::ForPropertyCellValue());
IfBuilder if_hole(this);
HValue* hole_value = graph()->GetConstantHole();
if_hole.IfNot<HCompareObjectEqAndBranch, HValue*>(cell_contents, hole_value);
if_hole.Then();
{
HValue* details = Add<HLoadNamedField>(
cell, nullptr, HObjectAccess::ForPropertyCellDetails());
HValue* cell_type =
BuildDecodeField<PropertyDetails::PropertyCellTypeField>(details);
// The code below relies on this.
STATIC_ASSERT(PropertyCellType::kUndefined < PropertyCellType::kConstant);
STATIC_ASSERT(PropertyCellType::kConstant <
PropertyCellType::kConstantType);
STATIC_ASSERT(PropertyCellType::kConstant < PropertyCellType::kMutable);
// Handle all cell type cases.
IfBuilder if_not_const(this);
int cell_type_constant = static_cast<int>(PropertyCellType::kConstant);
if_not_const.If<HCompareNumericAndBranch, HValue*>(
cell_type, Add<HConstant>(cell_type_constant), Token::GT);
if_not_const.Then();
{
// kConstantType or kMutable.
IfBuilder if_const_type(this);
int cell_type_constant_type =
static_cast<int>(PropertyCellType::kConstantType);
if_const_type.If<HCompareNumericAndBranch, HValue*>(
cell_type, Add<HConstant>(cell_type_constant_type), Token::EQ);
if_const_type.Then();
{
// Check that either both value and cell_contents are smi or
// both have the same map.
IfBuilder if_cell_is_smi(this);
if_cell_is_smi.If<HIsSmiAndBranch>(cell_contents);
if_cell_is_smi.Then();
{
IfBuilder if_value_is_smi(this);
if_value_is_smi.If<HIsSmiAndBranch>(value);
if_value_is_smi.Then();
{
// Both cell_contents and value are smis, do store.
}
if_value_is_smi.Else(); // Slow case.
if_value_is_smi.JoinContinuation(&if_fast_store_continuation);
}
if_cell_is_smi.Else();
{
IfBuilder if_value_is_heap_object(this);
if_value_is_heap_object.IfNot<HIsSmiAndBranch>(value);
if_value_is_heap_object.Then();
{
// Both cell_contents and value are heap objects, do store.
HValue* expected_map = Add<HLoadNamedField>(
cell_contents, nullptr, HObjectAccess::ForMap());
HValue* map =
Add<HLoadNamedField>(value, nullptr, HObjectAccess::ForMap());
IfBuilder map_check(this);
map_check.If<HCompareObjectEqAndBranch>(expected_map, map);
map_check.Then();
map_check.Else(); // Slow case.
map_check.JoinContinuation(&if_fast_store_continuation);
// The accessor case is handled by the map check above, since
// the value must not have a AccessorPair map.
}
if_value_is_heap_object.Else(); // Slow case.
if_value_is_heap_object.JoinContinuation(&if_fast_store_continuation);
}
if_cell_is_smi.EndUnreachable();
}
if_const_type.Else();
{
// Check that the property kind is kData.
HValue* kind = BuildDecodeField<PropertyDetails::KindField>(details);
HValue* data_kind_value = Add<HConstant>(kData);
IfBuilder builder(this);
builder.If<HCompareNumericAndBranch, HValue*>(kind, data_kind_value,
Token::EQ);
builder.Then();
builder.Else(); // Slow case.
builder.JoinContinuation(&if_fast_store_continuation);
}
if_const_type.EndUnreachable();
}
if_not_const.Else();
{
// kUndefined or kConstant, just check that the value matches.
IfBuilder builder(this);
builder.If<HCompareObjectEqAndBranch>(cell_contents, value);
builder.Then();
builder.Else(); // Slow case.
builder.JoinContinuation(&if_fast_no_store_continuation);
}
if_not_const.EndUnreachable();
}
if_hole.Else(); // Slow case.
if_hole.JoinContinuation(&if_fast_store_continuation);
// Do store for fast case.
IfBuilder if_fast_store(this, &if_fast_store_continuation);
if_fast_store.Then();
{
// All checks are done, store the value to the cell.
Add<HStoreNamedField>(cell, HObjectAccess::ForPropertyCellValue(), value);
}
if_fast_store.Else();
if_fast_store.JoinContinuation(&if_fast_no_store_continuation);
// Bailout to runtime call for slow case.
IfBuilder if_no_fast_store(this, &if_fast_no_store_continuation);
if_no_fast_store.Then();
{
// Nothing else to do.
}
if_no_fast_store.Else();
{
// Slow case, call runtime.
HInstruction* lang_mode = Add<HConstant>(casted_stub()->language_mode());
Add<HPushArguments>(script_context, slot_index, name, value);
Add<HPushArguments>(lang_mode);
Add<HCallRuntime>(isolate()->factory()->empty_string(),
Runtime::FunctionForId(Runtime::kStoreGlobalViaContext),
5);
}
if_no_fast_store.End();
return value;
}
Handle<Code> StoreGlobalViaContextStub::GenerateCode() {
return DoGenerateCode(this);
}
template <>
HValue* CodeStubGraphBuilder<ElementsTransitionAndStoreStub>::BuildCodeStub() {
HValue* object = GetParameter(StoreTransitionDescriptor::kReceiverIndex);

View File

@ -726,20 +726,6 @@ void RegExpConstructResultStub::InitializeDescriptor(
}
void LoadGlobalViaContextStub::InitializeDescriptor(
CodeStubDescriptor* descriptor) {
// Must never deoptimize.
descriptor->Initialize(FUNCTION_ADDR(UnexpectedStubMiss));
}
void StoreGlobalViaContextStub::InitializeDescriptor(
CodeStubDescriptor* descriptor) {
// Must never deoptimize.
descriptor->Initialize(FUNCTION_ADDR(UnexpectedStubMiss));
}
void TransitionElementsKindStub::InitializeDescriptor(
CodeStubDescriptor* descriptor) {
descriptor->Initialize(

View File

@ -1396,57 +1396,50 @@ class StoreGlobalStub : public HandlerStub {
};
class LoadGlobalViaContextStub : public HydrogenCodeStub {
class LoadGlobalViaContextStub final : public PlatformCodeStub {
public:
// Use the loop version for depths higher than this one.
static const int kDynamicDepth = 7;
static const int kMaximumDepth = 15;
LoadGlobalViaContextStub(Isolate* isolate, int depth)
: HydrogenCodeStub(isolate) {
if (depth > kDynamicDepth) depth = kDynamicDepth;
set_sub_minor_key(DepthBits::encode(depth));
: PlatformCodeStub(isolate) {
minor_key_ = DepthBits::encode(depth);
}
int depth() const { return DepthBits::decode(sub_minor_key()); }
int depth() const { return DepthBits::decode(minor_key_); }
private:
class DepthBits : public BitField<unsigned int, 0, 3> {};
STATIC_ASSERT(kDynamicDepth <= DepthBits::kMax);
class DepthBits : public BitField<int, 0, 4> {};
STATIC_ASSERT(DepthBits::kMax == kMaximumDepth);
DEFINE_CALL_INTERFACE_DESCRIPTOR(LoadGlobalViaContext);
DEFINE_HYDROGEN_CODE_STUB(LoadGlobalViaContext, HydrogenCodeStub);
DEFINE_PLATFORM_CODE_STUB(LoadGlobalViaContext, PlatformCodeStub);
};
class StoreGlobalViaContextStub : public HydrogenCodeStub {
class StoreGlobalViaContextStub final : public PlatformCodeStub {
public:
// Use the loop version for depths higher than this one.
static const int kDynamicDepth = 7;
static const int kMaximumDepth = 15;
StoreGlobalViaContextStub(Isolate* isolate, int depth,
LanguageMode language_mode)
: HydrogenCodeStub(isolate) {
if (depth > kDynamicDepth) depth = kDynamicDepth;
set_sub_minor_key(DepthBits::encode(depth) |
LanguageModeBits::encode(language_mode));
: PlatformCodeStub(isolate) {
minor_key_ =
DepthBits::encode(depth) | LanguageModeBits::encode(language_mode);
}
int depth() const { return DepthBits::decode(sub_minor_key()); }
int depth() const { return DepthBits::decode(minor_key_); }
LanguageMode language_mode() const {
return LanguageModeBits::decode(sub_minor_key());
return LanguageModeBits::decode(minor_key_);
}
private:
class DepthBits : public BitField<unsigned int, 0, 4> {};
STATIC_ASSERT(kDynamicDepth <= DepthBits::kMax);
class DepthBits : public BitField<int, 0, 4> {};
STATIC_ASSERT(DepthBits::kMax == kMaximumDepth);
class LanguageModeBits : public BitField<LanguageMode, 4, 2> {};
STATIC_ASSERT(LANGUAGE_END == 3);
private:
DEFINE_CALL_INTERFACE_DESCRIPTOR(StoreGlobalViaContext);
DEFINE_HYDROGEN_CODE_STUB(StoreGlobalViaContext, HydrogenCodeStub);
DEFINE_PLATFORM_CODE_STUB(StoreGlobalViaContext, PlatformCodeStub);
};
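
With the switch to platform code stubs, the depth (and, for stores, the language mode) is encoded in the stub's minor key rather than passed at run time. A stand-alone illustration of that packing, using plain shifts instead of V8's BitField template (field layout assumed from DepthBits and LanguageModeBits above):

// Stand-alone illustration of the minor-key packing: depth in bits [0,4),
// language mode in bits [4,6), matching DepthBits and LanguageModeBits.
#include <cassert>
#include <cstdint>

constexpr int kMaximumDepth = 15;  // largest depth that fits in 4 bits

constexpr uint32_t EncodeKey(uint32_t depth, uint32_t language_mode) {
  return (depth << 0) | (language_mode << 4);
}

constexpr uint32_t DecodeDepth(uint32_t key) { return key & 0xF; }

constexpr uint32_t DecodeLanguageMode(uint32_t key) { return (key >> 4) & 0x3; }

int main() {
  // 1 stands in for a strict-mode LanguageMode value (assumption).
  uint32_t key = EncodeKey(/*depth=*/7, /*language_mode=*/1);
  assert(DecodeDepth(key) == 7);
  assert(DecodeLanguageMode(key) == 1);
  assert(DecodeDepth(EncodeKey(kMaximumDepth, 0)) == kMaximumDepth);
  return 0;
}

Deeper context chains no longer fit in the key; callers fall back to pushing the Smi-tagged slot and the name and calling the runtime directly, as the full-codegen and Lithium diffs show.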

View File

@ -338,10 +338,10 @@ void JSGenericLowering::LowerJSLoadGlobal(Node* node) {
if (p.slot_index() >= 0) {
Callable callable = CodeFactory::LoadGlobalViaContext(isolate(), 0);
Node* script_context = node->InputAt(0);
node->ReplaceInput(0, jsgraph()->SmiConstant(0));
node->ReplaceInput(1, jsgraph()->SmiConstant(p.slot_index()));
node->ReplaceInput(2, jsgraph()->HeapConstant(p.name()));
node->ReplaceInput(3, script_context); // Replace old context.
node->ReplaceInput(0, jsgraph()->Int32Constant(p.slot_index()));
node->ReplaceInput(1, jsgraph()->HeapConstant(p.name()));
node->ReplaceInput(2, script_context); // Set new context...
node->RemoveInput(3); // ...instead of old one.
ReplaceWithStubCall(node, callable, flags);
} else {
@ -397,11 +397,11 @@ void JSGenericLowering::LowerJSStoreGlobal(Node* node) {
CodeFactory::StoreGlobalViaContext(isolate(), 0, p.language_mode());
Node* script_context = node->InputAt(0);
Node* value = node->InputAt(2);
node->ReplaceInput(0, jsgraph()->SmiConstant(0));
node->ReplaceInput(1, jsgraph()->SmiConstant(p.slot_index()));
node->ReplaceInput(2, jsgraph()->HeapConstant(p.name()));
node->ReplaceInput(3, value);
node->ReplaceInput(4, script_context); // Replace old context.
node->ReplaceInput(0, jsgraph()->Int32Constant(p.slot_index()));
node->ReplaceInput(1, jsgraph()->HeapConstant(p.name()));
node->ReplaceInput(2, value);
node->ReplaceInput(3, script_context); // Set new context...
node->RemoveInput(4); // ...instead of old one.
ReplaceWithStubCall(node, callable, flags);
} else {

View File

@ -5116,6 +5116,161 @@ void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
}
void LoadGlobalViaContextStub::Generate(MacroAssembler* masm) {
Register context_reg = esi;
Register slot_reg = ebx;
Register name_reg = ecx;
Register result_reg = eax;
Label slow_case;
// Go up context chain to the script context.
for (int i = 0; i < depth(); ++i) {
__ mov(result_reg, ContextOperand(context_reg, Context::PREVIOUS_INDEX));
context_reg = result_reg;
}
// Load the PropertyCell value at the specified slot.
__ mov(result_reg, ContextOperand(context_reg, slot_reg));
__ mov(result_reg, FieldOperand(result_reg, PropertyCell::kValueOffset));
// Check that value is not the_hole.
__ CompareRoot(result_reg, Heap::kTheHoleValueRootIndex);
__ j(equal, &slow_case, Label::kNear);
__ Ret();
// Fallback to the runtime.
__ bind(&slow_case);
__ SmiTag(slot_reg);
__ Pop(result_reg); // Pop return address.
__ Push(slot_reg);
__ Push(name_reg);
__ Push(result_reg); // Push return address.
__ TailCallRuntime(Runtime::kLoadGlobalViaContext, 2, 1);
}
void StoreGlobalViaContextStub::Generate(MacroAssembler* masm) {
Register context_reg = esi;
Register slot_reg = ebx;
Register name_reg = ecx;
Register value_reg = eax;
Register cell_reg = edi;
Register cell_details_reg = edx;
Label fast_heapobject_case, fast_smi_case, slow_case;
if (FLAG_debug_code) {
__ CompareRoot(value_reg, Heap::kTheHoleValueRootIndex);
__ Check(not_equal, kUnexpectedValue);
__ AssertName(name_reg);
}
// Go up context chain to the script context.
for (int i = 0; i < depth(); ++i) {
__ mov(cell_reg, ContextOperand(context_reg, Context::PREVIOUS_INDEX));
context_reg = cell_reg;
}
// Load the PropertyCell at the specified slot.
__ mov(cell_reg, ContextOperand(context_reg, slot_reg));
// Load PropertyDetails for the cell (actually only the cell_type and kind).
__ mov(cell_details_reg,
FieldOperand(cell_reg, PropertyCell::kDetailsOffset));
__ SmiUntag(cell_details_reg);
__ and_(cell_details_reg,
Immediate(PropertyDetails::PropertyCellTypeField::kMask |
PropertyDetails::KindField::kMask));
// Check if PropertyCell holds mutable data.
Label not_mutable_data;
__ cmp(cell_details_reg,
Immediate(PropertyDetails::PropertyCellTypeField::encode(
PropertyCellType::kMutable) |
PropertyDetails::KindField::encode(kData)));
__ j(not_equal, &not_mutable_data);
__ JumpIfSmi(value_reg, &fast_smi_case);
__ bind(&fast_heapobject_case);
__ mov(FieldOperand(cell_reg, PropertyCell::kValueOffset), value_reg);
__ RecordWriteField(cell_reg, PropertyCell::kValueOffset, value_reg,
cell_details_reg, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
OMIT_SMI_CHECK);
// RecordWriteField clobbers the value register, so we need to reload.
__ mov(value_reg, FieldOperand(cell_reg, PropertyCell::kValueOffset));
__ Ret();
__ bind(&not_mutable_data);
// Check if PropertyCell value matches the new value (relevant for Constant,
// ConstantType and Undefined cells).
Label not_same_value;
__ cmp(value_reg, FieldOperand(cell_reg, PropertyCell::kValueOffset));
__ j(not_equal, &not_same_value,
FLAG_debug_code ? Label::kFar : Label::kNear);
if (FLAG_debug_code) {
Label done;
// This can only be true for Constant, ConstantType and Undefined cells,
// because we never store the_hole via this stub.
__ cmp(cell_details_reg,
Immediate(PropertyDetails::PropertyCellTypeField::encode(
PropertyCellType::kConstant) |
PropertyDetails::KindField::encode(kData)));
__ j(equal, &done);
__ cmp(cell_details_reg,
Immediate(PropertyDetails::PropertyCellTypeField::encode(
PropertyCellType::kConstantType) |
PropertyDetails::KindField::encode(kData)));
__ j(equal, &done);
__ cmp(cell_details_reg,
Immediate(PropertyDetails::PropertyCellTypeField::encode(
PropertyCellType::kUndefined) |
PropertyDetails::KindField::encode(kData)));
__ Check(equal, kUnexpectedValue);
__ bind(&done);
}
__ Ret();
__ bind(&not_same_value);
// Check if PropertyCell contains data with constant type.
__ cmp(cell_details_reg,
Immediate(PropertyDetails::PropertyCellTypeField::encode(
PropertyCellType::kConstantType) |
PropertyDetails::KindField::encode(kData)));
__ j(not_equal, &slow_case, Label::kNear);
// Now either both old and new values must be SMIs or both must be heap
// objects with same map.
Label value_is_heap_object;
Register cell_value_reg = cell_details_reg;
__ mov(cell_value_reg, FieldOperand(cell_reg, PropertyCell::kValueOffset));
__ JumpIfNotSmi(value_reg, &value_is_heap_object, Label::kNear);
__ JumpIfNotSmi(cell_value_reg, &slow_case, Label::kNear);
// Old and new values are SMIs, no need for a write barrier here.
__ bind(&fast_smi_case);
__ mov(FieldOperand(cell_reg, PropertyCell::kValueOffset), value_reg);
__ Ret();
__ bind(&value_is_heap_object);
__ JumpIfSmi(cell_value_reg, &slow_case, Label::kNear);
Register cell_value_map_reg = cell_value_reg;
__ mov(cell_value_map_reg,
FieldOperand(cell_value_reg, HeapObject::kMapOffset));
__ cmp(cell_value_map_reg, FieldOperand(value_reg, HeapObject::kMapOffset));
__ j(equal, &fast_heapobject_case);
// Fallback to the runtime.
__ bind(&slow_case);
__ SmiTag(slot_reg);
__ Pop(cell_reg); // Pop return address.
__ Push(slot_reg);
__ Push(name_reg);
__ Push(value_reg);
__ Push(cell_reg); // Push return address.
__ TailCallRuntime(is_strict(language_mode())
? Runtime::kStoreGlobalViaContext_Strict
: Runtime::kStoreGlobalViaContext_Sloppy,
3, 1);
}
// Generates an Operand for saving parameters after PrepareCallApiFunction.
static Operand ApiParameterOperand(int index) {
return Operand(esp, index * kPointerSize);

View File

@ -1341,15 +1341,18 @@ void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
DCHECK(var->index() > 0);
DCHECK(var->IsStaticGlobalObjectProperty());
// Each var occupies two slots in the context: for reads and writes.
int slot_index = var->index();
int depth = scope()->ContextChainLength(var->scope());
__ mov(LoadGlobalViaContextDescriptor::DepthRegister(),
Immediate(Smi::FromInt(depth)));
__ mov(LoadGlobalViaContextDescriptor::SlotRegister(),
Immediate(Smi::FromInt(slot_index)));
__ mov(LoadGlobalViaContextDescriptor::NameRegister(), var->name());
LoadGlobalViaContextStub stub(isolate(), depth);
__ CallStub(&stub);
int const slot = var->index();
int const depth = scope()->ContextChainLength(var->scope());
if (depth <= LoadGlobalViaContextStub::kMaximumDepth) {
__ Move(LoadGlobalViaContextDescriptor::SlotRegister(), Immediate(slot));
__ mov(LoadGlobalViaContextDescriptor::NameRegister(), var->name());
LoadGlobalViaContextStub stub(isolate(), depth);
__ CallStub(&stub);
} else {
__ Push(Smi::FromInt(slot));
__ Push(var->name());
__ CallRuntime(Runtime::kLoadGlobalViaContext, 2);
}
} else {
__ mov(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
@ -2620,16 +2623,23 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
DCHECK(var->index() > 0);
DCHECK(var->IsStaticGlobalObjectProperty());
// Each var occupies two slots in the context: for reads and writes.
int slot_index = var->index() + 1;
int depth = scope()->ContextChainLength(var->scope());
__ mov(StoreGlobalViaContextDescriptor::DepthRegister(),
Immediate(Smi::FromInt(depth)));
__ mov(StoreGlobalViaContextDescriptor::SlotRegister(),
Immediate(Smi::FromInt(slot_index)));
__ mov(StoreGlobalViaContextDescriptor::NameRegister(), var->name());
DCHECK(StoreGlobalViaContextDescriptor::ValueRegister().is(eax));
StoreGlobalViaContextStub stub(isolate(), depth, language_mode());
__ CallStub(&stub);
int const slot = var->index() + 1;
int const depth = scope()->ContextChainLength(var->scope());
if (depth <= StoreGlobalViaContextStub::kMaximumDepth) {
__ Move(StoreGlobalViaContextDescriptor::SlotRegister(), Immediate(slot));
__ mov(StoreGlobalViaContextDescriptor::NameRegister(), var->name());
DCHECK(StoreGlobalViaContextDescriptor::ValueRegister().is(eax));
StoreGlobalViaContextStub stub(isolate(), depth, language_mode());
__ CallStub(&stub);
} else {
__ Push(Smi::FromInt(slot));
__ Push(var->name());
__ Push(eax);
__ CallRuntime(is_strict(language_mode())
? Runtime::kStoreGlobalViaContext_Strict
: Runtime::kStoreGlobalViaContext_Sloppy,
3);
}
} else if (var->mode() == LET && op != Token::INIT_LET) {
// Non-initializing assignment to let variable needs a write barrier.

View File

@ -37,12 +37,10 @@ const Register StoreTransitionDescriptor::MapRegister() {
}
const Register LoadGlobalViaContextDescriptor::DepthRegister() { return edx; }
const Register LoadGlobalViaContextDescriptor::SlotRegister() { return ebx; }
const Register LoadGlobalViaContextDescriptor::NameRegister() { return ecx; }
const Register StoreGlobalViaContextDescriptor::DepthRegister() { return edx; }
const Register StoreGlobalViaContextDescriptor::SlotRegister() { return ebx; }
const Register StoreGlobalViaContextDescriptor::NameRegister() { return ecx; }
const Register StoreGlobalViaContextDescriptor::ValueRegister() { return eax; }

View File

@ -2870,15 +2870,19 @@ void LCodeGen::DoLoadGlobalViaContext(LLoadGlobalViaContext* instr) {
DCHECK(ToRegister(instr->context()).is(esi));
DCHECK(ToRegister(instr->result()).is(eax));
__ mov(LoadGlobalViaContextDescriptor::DepthRegister(),
Immediate(Smi::FromInt(instr->depth())));
__ mov(LoadGlobalViaContextDescriptor::SlotRegister(),
Immediate(Smi::FromInt(instr->slot_index())));
__ mov(LoadGlobalViaContextDescriptor::NameRegister(), instr->name());
Handle<Code> stub =
CodeFactory::LoadGlobalViaContext(isolate(), instr->depth()).code();
CallCode(stub, RelocInfo::CODE_TARGET, instr);
int const slot = instr->slot_index();
int const depth = instr->depth();
if (depth <= LoadGlobalViaContextStub::kMaximumDepth) {
__ mov(LoadGlobalViaContextDescriptor::SlotRegister(), slot);
__ mov(LoadGlobalViaContextDescriptor::NameRegister(), instr->name());
Handle<Code> stub =
CodeFactory::LoadGlobalViaContext(isolate(), depth).code();
CallCode(stub, RelocInfo::CODE_TARGET, instr);
} else {
__ Push(Smi::FromInt(slot));
__ Push(instr->name());
__ CallRuntime(Runtime::kLoadGlobalViaContext, 2);
}
}
@ -4143,16 +4147,24 @@ void LCodeGen::DoStoreGlobalViaContext(LStoreGlobalViaContext* instr) {
DCHECK(ToRegister(instr->value())
.is(StoreGlobalViaContextDescriptor::ValueRegister()));
__ mov(StoreGlobalViaContextDescriptor::DepthRegister(),
Immediate(Smi::FromInt(instr->depth())));
__ mov(StoreGlobalViaContextDescriptor::SlotRegister(),
Immediate(Smi::FromInt(instr->slot_index())));
__ mov(StoreGlobalViaContextDescriptor::NameRegister(), instr->name());
Handle<Code> stub =
CodeFactory::StoreGlobalViaContext(isolate(), instr->depth(),
instr->language_mode()).code();
CallCode(stub, RelocInfo::CODE_TARGET, instr);
int const slot = instr->slot_index();
int const depth = instr->depth();
if (depth <= StoreGlobalViaContextStub::kMaximumDepth) {
__ mov(StoreGlobalViaContextDescriptor::SlotRegister(), slot);
__ mov(StoreGlobalViaContextDescriptor::NameRegister(), instr->name());
Handle<Code> stub = CodeFactory::StoreGlobalViaContext(
isolate(), depth, instr->language_mode())
.code();
CallCode(stub, RelocInfo::CODE_TARGET, instr);
} else {
__ Push(Smi::FromInt(slot));
__ Push(instr->name());
__ Push(StoreGlobalViaContextDescriptor::ValueRegister());
__ CallRuntime(is_strict(instr->language_mode())
? Runtime::kStoreGlobalViaContext_Strict
: Runtime::kStoreGlobalViaContext_Sloppy,
3);
}
}

View File

@ -1068,6 +1068,11 @@ inline Operand ContextOperand(Register context, int index) {
}
inline Operand ContextOperand(Register context, Register index) {
return Operand(context, index, times_pointer_size, Context::SlotOffset(0));
}
inline Operand GlobalObjectOperand() {
return ContextOperand(esi, Context::GLOBAL_OBJECT_INDEX);
}

View File

@ -117,17 +117,16 @@ Type::FunctionType*
LoadGlobalViaContextDescriptor::BuildCallInterfaceDescriptorFunctionType(
Isolate* isolate, int paramater_count) {
Type::FunctionType* function = Type::FunctionType::New(
AnyTagged(), Type::Undefined(), 3, isolate->interface_descriptor_zone());
function->InitParameter(0, SmiType());
function->InitParameter(1, SmiType());
function->InitParameter(2, AnyTagged());
AnyTagged(), Type::Undefined(), 2, isolate->interface_descriptor_zone());
function->InitParameter(0, UntaggedSigned32());
function->InitParameter(1, AnyTagged());
return function;
}
void LoadGlobalViaContextDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
Register registers[] = {DepthRegister(), SlotRegister(), NameRegister()};
Register registers[] = {SlotRegister(), NameRegister()};
data->InitializePlatformSpecific(arraysize(registers), registers);
}
@ -136,19 +135,17 @@ Type::FunctionType*
StoreGlobalViaContextDescriptor::BuildCallInterfaceDescriptorFunctionType(
Isolate* isolate, int paramater_count) {
Type::FunctionType* function = Type::FunctionType::New(
AnyTagged(), Type::Undefined(), 4, isolate->interface_descriptor_zone());
function->InitParameter(0, SmiType());
function->InitParameter(1, SmiType());
AnyTagged(), Type::Undefined(), 3, isolate->interface_descriptor_zone());
function->InitParameter(0, UntaggedSigned32());
function->InitParameter(1, AnyTagged());
function->InitParameter(2, AnyTagged());
function->InitParameter(3, AnyTagged());
return function;
}
void StoreGlobalViaContextDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
Register registers[] = {DepthRegister(), SlotRegister(), NameRegister(),
ValueRegister()};
Register registers[] = {SlotRegister(), NameRegister(), ValueRegister()};
data->InitializePlatformSpecific(arraysize(registers), registers);
}

View File

@ -423,7 +423,6 @@ class LoadGlobalViaContextDescriptor : public CallInterfaceDescriptor {
DECLARE_DESCRIPTOR_WITH_CUSTOM_FUNCTION_TYPE(LoadGlobalViaContextDescriptor,
CallInterfaceDescriptor)
static const Register DepthRegister();
static const Register SlotRegister();
static const Register NameRegister();
};
@ -434,7 +433,6 @@ class StoreGlobalViaContextDescriptor : public CallInterfaceDescriptor {
DECLARE_DESCRIPTOR_WITH_CUSTOM_FUNCTION_TYPE(StoreGlobalViaContextDescriptor,
CallInterfaceDescriptor)
static const Register DepthRegister();
static const Register SlotRegister();
static const Register NameRegister();
static const Register ValueRegister();

View File

@ -5270,6 +5270,155 @@ void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
}
void LoadGlobalViaContextStub::Generate(MacroAssembler* masm) {
Register context_reg = cp;
Register slot_reg = a2;
Register name_reg = a3;
Register result_reg = v0;
Label slow_case;
// Go up context chain to the script context.
for (int i = 0; i < depth(); ++i) {
__ lw(result_reg, ContextOperand(context_reg, Context::PREVIOUS_INDEX));
context_reg = result_reg;
}
// Load the PropertyCell value at the specified slot.
__ sll(at, slot_reg, kPointerSizeLog2);
__ Addu(at, at, Operand(cp));
__ Addu(at, at, Context::SlotOffset(0));
__ lw(result_reg, MemOperand(at));
__ lw(result_reg, FieldMemOperand(result_reg, PropertyCell::kValueOffset));
// Check that value is not the_hole.
__ LoadRoot(at, Heap::kTheHoleValueRootIndex);
__ Branch(&slow_case, eq, result_reg, Operand(at));
__ Ret();
// Fallback to the runtime.
__ bind(&slow_case);
__ SmiTag(slot_reg);
__ Drop(1); // Pop return address.
__ Push(slot_reg, name_reg, result_reg);
__ TailCallRuntime(Runtime::kLoadGlobalViaContext, 2, 1);
}
void StoreGlobalViaContextStub::Generate(MacroAssembler* masm) {
Register context_reg = cp;
Register slot_reg = a2;
Register name_reg = a3;
Register value_reg = a0;
Register cell_reg = t0;
Register cell_details_reg = t1;
Label fast_heapobject_case, fast_smi_case, slow_case;
if (FLAG_debug_code) {
__ LoadRoot(at, Heap::kTheHoleValueRootIndex);
__ Check(ne, kUnexpectedValue, value_reg, Operand(at));
__ AssertName(name_reg);
}
// Go up context chain to the script context.
for (int i = 0; i < depth(); ++i) {
__ lw(cell_reg, ContextOperand(context_reg, Context::PREVIOUS_INDEX));
context_reg = cell_reg;
}
// Load the PropertyCell at the specified slot.
__ sll(at, slot_reg, kPointerSizeLog2);
__ Addu(at, at, Operand(cp));
__ Addu(at, at, Context::SlotOffset(0));
__ lw(cell_reg, MemOperand(at));
// Load PropertyDetails for the cell (actually only the cell_type and kind).
__ lw(cell_details_reg,
FieldMemOperand(cell_reg, PropertyCell::kDetailsOffset));
__ SmiUntag(cell_details_reg);
__ And(cell_details_reg, cell_details_reg,
PropertyDetails::PropertyCellTypeField::kMask |
PropertyDetails::KindField::kMask);
// Check if PropertyCell holds mutable data.
Label not_mutable_data;
__ Branch(&not_mutable_data, ne, cell_details_reg,
Operand(PropertyDetails::PropertyCellTypeField::encode(
PropertyCellType::kMutable) |
PropertyDetails::KindField::encode(kData)));
__ JumpIfSmi(value_reg, &fast_smi_case);
__ bind(&fast_heapobject_case);
__ sw(value_reg, FieldMemOperand(cell_reg, PropertyCell::kValueOffset));
__ RecordWriteField(cell_reg, PropertyCell::kValueOffset, value_reg,
cell_details_reg, kRAHasNotBeenSaved, kDontSaveFPRegs,
EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
// RecordWriteField clobbers the value register, so we need to reload.
__ lw(value_reg, FieldMemOperand(cell_reg, PropertyCell::kValueOffset));
__ Ret();
__ bind(&not_mutable_data);
// Check if PropertyCell value matches the new value (relevant for Constant,
// ConstantType and Undefined cells).
Label not_same_value;
__ lw(at, FieldMemOperand(cell_reg, PropertyCell::kValueOffset));
__ Branch(&not_same_value, ne, value_reg, Operand(at));
if (FLAG_debug_code) {
Label done;
// This can only be true for Constant, ConstantType and Undefined cells,
// because we never store the_hole via this stub.
__ Branch(&done, eq, cell_details_reg,
Operand(PropertyDetails::PropertyCellTypeField::encode(
PropertyCellType::kConstant) |
PropertyDetails::KindField::encode(kData)));
__ Branch(&done, eq, cell_details_reg,
Operand(PropertyDetails::PropertyCellTypeField::encode(
PropertyCellType::kConstantType) |
PropertyDetails::KindField::encode(kData)));
__ Check(eq, kUnexpectedValue, cell_details_reg,
Operand(PropertyDetails::PropertyCellTypeField::encode(
PropertyCellType::kUndefined) |
PropertyDetails::KindField::encode(kData)));
__ bind(&done);
}
__ Ret();
__ bind(&not_same_value);
// Check if PropertyCell contains data with constant type.
__ Branch(&slow_case, ne, cell_details_reg,
Operand(PropertyDetails::PropertyCellTypeField::encode(
PropertyCellType::kConstantType) |
PropertyDetails::KindField::encode(kData)));
// Now either both old and new values must be SMIs or both must be heap
// objects with same map.
Label value_is_heap_object;
Register cell_value_reg = cell_details_reg;
__ lw(cell_value_reg, FieldMemOperand(cell_reg, PropertyCell::kValueOffset));
__ JumpIfNotSmi(value_reg, &value_is_heap_object);
__ JumpIfNotSmi(cell_value_reg, &slow_case);
// Old and new values are SMIs, no need for a write barrier here.
__ bind(&fast_smi_case);
__ Ret(USE_DELAY_SLOT);
__ sw(value_reg, FieldMemOperand(cell_reg, PropertyCell::kValueOffset));
__ bind(&value_is_heap_object);
__ JumpIfSmi(cell_value_reg, &slow_case);
Register cell_value_map_reg = cell_value_reg;
__ lw(cell_value_map_reg,
FieldMemOperand(cell_value_reg, HeapObject::kMapOffset));
__ Branch(&fast_heapobject_case, eq, cell_value_map_reg,
FieldMemOperand(value_reg, HeapObject::kMapOffset));
// Fallback to the runtime.
__ bind(&slow_case);
__ SmiTag(slot_reg);
__ Drop(1); // Pop return address.
__ Push(slot_reg, name_reg, value_reg, cell_reg);
__ TailCallRuntime(is_strict(language_mode())
? Runtime::kStoreGlobalViaContext_Strict
: Runtime::kStoreGlobalViaContext_Sloppy,
3, 1);
}
static int AddressOffset(ExternalReference ref0, ExternalReference ref1) {
return ref0.address() - ref1.address();
}

View File

@ -1407,15 +1407,18 @@ void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
DCHECK(var->index() > 0);
DCHECK(var->IsStaticGlobalObjectProperty());
// Each var occupies two slots in the context: for reads and writes.
int slot_index = var->index();
int depth = scope()->ContextChainLength(var->scope());
__ li(LoadGlobalViaContextDescriptor::DepthRegister(),
Operand(Smi::FromInt(depth)));
__ li(LoadGlobalViaContextDescriptor::SlotRegister(),
Operand(Smi::FromInt(slot_index)));
__ li(LoadGlobalViaContextDescriptor::NameRegister(), Operand(var->name()));
LoadGlobalViaContextStub stub(isolate(), depth);
__ CallStub(&stub);
int const slot = var->index();
int const depth = scope()->ContextChainLength(var->scope());
if (depth <= LoadGlobalViaContextStub::kMaximumDepth) {
__ li(LoadGlobalViaContextDescriptor::SlotRegister(), Operand(slot));
__ li(LoadGlobalViaContextDescriptor::NameRegister(), var->name());
LoadGlobalViaContextStub stub(isolate(), depth);
__ CallStub(&stub);
} else {
__ Push(Smi::FromInt(slot));
__ Push(var->name());
__ CallRuntime(Runtime::kLoadGlobalViaContext, 2);
}
} else {
__ lw(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
@ -2700,18 +2703,25 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
// Global var, const, or let.
DCHECK(var->index() > 0);
DCHECK(var->IsStaticGlobalObjectProperty());
// Each var occupies two slots in the context: for reads and writes.
int slot_index = var->index() + 1;
int depth = scope()->ContextChainLength(var->scope());
__ li(StoreGlobalViaContextDescriptor::DepthRegister(),
Operand(Smi::FromInt(depth)));
__ li(StoreGlobalViaContextDescriptor::SlotRegister(),
Operand(Smi::FromInt(slot_index)));
__ li(StoreGlobalViaContextDescriptor::NameRegister(),
Operand(var->name()));
DCHECK(StoreGlobalViaContextDescriptor::ValueRegister().is(a0));
__ mov(StoreGlobalViaContextDescriptor::ValueRegister(), result_register());
StoreGlobalViaContextStub stub(isolate(), depth, language_mode());
__ CallStub(&stub);
// Each var occupies two slots in the context: for reads and writes.
int const slot = var->index() + 1;
int const depth = scope()->ContextChainLength(var->scope());
if (depth <= StoreGlobalViaContextStub::kMaximumDepth) {
__ li(StoreGlobalViaContextDescriptor::SlotRegister(), Operand(slot));
__ li(StoreGlobalViaContextDescriptor::NameRegister(), var->name());
StoreGlobalViaContextStub stub(isolate(), depth, language_mode());
__ CallStub(&stub);
} else {
__ Push(Smi::FromInt(slot));
__ Push(var->name());
__ Push(a0);
__ CallRuntime(is_strict(language_mode())
? Runtime::kStoreGlobalViaContext_Strict
: Runtime::kStoreGlobalViaContext_Sloppy,
3);
}
} else if (var->mode() == LET && op != Token::INIT_LET) {
// Non-initializing assignment to let variable needs a write barrier.
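The shape of the change in both EmitGlobalVariableLoad and EmitVariableAssignment above is the same: the depth register is gone, the depth is instead baked into the stub, and a runtime fallback handles the rare case where the context chain is deeper than the stub family covers. A rough standalone C++ sketch of that dispatch, with placeholder names (kMaximumDepth here is illustrative, not V8's actual constant):

#include <functional>

constexpr int kMaximumDepth = 4;  // placeholder bound, not the real value

// call_stub stands in for "materialize slot/name in registers and CallStub";
// call_runtime stands in for "push Smi slot, name (and value) and CallRuntime".
void EmitGlobalAccess(int slot, int depth,
                      const std::function<void(int, int)>& call_stub,
                      const std::function<void(int)>& call_runtime) {
  if (depth <= kMaximumDepth) {
    call_stub(slot, depth);   // fast case: depth is a stub parameter
  } else {
    call_runtime(slot);       // rare case: generic runtime call
  }
}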

View File

@ -36,12 +36,10 @@ const Register VectorStoreICDescriptor::VectorRegister() { return a3; }
const Register StoreTransitionDescriptor::MapRegister() { return a3; }
const Register LoadGlobalViaContextDescriptor::DepthRegister() { return a1; }
const Register LoadGlobalViaContextDescriptor::SlotRegister() { return a2; }
const Register LoadGlobalViaContextDescriptor::NameRegister() { return a3; }
const Register StoreGlobalViaContextDescriptor::DepthRegister() { return a1; }
const Register StoreGlobalViaContextDescriptor::SlotRegister() { return a2; }
const Register StoreGlobalViaContextDescriptor::NameRegister() { return a3; }
const Register StoreGlobalViaContextDescriptor::ValueRegister() { return a0; }

View File

@ -2899,15 +2899,20 @@ void LCodeGen::DoLoadGlobalViaContext(LLoadGlobalViaContext* instr) {
DCHECK(ToRegister(instr->context()).is(cp));
DCHECK(ToRegister(instr->result()).is(v0));
__ li(LoadGlobalViaContextDescriptor::DepthRegister(),
Operand(Smi::FromInt(instr->depth())));
__ li(LoadGlobalViaContextDescriptor::SlotRegister(),
Operand(Smi::FromInt(instr->slot_index())));
__ li(LoadGlobalViaContextDescriptor::NameRegister(), Operand(instr->name()));
Handle<Code> stub =
CodeFactory::LoadGlobalViaContext(isolate(), instr->depth()).code();
CallCode(stub, RelocInfo::CODE_TARGET, instr);
int const slot = instr->slot_index();
int const depth = instr->depth();
if (depth <= LoadGlobalViaContextStub::kMaximumDepth) {
__ li(LoadGlobalViaContextDescriptor::SlotRegister(), Operand(slot));
__ li(LoadGlobalViaContextDescriptor::NameRegister(),
Operand(instr->name()));
Handle<Code> stub =
CodeFactory::LoadGlobalViaContext(isolate(), depth).code();
CallCode(stub, RelocInfo::CODE_TARGET, instr);
} else {
__ Push(Smi::FromInt(slot));
__ Push(instr->name());
__ CallRuntime(Runtime::kLoadGlobalViaContext, 2);
}
}
@ -4209,17 +4214,25 @@ void LCodeGen::DoStoreGlobalViaContext(LStoreGlobalViaContext* instr) {
DCHECK(ToRegister(instr->value())
.is(StoreGlobalViaContextDescriptor::ValueRegister()));
__ li(StoreGlobalViaContextDescriptor::DepthRegister(),
Operand(Smi::FromInt(instr->depth())));
__ li(StoreGlobalViaContextDescriptor::SlotRegister(),
Operand(Smi::FromInt(instr->slot_index())));
__ li(StoreGlobalViaContextDescriptor::NameRegister(),
Operand(instr->name()));
Handle<Code> stub =
CodeFactory::StoreGlobalViaContext(isolate(), instr->depth(),
instr->language_mode()).code();
CallCode(stub, RelocInfo::CODE_TARGET, instr);
int const slot = instr->slot_index();
int const depth = instr->depth();
if (depth <= StoreGlobalViaContextStub::kMaximumDepth) {
__ li(StoreGlobalViaContextDescriptor::SlotRegister(), Operand(slot));
__ li(StoreGlobalViaContextDescriptor::NameRegister(),
Operand(instr->name()));
Handle<Code> stub = CodeFactory::StoreGlobalViaContext(
isolate(), depth, instr->language_mode())
.code();
CallCode(stub, RelocInfo::CODE_TARGET, instr);
} else {
__ Push(Smi::FromInt(slot));
__ Push(instr->name());
__ Push(StoreGlobalViaContextDescriptor::ValueRegister());
__ CallRuntime(is_strict(instr->language_mode())
? Runtime::kStoreGlobalViaContext_Strict
: Runtime::kStoreGlobalViaContext_Sloppy,
3);
}
}

View File

@ -5301,6 +5301,155 @@ void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
}
void LoadGlobalViaContextStub::Generate(MacroAssembler* masm) {
Register context_reg = cp;
Register slot_reg = a2;
Register name_reg = a3;
Register result_reg = v0;
Label slow_case;
// Go up context chain to the script context.
for (int i = 0; i < depth(); ++i) {
__ ld(result_reg, ContextOperand(context_reg, Context::PREVIOUS_INDEX));
context_reg = result_reg;
}
// Load the PropertyCell value at the specified slot.
__ dsll(at, slot_reg, kPointerSizeLog2);
__ Daddu(at, at, Operand(context_reg));
__ Daddu(at, at, Context::SlotOffset(0));
__ ld(result_reg, MemOperand(at));
__ ld(result_reg, FieldMemOperand(result_reg, PropertyCell::kValueOffset));
// Check that value is not the_hole.
__ LoadRoot(at, Heap::kTheHoleValueRootIndex);
__ Branch(&slow_case, eq, result_reg, Operand(at));
__ Ret();
// Fall back to the runtime.
__ bind(&slow_case);
__ SmiTag(slot_reg);
__ Push(slot_reg, name_reg);
__ TailCallRuntime(Runtime::kLoadGlobalViaContext, 2, 1);
}
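As with the store stub, here is a small self-contained C++ model of the load stub's fast path (again not V8 source; Context, Cell and kTheHole are simplified stand-ins): walk depth() PREVIOUS links to the script context, read the PropertyCell at the given slot, and return its value unless it still holds the hole, in which case the stub falls back to Runtime::kLoadGlobalViaContext.

#include <vector>

// Simplified stand-ins for V8's heap objects (not real V8 types).
struct Cell { const void* value; };
struct Context {
  const Context* previous;      // the Context::PREVIOUS_INDEX link
  std::vector<Cell*> slots;     // script-context slots holding PropertyCells
};

static const int kTheHoleSentinel = 0;
static const void* const kTheHole = &kTheHoleSentinel;  // "uninitialized" marker

// Returns the cell's value, or nullptr to mean "fall back to the runtime"
// (the stub's slow_case).
const void* TryFastGlobalLoad(const Context* context, int depth, int slot) {
  // Go up the context chain to the script context (the stub's unrolled loop).
  for (int i = 0; i < depth; ++i) context = context->previous;
  const void* value = context->slots[slot]->value;
  return value == kTheHole ? nullptr : value;
}

int main() {
  Cell cell{kTheHole};
  Context script{nullptr, {&cell}};
  Context function_ctx{&script, {}};
  // The slot still holds the hole, so the model reports "go to the runtime".
  return TryFastGlobalLoad(&function_ctx, 1, 0) == nullptr ? 0 : 1;
}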
void StoreGlobalViaContextStub::Generate(MacroAssembler* masm) {
Register context_reg = cp;
Register slot_reg = a2;
Register name_reg = a3;
Register value_reg = a0;
Register cell_reg = a4;
Register cell_details_reg = a5;
Label fast_heapobject_case, fast_smi_case, slow_case;
if (FLAG_debug_code) {
__ LoadRoot(at, Heap::kTheHoleValueRootIndex);
__ Check(ne, kUnexpectedValue, value_reg, Operand(at));
__ AssertName(name_reg);
}
// Go up context chain to the script context.
for (int i = 0; i < depth(); ++i) {
__ ld(cell_reg, ContextOperand(context_reg, Context::PREVIOUS_INDEX));
context_reg = cell_reg;
}
// Load the PropertyCell at the specified slot.
__ dsll(at, slot_reg, kPointerSizeLog2);
__ Daddu(at, at, Operand(context_reg));
__ Daddu(at, at, Context::SlotOffset(0));
__ ld(cell_reg, MemOperand(at));
// Load PropertyDetails for the cell (actually only the cell_type and kind).
__ ld(cell_details_reg,
FieldMemOperand(cell_reg, PropertyCell::kDetailsOffset));
__ SmiUntag(cell_details_reg);
__ And(cell_details_reg, cell_details_reg,
PropertyDetails::PropertyCellTypeField::kMask |
PropertyDetails::KindField::kMask);
// Check if PropertyCell holds mutable data.
Label not_mutable_data;
__ Branch(&not_mutable_data, ne, cell_details_reg,
Operand(PropertyDetails::PropertyCellTypeField::encode(
PropertyCellType::kMutable) |
PropertyDetails::KindField::encode(kData)));
__ JumpIfSmi(value_reg, &fast_smi_case);
__ bind(&fast_heapobject_case);
__ sd(value_reg, FieldMemOperand(cell_reg, PropertyCell::kValueOffset));
__ RecordWriteField(cell_reg, PropertyCell::kValueOffset, value_reg,
cell_details_reg, kRAHasNotBeenSaved, kDontSaveFPRegs,
EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
// RecordWriteField clobbers the value register, so we need to reload.
__ Ret(USE_DELAY_SLOT);
__ ld(value_reg, FieldMemOperand(cell_reg, PropertyCell::kValueOffset));
__ bind(&not_mutable_data);
// Check if PropertyCell value matches the new value (relevant for Constant,
// ConstantType and Undefined cells).
Label not_same_value;
__ ld(at, FieldMemOperand(cell_reg, PropertyCell::kValueOffset));
__ Branch(&not_same_value, ne, value_reg, Operand(at));
if (FLAG_debug_code) {
Label done;
// This can only be true for Constant, ConstantType and Undefined cells,
// because we never store the_hole via this stub.
__ Branch(&done, eq, cell_details_reg,
Operand(PropertyDetails::PropertyCellTypeField::encode(
PropertyCellType::kConstant) |
PropertyDetails::KindField::encode(kData)));
__ Branch(&done, eq, cell_details_reg,
Operand(PropertyDetails::PropertyCellTypeField::encode(
PropertyCellType::kConstantType) |
PropertyDetails::KindField::encode(kData)));
__ Check(eq, kUnexpectedValue, cell_details_reg,
Operand(PropertyDetails::PropertyCellTypeField::encode(
PropertyCellType::kUndefined) |
PropertyDetails::KindField::encode(kData)));
__ bind(&done);
}
__ Ret();
__ bind(&not_same_value);
// Check if PropertyCell contains data with constant type.
__ Branch(&slow_case, ne, cell_details_reg,
Operand(PropertyDetails::PropertyCellTypeField::encode(
PropertyCellType::kConstantType) |
PropertyDetails::KindField::encode(kData)));
// Now either both old and new values must be SMIs or both must be heap
// objects with the same map.
Label value_is_heap_object;
Register cell_value_reg = cell_details_reg;
__ ld(cell_value_reg, FieldMemOperand(cell_reg, PropertyCell::kValueOffset));
__ JumpIfNotSmi(value_reg, &value_is_heap_object);
__ JumpIfNotSmi(cell_value_reg, &slow_case);
// Old and new values are SMIs, no need for a write barrier here.
__ bind(&fast_smi_case);
__ Ret(USE_DELAY_SLOT);
__ sd(value_reg, FieldMemOperand(cell_reg, PropertyCell::kValueOffset));
__ bind(&value_is_heap_object);
__ JumpIfSmi(cell_value_reg, &slow_case);
Register cell_value_map_reg = cell_value_reg;
__ ld(cell_value_map_reg,
FieldMemOperand(cell_value_reg, HeapObject::kMapOffset));
__ ld(at, FieldMemOperand(value_reg, HeapObject::kMapOffset));
__ Branch(&fast_heapobject_case, eq, cell_value_map_reg, Operand(at));
// Fall back to the runtime.
__ bind(&slow_case);
__ SmiTag(slot_reg);
__ Push(slot_reg, name_reg, value_reg);
__ TailCallRuntime(is_strict(language_mode())
                       ? Runtime::kStoreGlobalViaContext_Strict
                       : Runtime::kStoreGlobalViaContext_Sloppy,
                   3, 1);
}
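A MIPS-specific detail in both stubs above that is easy to misread: Ret(USE_DELAY_SLOT) emits the return jump first and lets the following instruction execute in the branch delay slot, which is why the value store textually appears after the Ret. Repeating the fast Smi path from above with comments (annotation only, no new code):

__ bind(&fast_smi_case);
__ Ret(USE_DELAY_SLOT);  // emits jr ra; the next instruction fills the delay slot
__ sd(value_reg, FieldMemOperand(cell_reg, PropertyCell::kValueOffset));
// ^ the store runs in the delay slot, so it still completes before control
//   actually returns to the caller (the mips32 stub does the same with sw).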
static int AddressOffset(ExternalReference ref0, ExternalReference ref1) {
int64_t offset = (ref0.address() - ref1.address());
DCHECK(static_cast<int>(offset) == offset);

View File

@ -1403,15 +1403,18 @@ void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
DCHECK(var->index() > 0);
DCHECK(var->IsStaticGlobalObjectProperty());
// Each var occupies two slots in the context: for reads and writes.
int slot_index = var->index();
int depth = scope()->ContextChainLength(var->scope());
__ li(LoadGlobalViaContextDescriptor::DepthRegister(),
Operand(Smi::FromInt(depth)));
__ li(LoadGlobalViaContextDescriptor::SlotRegister(),
Operand(Smi::FromInt(slot_index)));
__ li(LoadGlobalViaContextDescriptor::NameRegister(), Operand(var->name()));
LoadGlobalViaContextStub stub(isolate(), depth);
__ CallStub(&stub);
int const slot = var->index();
int const depth = scope()->ContextChainLength(var->scope());
if (depth <= LoadGlobalViaContextStub::kMaximumDepth) {
__ li(LoadGlobalViaContextDescriptor::SlotRegister(), Operand(slot));
__ li(LoadGlobalViaContextDescriptor::NameRegister(), var->name());
LoadGlobalViaContextStub stub(isolate(), depth);
__ CallStub(&stub);
} else {
__ Push(Smi::FromInt(slot));
__ Push(var->name());
__ CallRuntime(Runtime::kLoadGlobalViaContext, 2);
}
} else {
__ ld(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
@ -2697,18 +2700,25 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
// Global var, const, or let.
DCHECK(var->index() > 0);
DCHECK(var->IsStaticGlobalObjectProperty());
// Each var occupies two slots in the context: for reads and writes.
int slot_index = var->index() + 1;
int depth = scope()->ContextChainLength(var->scope());
__ li(StoreGlobalViaContextDescriptor::DepthRegister(),
Operand(Smi::FromInt(depth)));
__ li(StoreGlobalViaContextDescriptor::SlotRegister(),
Operand(Smi::FromInt(slot_index)));
__ li(StoreGlobalViaContextDescriptor::NameRegister(),
Operand(var->name()));
DCHECK(StoreGlobalViaContextDescriptor::ValueRegister().is(a0));
__ mov(StoreGlobalViaContextDescriptor::ValueRegister(), result_register());
StoreGlobalViaContextStub stub(isolate(), depth, language_mode());
__ CallStub(&stub);
// Each var occupies two slots in the context: for reads and writes.
int const slot = var->index() + 1;
int const depth = scope()->ContextChainLength(var->scope());
if (depth <= StoreGlobalViaContextStub::kMaximumDepth) {
__ li(StoreGlobalViaContextDescriptor::SlotRegister(), Operand(slot));
__ li(StoreGlobalViaContextDescriptor::NameRegister(), var->name());
StoreGlobalViaContextStub stub(isolate(), depth, language_mode());
__ CallStub(&stub);
} else {
__ Push(Smi::FromInt(slot));
__ Push(var->name());
__ Push(a0);
__ CallRuntime(is_strict(language_mode())
? Runtime::kStoreGlobalViaContext_Strict
: Runtime::kStoreGlobalViaContext_Sloppy,
3);
}
} else if (var->mode() == LET && op != Token::INIT_LET) {
// Non-initializing assignment to let variable needs a write barrier.

View File

@ -36,12 +36,10 @@ const Register VectorStoreICDescriptor::VectorRegister() { return a3; }
const Register StoreTransitionDescriptor::MapRegister() { return a3; }
const Register LoadGlobalViaContextDescriptor::DepthRegister() { return a1; }
const Register LoadGlobalViaContextDescriptor::SlotRegister() { return a2; }
const Register LoadGlobalViaContextDescriptor::NameRegister() { return a3; }
const Register StoreGlobalViaContextDescriptor::DepthRegister() { return a1; }
const Register StoreGlobalViaContextDescriptor::SlotRegister() { return a2; }
const Register StoreGlobalViaContextDescriptor::NameRegister() { return a3; }
const Register StoreGlobalViaContextDescriptor::ValueRegister() { return a0; }

View File

@ -3003,15 +3003,20 @@ void LCodeGen::DoLoadGlobalViaContext(LLoadGlobalViaContext* instr) {
DCHECK(ToRegister(instr->context()).is(cp));
DCHECK(ToRegister(instr->result()).is(v0));
__ li(LoadGlobalViaContextDescriptor::DepthRegister(),
Operand(Smi::FromInt(instr->depth())));
__ li(LoadGlobalViaContextDescriptor::SlotRegister(),
Operand(Smi::FromInt(instr->slot_index())));
__ li(LoadGlobalViaContextDescriptor::NameRegister(), Operand(instr->name()));
Handle<Code> stub =
CodeFactory::LoadGlobalViaContext(isolate(), instr->depth()).code();
CallCode(stub, RelocInfo::CODE_TARGET, instr);
int const slot = instr->slot_index();
int const depth = instr->depth();
if (depth <= LoadGlobalViaContextStub::kMaximumDepth) {
__ li(LoadGlobalViaContextDescriptor::SlotRegister(), Operand(slot));
__ li(LoadGlobalViaContextDescriptor::NameRegister(),
Operand(instr->name()));
Handle<Code> stub =
CodeFactory::LoadGlobalViaContext(isolate(), depth).code();
CallCode(stub, RelocInfo::CODE_TARGET, instr);
} else {
__ Push(Smi::FromInt(slot));
__ Push(instr->name());
__ CallRuntime(Runtime::kLoadGlobalViaContext, 2);
}
}
@ -4402,17 +4407,25 @@ void LCodeGen::DoStoreGlobalViaContext(LStoreGlobalViaContext* instr) {
DCHECK(ToRegister(instr->value())
.is(StoreGlobalViaContextDescriptor::ValueRegister()));
__ li(StoreGlobalViaContextDescriptor::DepthRegister(),
Operand(Smi::FromInt(instr->depth())));
__ li(StoreGlobalViaContextDescriptor::SlotRegister(),
Operand(Smi::FromInt(instr->slot_index())));
__ li(StoreGlobalViaContextDescriptor::NameRegister(),
Operand(instr->name()));
Handle<Code> stub =
CodeFactory::StoreGlobalViaContext(isolate(), instr->depth(),
instr->language_mode()).code();
CallCode(stub, RelocInfo::CODE_TARGET, instr);
int const slot = instr->slot_index();
int const depth = instr->depth();
if (depth <= StoreGlobalViaContextStub::kMaximumDepth) {
__ li(StoreGlobalViaContextDescriptor::SlotRegister(), Operand(slot));
__ li(StoreGlobalViaContextDescriptor::NameRegister(),
Operand(instr->name()));
Handle<Code> stub = CodeFactory::StoreGlobalViaContext(
isolate(), depth, instr->language_mode())
.code();
CallCode(stub, RelocInfo::CODE_TARGET, instr);
} else {
__ Push(Smi::FromInt(slot));
__ Push(instr->name());
__ Push(StoreGlobalViaContextDescriptor::ValueRegister());
__ CallRuntime(is_strict(instr->language_mode())
? Runtime::kStoreGlobalViaContext_Strict
: Runtime::kStoreGlobalViaContext_Sloppy,
3);
}
}

View File

@ -420,62 +420,62 @@ RUNTIME_FUNCTION(Runtime_ObjectSeal) {
RUNTIME_FUNCTION(Runtime_LoadGlobalViaContext) {
HandleScope scope(isolate);
DCHECK(args.length() == 3);
CONVERT_ARG_HANDLE_CHECKED(Context, script_context, 0);
CONVERT_SMI_ARG_CHECKED(index, 1);
CONVERT_ARG_HANDLE_CHECKED(Name, name, 2);
DCHECK_EQ(2, args.length());
CONVERT_SMI_ARG_CHECKED(slot, 0);
CONVERT_ARG_HANDLE_CHECKED(Name, name, 1);
// Go up context chain to the script context.
Handle<Context> script_context(isolate->context()->script_context(), isolate);
DCHECK(script_context->IsScriptContext());
DCHECK(script_context->get(index)->IsPropertyCell());
DCHECK(script_context->get(slot)->IsPropertyCell());
Handle<GlobalObject> global(script_context->global_object());
// Lookup the named property on the global object.
Handle<GlobalObject> global_object(script_context->global_object(), isolate);
LookupIterator it(global_object, name, LookupIterator::HIDDEN);
LookupIterator it(global, name, LookupIterator::HIDDEN);
// Switch to fast mode only if there is a data property and it's not on
// a hidden prototype.
if (LookupIterator::DATA == it.state() &&
if (it.state() == LookupIterator::DATA &&
it.GetHolder<Object>()->IsJSGlobalObject()) {
// Now update cell in the script context.
// Now update the cell in the script context.
Handle<PropertyCell> cell = it.GetPropertyCell();
script_context->set(index, *cell);
script_context->set(slot, *cell);
} else {
// This is not a fast case, so keep this access in a slow mode.
// Store empty_property_cell here to release the outdated property cell.
script_context->set(index, isolate->heap()->empty_property_cell());
script_context->set(slot, isolate->heap()->empty_property_cell());
}
Handle<Object> result;
ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, result, Object::GetProperty(&it));
return *result;
}
RUNTIME_FUNCTION(Runtime_StoreGlobalViaContext) {
HandleScope scope(isolate);
DCHECK(args.length() == 5);
CONVERT_ARG_HANDLE_CHECKED(Context, script_context, 0);
CONVERT_SMI_ARG_CHECKED(index, 1);
CONVERT_ARG_HANDLE_CHECKED(Name, name, 2);
CONVERT_ARG_HANDLE_CHECKED(Object, value, 3);
CONVERT_LANGUAGE_MODE_ARG_CHECKED(language_mode_arg, 4);
namespace {
Object* StoreGlobalViaContext(Isolate* isolate, int slot, Handle<Name> name,
Handle<Object> value,
LanguageMode language_mode) {
// Go up context chain to the script context.
Handle<Context> script_context(isolate->context()->script_context(), isolate);
DCHECK(script_context->IsScriptContext());
DCHECK(script_context->get(index)->IsPropertyCell());
LanguageMode language_mode = language_mode_arg;
DCHECK(script_context->get(slot)->IsPropertyCell());
Handle<GlobalObject> global(script_context->global_object());
LookupIterator it(global, name, LookupIterator::HIDDEN);
// Lookup the named property on the global object.
Handle<GlobalObject> global_object(script_context->global_object(), isolate);
LookupIterator it(global_object, name, LookupIterator::HIDDEN);
// Switch to fast mode only if there is a data property and it's not on
// a hidden prototype.
if (LookupIterator::DATA == it.state() &&
it.GetHolder<Object>()->IsJSGlobalObject()) {
// Now update cell in the script context.
Handle<PropertyCell> cell = it.GetPropertyCell();
script_context->set(index, *cell);
script_context->set(slot, *cell);
} else {
// This is not a fast case, so keep this access in a slow mode.
// Store empty_property_cell here to release the outdated property cell.
script_context->set(index, isolate->heap()->empty_property_cell());
script_context->set(slot, isolate->heap()->empty_property_cell());
}
Handle<Object> result;
@ -483,10 +483,33 @@ RUNTIME_FUNCTION(Runtime_StoreGlobalViaContext) {
isolate, result,
Object::SetProperty(&it, value, language_mode,
Object::CERTAINLY_NOT_STORE_FROM_KEYED));
return *result;
}
} // namespace
RUNTIME_FUNCTION(Runtime_StoreGlobalViaContext_Sloppy) {
HandleScope scope(isolate);
DCHECK_EQ(3, args.length());
CONVERT_SMI_ARG_CHECKED(slot, 0);
CONVERT_ARG_HANDLE_CHECKED(Name, name, 1);
CONVERT_ARG_HANDLE_CHECKED(Object, value, 2);
return StoreGlobalViaContext(isolate, slot, name, value, SLOPPY);
}
RUNTIME_FUNCTION(Runtime_StoreGlobalViaContext_Strict) {
HandleScope scope(isolate);
DCHECK_EQ(3, args.length());
CONVERT_SMI_ARG_CHECKED(slot, 0);
CONVERT_ARG_HANDLE_CHECKED(Name, name, 1);
CONVERT_ARG_HANDLE_CHECKED(Object, value, 2);
return StoreGlobalViaContext(isolate, slot, name, value, STRICT);
}
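The net effect of the runtime change above is that the single 5-argument Runtime_StoreGlobalViaContext entry (context, index, name, value, language-mode Smi) becomes a shared helper plus two thin 3-argument wrappers, so the language mode no longer has to be passed and decoded at every call site. A minimal standalone C++ illustration of that shape, with placeholder names and a placeholder failure condition (sloppy mode ignores a failed store, strict mode throws, mirroring JavaScript semantics):

#include <stdexcept>
#include <string>

enum class LanguageMode { kSloppy, kStrict };

namespace {

// Shared implementation, parameterized by the language mode -- the analogue
// of the anonymous-namespace StoreGlobalViaContext helper above.
int StoreGlobalImpl(int slot, const std::string& name, int value,
                    LanguageMode mode) {
  (void)slot;
  bool store_succeeded = true;  // placeholder for Object::SetProperty
  if (!store_succeeded && mode == LanguageMode::kStrict) {
    throw std::runtime_error("cannot assign to read-only global " + name);
  }
  return value;
}

}  // namespace

// Two thin entry points with identical arguments and a fixed language mode,
// mirroring Runtime_StoreGlobalViaContext_Sloppy / _Strict above.
int StoreGlobal_Sloppy(int slot, const std::string& name, int value) {
  return StoreGlobalImpl(slot, name, value, LanguageMode::kSloppy);
}
int StoreGlobal_Strict(int slot, const std::string& name, int value) {
  return StoreGlobalImpl(slot, name, value, LanguageMode::kStrict);
}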
RUNTIME_FUNCTION(Runtime_GetProperty) {
HandleScope scope(isolate);

View File

@ -447,8 +447,9 @@ namespace internal {
F(GetPropertyStrong, 2, 1) \
F(KeyedGetProperty, 2, 1) \
F(KeyedGetPropertyStrong, 2, 1) \
F(LoadGlobalViaContext, 3, 1) \
F(StoreGlobalViaContext, 5, 1) \
F(LoadGlobalViaContext, 2, 1) \
F(StoreGlobalViaContext_Sloppy, 3, 1) \
F(StoreGlobalViaContext_Strict, 3, 1) \
F(AddNamedProperty, 4, 1) \
F(SetProperty, 4, 1) \
F(AddElement, 3, 1) \

View File

@ -5033,6 +5033,160 @@ void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
}
void LoadGlobalViaContextStub::Generate(MacroAssembler* masm) {
Register context_reg = rsi;
Register slot_reg = rbx;
Register name_reg = rcx;
Register result_reg = rax;
Label slow_case;
// Go up context chain to the script context.
for (int i = 0; i < depth(); ++i) {
__ movp(rdi, ContextOperand(context_reg, Context::PREVIOUS_INDEX));
context_reg = rdi;
}
// Load the PropertyCell value at the specified slot.
__ movp(result_reg, ContextOperand(context_reg, slot_reg));
__ movp(result_reg, FieldOperand(result_reg, PropertyCell::kValueOffset));
// Check that value is not the_hole.
__ CompareRoot(result_reg, Heap::kTheHoleValueRootIndex);
__ j(equal, &slow_case, Label::kNear);
__ Ret();
// Fall back to the runtime.
__ bind(&slow_case);
__ Integer32ToSmi(slot_reg, slot_reg);
__ PopReturnAddressTo(kScratchRegister);
__ Push(slot_reg);
__ Push(name_reg);
__ Push(kScratchRegister);
__ TailCallRuntime(Runtime::kLoadGlobalViaContext, 2, 1);
}
void StoreGlobalViaContextStub::Generate(MacroAssembler* masm) {
Register context_reg = rsi;
Register slot_reg = rbx;
Register name_reg = rcx;
Register value_reg = rax;
Register cell_reg = r8;
Register cell_details_reg = rdx;
Register cell_value_reg = r9;
Label fast_heapobject_case, fast_smi_case, slow_case;
if (FLAG_debug_code) {
__ CompareRoot(value_reg, Heap::kTheHoleValueRootIndex);
__ Check(not_equal, kUnexpectedValue);
__ AssertName(name_reg);
}
// Go up context chain to the script context.
for (int i = 0; i < depth(); ++i) {
__ movp(rdi, ContextOperand(context_reg, Context::PREVIOUS_INDEX));
context_reg = rdi;
}
// Load the PropertyCell at the specified slot.
__ movp(cell_reg, ContextOperand(context_reg, slot_reg));
// Load PropertyDetails for the cell (actually only the cell_type and kind).
__ SmiToInteger32(cell_details_reg,
FieldOperand(cell_reg, PropertyCell::kDetailsOffset));
__ andl(cell_details_reg,
Immediate(PropertyDetails::PropertyCellTypeField::kMask |
PropertyDetails::KindField::kMask));
// Check if PropertyCell holds mutable data.
Label not_mutable_data;
__ cmpl(cell_details_reg,
Immediate(PropertyDetails::PropertyCellTypeField::encode(
PropertyCellType::kMutable) |
PropertyDetails::KindField::encode(kData)));
__ j(not_equal, &not_mutable_data);
__ JumpIfSmi(value_reg, &fast_smi_case);
__ bind(&fast_heapobject_case);
__ movp(FieldOperand(cell_reg, PropertyCell::kValueOffset), value_reg);
__ RecordWriteField(cell_reg, PropertyCell::kValueOffset, value_reg,
cell_value_reg, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
OMIT_SMI_CHECK);
// RecordWriteField clobbers the value register, so we need to reload.
__ movp(value_reg, FieldOperand(cell_reg, PropertyCell::kValueOffset));
__ Ret();
__ bind(&not_mutable_data);
// Check if PropertyCell value matches the new value (relevant for Constant,
// ConstantType and Undefined cells).
Label not_same_value;
__ movp(cell_value_reg, FieldOperand(cell_reg, PropertyCell::kValueOffset));
__ cmpp(cell_value_reg, value_reg);
__ j(not_equal, &not_same_value,
FLAG_debug_code ? Label::kFar : Label::kNear);
if (FLAG_debug_code) {
Label done;
// This can only be true for Constant, ConstantType and Undefined cells,
// because we never store the_hole via this stub.
__ cmpl(cell_details_reg,
Immediate(PropertyDetails::PropertyCellTypeField::encode(
PropertyCellType::kConstant) |
PropertyDetails::KindField::encode(kData)));
__ j(equal, &done);
__ cmpl(cell_details_reg,
Immediate(PropertyDetails::PropertyCellTypeField::encode(
PropertyCellType::kConstantType) |
PropertyDetails::KindField::encode(kData)));
__ j(equal, &done);
__ cmpl(cell_details_reg,
Immediate(PropertyDetails::PropertyCellTypeField::encode(
PropertyCellType::kUndefined) |
PropertyDetails::KindField::encode(kData)));
__ Check(equal, kUnexpectedValue);
__ bind(&done);
}
__ Ret();
__ bind(&not_same_value);
// Check if PropertyCell contains data with constant type.
__ cmpl(cell_details_reg,
Immediate(PropertyDetails::PropertyCellTypeField::encode(
PropertyCellType::kConstantType) |
PropertyDetails::KindField::encode(kData)));
__ j(not_equal, &slow_case, Label::kNear);
// Now either both old and new values must be SMIs or both must be heap
// objects with the same map.
Label value_is_heap_object;
__ JumpIfNotSmi(value_reg, &value_is_heap_object, Label::kNear);
__ JumpIfNotSmi(cell_value_reg, &slow_case, Label::kNear);
// Old and new values are SMIs, no need for a write barrier here.
__ bind(&fast_smi_case);
__ movp(FieldOperand(cell_reg, PropertyCell::kValueOffset), value_reg);
__ Ret();
__ bind(&value_is_heap_object);
__ JumpIfSmi(cell_value_reg, &slow_case, Label::kNear);
Register cell_value_map_reg = cell_value_reg;
__ movp(cell_value_map_reg,
FieldOperand(cell_value_reg, HeapObject::kMapOffset));
__ cmpp(cell_value_map_reg, FieldOperand(value_reg, HeapObject::kMapOffset));
__ j(equal, &fast_heapobject_case);
// Fall back to the runtime.
__ bind(&slow_case);
__ Integer32ToSmi(slot_reg, slot_reg);
__ PopReturnAddressTo(kScratchRegister);
__ Push(slot_reg);
__ Push(name_reg);
__ Push(value_reg);
__ Push(kScratchRegister);
__ TailCallRuntime(is_strict(language_mode())
? Runtime::kStoreGlobalViaContext_Strict
: Runtime::kStoreGlobalViaContext_Sloppy,
3, 1);
}
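Compared with the ARM and MIPS slow paths, the x64 slow paths above have one extra step: on x64 the return address sits on the stack, so it is popped into kScratchRegister, the runtime arguments are pushed beneath it, and it is pushed back on top before the tail call. Repeating the store stub's slow path with comments (annotation only, no new code):

__ bind(&slow_case);
__ Integer32ToSmi(slot_reg, slot_reg);    // the runtime expects a Smi slot
__ PopReturnAddressTo(kScratchRegister);  // lift the return address off the stack
__ Push(slot_reg);                        // argument 0: slot
__ Push(name_reg);                        // argument 1: name
__ Push(value_reg);                       // argument 2: value (store case only)
__ Push(kScratchRegister);                // put the return address back on top
__ TailCallRuntime(is_strict(language_mode())
                       ? Runtime::kStoreGlobalViaContext_Strict
                       : Runtime::kStoreGlobalViaContext_Sloppy,
                   3, 1);                 // 3 arguments, 1 result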
static int Offset(ExternalReference ref0, ExternalReference ref1) {
int64_t offset = (ref0.address() - ref1.address());
// Check that fits into int.

View File

@ -1370,15 +1370,18 @@ void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
DCHECK(var->index() > 0);
DCHECK(var->IsStaticGlobalObjectProperty());
// Each var occupies two slots in the context: for reads and writes.
int slot_index = var->index();
int depth = scope()->ContextChainLength(var->scope());
__ Move(LoadGlobalViaContextDescriptor::DepthRegister(),
Smi::FromInt(depth));
__ Move(LoadGlobalViaContextDescriptor::SlotRegister(),
Smi::FromInt(slot_index));
__ Move(LoadGlobalViaContextDescriptor::NameRegister(), var->name());
LoadGlobalViaContextStub stub(isolate(), depth);
__ CallStub(&stub);
int const slot = var->index();
int const depth = scope()->ContextChainLength(var->scope());
if (depth <= LoadGlobalViaContextStub::kMaximumDepth) {
__ Set(LoadGlobalViaContextDescriptor::SlotRegister(), slot);
__ Move(LoadGlobalViaContextDescriptor::NameRegister(), var->name());
LoadGlobalViaContextStub stub(isolate(), depth);
__ CallStub(&stub);
} else {
__ Push(Smi::FromInt(slot));
__ Push(var->name());
__ CallRuntime(Runtime::kLoadGlobalViaContext, 2);
}
} else {
__ Move(LoadDescriptor::NameRegister(), var->name());
@ -2615,16 +2618,23 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
DCHECK(var->index() > 0);
DCHECK(var->IsStaticGlobalObjectProperty());
// Each var occupies two slots in the context: for reads and writes.
int slot_index = var->index() + 1;
int depth = scope()->ContextChainLength(var->scope());
__ Move(StoreGlobalViaContextDescriptor::DepthRegister(),
Smi::FromInt(depth));
__ Move(StoreGlobalViaContextDescriptor::SlotRegister(),
Smi::FromInt(slot_index));
__ Move(StoreGlobalViaContextDescriptor::NameRegister(), var->name());
DCHECK(StoreGlobalViaContextDescriptor::ValueRegister().is(rax));
StoreGlobalViaContextStub stub(isolate(), depth, language_mode());
__ CallStub(&stub);
int const slot = var->index() + 1;
int const depth = scope()->ContextChainLength(var->scope());
if (depth <= StoreGlobalViaContextStub::kMaximumDepth) {
__ Set(StoreGlobalViaContextDescriptor::SlotRegister(), slot);
__ Move(StoreGlobalViaContextDescriptor::NameRegister(), var->name());
DCHECK(StoreGlobalViaContextDescriptor::ValueRegister().is(rax));
StoreGlobalViaContextStub stub(isolate(), depth, language_mode());
__ CallStub(&stub);
} else {
__ Push(Smi::FromInt(slot));
__ Push(var->name());
__ Push(rax);
__ CallRuntime(is_strict(language_mode())
? Runtime::kStoreGlobalViaContext_Strict
: Runtime::kStoreGlobalViaContext_Sloppy,
3);
}
} else if (var->mode() == LET && op != Token::INIT_LET) {
// Non-initializing assignment to let variable needs a write barrier.

View File

@ -36,12 +36,10 @@ const Register VectorStoreICDescriptor::VectorRegister() { return rbx; }
const Register StoreTransitionDescriptor::MapRegister() { return rbx; }
const Register LoadGlobalViaContextDescriptor::DepthRegister() { return rdx; }
const Register LoadGlobalViaContextDescriptor::SlotRegister() { return rbx; }
const Register LoadGlobalViaContextDescriptor::NameRegister() { return rcx; }
const Register StoreGlobalViaContextDescriptor::DepthRegister() { return rdx; }
const Register StoreGlobalViaContextDescriptor::SlotRegister() { return rbx; }
const Register StoreGlobalViaContextDescriptor::NameRegister() { return rcx; }
const Register StoreGlobalViaContextDescriptor::ValueRegister() { return rax; }

View File

@ -2898,16 +2898,19 @@ void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) {
void LCodeGen::DoLoadGlobalViaContext(LLoadGlobalViaContext* instr) {
DCHECK(ToRegister(instr->context()).is(rsi));
DCHECK(ToRegister(instr->result()).is(rax));
__ Move(LoadGlobalViaContextDescriptor::DepthRegister(),
Smi::FromInt(instr->depth()));
__ Move(LoadGlobalViaContextDescriptor::SlotRegister(),
Smi::FromInt(instr->slot_index()));
__ Move(LoadGlobalViaContextDescriptor::NameRegister(), instr->name());
Handle<Code> stub =
CodeFactory::LoadGlobalViaContext(isolate(), instr->depth()).code();
CallCode(stub, RelocInfo::CODE_TARGET, instr);
int const slot = instr->slot_index();
int const depth = instr->depth();
if (depth <= LoadGlobalViaContextStub::kMaximumDepth) {
__ Set(LoadGlobalViaContextDescriptor::SlotRegister(), slot);
__ Move(LoadGlobalViaContextDescriptor::NameRegister(), instr->name());
Handle<Code> stub =
CodeFactory::LoadGlobalViaContext(isolate(), depth).code();
CallCode(stub, RelocInfo::CODE_TARGET, instr);
} else {
__ Push(Smi::FromInt(slot));
__ Push(instr->name());
__ CallRuntime(Runtime::kLoadGlobalViaContext, 2);
}
}
@ -4272,17 +4275,24 @@ void LCodeGen::DoStoreGlobalViaContext(LStoreGlobalViaContext* instr) {
DCHECK(ToRegister(instr->context()).is(rsi));
DCHECK(ToRegister(instr->value())
.is(StoreGlobalViaContextDescriptor::ValueRegister()));
__ Move(StoreGlobalViaContextDescriptor::DepthRegister(),
Smi::FromInt(instr->depth()));
__ Move(StoreGlobalViaContextDescriptor::SlotRegister(),
Smi::FromInt(instr->slot_index()));
__ Move(StoreGlobalViaContextDescriptor::NameRegister(), instr->name());
Handle<Code> stub =
CodeFactory::StoreGlobalViaContext(isolate(), instr->depth(),
instr->language_mode()).code();
CallCode(stub, RelocInfo::CODE_TARGET, instr);
int const slot = instr->slot_index();
int const depth = instr->depth();
if (depth <= StoreGlobalViaContextStub::kMaximumDepth) {
__ Set(StoreGlobalViaContextDescriptor::SlotRegister(), slot);
__ Move(StoreGlobalViaContextDescriptor::NameRegister(), instr->name());
Handle<Code> stub = CodeFactory::StoreGlobalViaContext(
isolate(), depth, instr->language_mode())
.code();
CallCode(stub, RelocInfo::CODE_TARGET, instr);
} else {
__ Push(Smi::FromInt(slot));
__ Push(instr->name());
__ Push(StoreGlobalViaContextDescriptor::ValueRegister());
__ CallRuntime(is_strict(instr->language_mode())
? Runtime::kStoreGlobalViaContext_Strict
: Runtime::kStoreGlobalViaContext_Sloppy,
3);
}
}

View File

@ -1568,6 +1568,11 @@ inline Operand ContextOperand(Register context, int index) {
}
inline Operand ContextOperand(Register context, Register index) {
return Operand(context, index, times_pointer_size, Context::SlotOffset(0));
}
inline Operand GlobalObjectOperand() {
return ContextOperand(rsi, Context::GLOBAL_OBJECT_INDEX);
}
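The new ContextOperand(Register, Register) overload added above is what lets the x64 stubs index the script context directly with the untagged slot register instead of scaling the slot by hand. Typical use, as in LoadGlobalViaContextStub earlier in this change (shown for illustration):

// Load the PropertyCell stored at the slot held in slot_reg (an untagged
// index into the script context), then load the cell's current value.
__ movp(result_reg, ContextOperand(context_reg, slot_reg));
__ movp(result_reg, FieldOperand(result_reg, PropertyCell::kValueOffset));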