Revert "Merge arguments branch to bleeding merge."

This reverts commit ceb31498b9d69edca3260820fb4047045891ce6d.

TBR=kmillikin@chromium.org

Review URL: http://codereview.chromium.org/7172030

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@8308 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
karlklose@chromium.org committed 2011-06-16 06:37:49 +00:00
parent dc66620251
commit cc19d1e278
71 changed files with 1743 additions and 2808 deletions
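
For context: the reverted branch implemented aliased ("mapped") arguments for non-strict functions via a parameter map in the arguments object's elements. A minimal JS sketch of the observable ES5 semantics involved (editorial illustration, not part of the original commit message):

function f(a) {
  arguments[0] = 42;
  return a;  // 42 in sloppy mode: arguments[0] aliases the parameter a
}

function g(a) {
  "use strict";
  arguments[0] = 42;
  return a;  // strict-mode arguments do not alias, so a is unchanged
}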

View File

@ -3925,232 +3925,11 @@ void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
}
void ArgumentsAccessStub::GenerateNewNonStrictSlow(MacroAssembler* masm) {
void ArgumentsAccessStub::GenerateNewObject(MacroAssembler* masm) {
// sp[0] : number of parameters
// sp[4] : receiver displacement
// sp[8] : function
// Check if the calling frame is an arguments adaptor frame.
Label runtime;
__ ldr(r3, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
__ ldr(r2, MemOperand(r3, StandardFrameConstants::kContextOffset));
__ cmp(r2, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
__ b(ne, &runtime);
// Patch the arguments.length and the parameters pointer in the current frame.
__ ldr(r2, MemOperand(r3, ArgumentsAdaptorFrameConstants::kLengthOffset));
__ str(r2, MemOperand(sp, 0 * kPointerSize));
__ add(r3, r3, Operand(r2, LSL, 1));
__ add(r3, r3, Operand(StandardFrameConstants::kCallerSPOffset));
__ str(r3, MemOperand(sp, 1 * kPointerSize));
__ bind(&runtime);
__ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);
}
void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) {
// Stack layout:
// sp[0] : number of parameters (tagged)
// sp[4] : address of receiver argument
// sp[8] : function
// Registers used over whole function:
// r6 : allocated object (tagged)
// r9 : mapped parameter count (tagged)
__ ldr(r1, MemOperand(sp, 0 * kPointerSize));
// r1 = parameter count (tagged)
// Check if the calling frame is an arguments adaptor frame.
Label runtime;
Label adaptor_frame, try_allocate;
__ ldr(r3, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
__ ldr(r2, MemOperand(r3, StandardFrameConstants::kContextOffset));
__ cmp(r2, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
__ b(eq, &adaptor_frame);
// No adaptor, parameter count = argument count.
__ mov(r2, r1);
__ b(&try_allocate);
// We have an adaptor frame. Patch the parameters pointer.
__ bind(&adaptor_frame);
__ ldr(r2, MemOperand(r3, ArgumentsAdaptorFrameConstants::kLengthOffset));
__ add(r3, r3, Operand(r2, LSL, 1));
__ add(r3, r3, Operand(StandardFrameConstants::kCallerSPOffset));
__ str(r3, MemOperand(sp, 1 * kPointerSize));
// r1 = parameter count (tagged)
// r2 = argument count (tagged)
// Compute the mapped parameter count = min(r1, r2) in r1.
__ cmp(r1, Operand(r2));
__ mov(r1, Operand(r2), LeaveCC, gt);
__ bind(&try_allocate);
// Compute the sizes of backing store, parameter map, and arguments object.
// 1. Parameter map, has 2 extra words containing context and backing store.
const int kParameterMapHeaderSize =
FixedArray::kHeaderSize + 2 * kPointerSize;
// If there are no mapped parameters, we do not need the parameter_map.
__ cmp(r1, Operand(Smi::FromInt(0)));
__ mov(r9, Operand(0), LeaveCC, eq);
__ mov(r9, Operand(r1, LSL, 1), LeaveCC, ne);
__ add(r9, r9, Operand(kParameterMapHeaderSize), LeaveCC, ne);
// 2. Backing store.
__ add(r9, r9, Operand(r2, LSL, 1));
__ add(r9, r9, Operand(FixedArray::kHeaderSize));
// 3. Arguments object.
__ add(r9, r9, Operand(Heap::kArgumentsObjectSize));
// Do the allocation of all three objects in one go.
__ AllocateInNewSpace(r9, r0, r3, r4, &runtime, TAG_OBJECT);
// r0 = address of new object(s) (tagged)
// r2 = argument count (tagged)
// Get the arguments boilerplate from the current (global) context into r4.
const int kNormalOffset =
Context::SlotOffset(Context::ARGUMENTS_BOILERPLATE_INDEX);
const int kAliasedOffset =
Context::SlotOffset(Context::ALIASED_ARGUMENTS_BOILERPLATE_INDEX);
__ ldr(r4, MemOperand(r8, Context::SlotOffset(Context::GLOBAL_INDEX)));
__ ldr(r4, FieldMemOperand(r4, GlobalObject::kGlobalContextOffset));
__ cmp(r1, Operand(0));
__ ldr(r4, MemOperand(r4, kNormalOffset), eq);
__ ldr(r4, MemOperand(r4, kAliasedOffset), ne);
// r0 = address of new object (tagged)
// r1 = mapped parameter count (tagged)
// r2 = argument count (tagged)
// r4 = address of boilerplate object (tagged)
// Copy the JS object part.
for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) {
__ ldr(r3, FieldMemOperand(r4, i));
__ str(r3, FieldMemOperand(r0, i));
}
// Set up the callee in-object property.
STATIC_ASSERT(Heap::kArgumentsCalleeIndex == 1);
__ ldr(r3, MemOperand(sp, 2 * kPointerSize));
const int kCalleeOffset = JSObject::kHeaderSize +
Heap::kArgumentsCalleeIndex * kPointerSize;
__ str(r3, FieldMemOperand(r0, kCalleeOffset));
// Use the length (smi tagged) and set that as an in-object property too.
STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
const int kLengthOffset = JSObject::kHeaderSize +
Heap::kArgumentsLengthIndex * kPointerSize;
__ str(r2, FieldMemOperand(r0, kLengthOffset));
// Set up the elements pointer in the allocated arguments object.
// If we allocated a parameter map, r4 will point there, otherwise
// it will point to the backing store.
__ add(r4, r0, Operand(Heap::kArgumentsObjectSize));
__ str(r4, FieldMemOperand(r0, JSObject::kElementsOffset));
// r0 = address of new object (tagged)
// r1 = mapped parameter count (tagged)
// r2 = argument count (tagged)
// r4 = address of parameter map or backing store (tagged)
// Initialize parameter map. If there are no mapped arguments, we're done.
Label skip_parameter_map;
__ cmp(r1, Operand(Smi::FromInt(0)));
// Move backing store address to r3, because it is
// expected there when filling in the unmapped arguments.
__ mov(r3, r4, LeaveCC, eq);
__ b(eq, &skip_parameter_map);
__ LoadRoot(r6, Heap::kNonStrictArgumentsElementsMapRootIndex);
__ str(r6, FieldMemOperand(r4, FixedArray::kMapOffset));
__ add(r6, r1, Operand(Smi::FromInt(2)));
__ str(r6, FieldMemOperand(r4, FixedArray::kLengthOffset));
__ str(r8, FieldMemOperand(r4, FixedArray::kHeaderSize + 0 * kPointerSize));
__ add(r6, r4, Operand(r1, LSL, 1));
__ add(r6, r6, Operand(kParameterMapHeaderSize));
__ str(r6, FieldMemOperand(r4, FixedArray::kHeaderSize + 1 * kPointerSize));
// Copy the parameter slots and the holes in the arguments.
// We need to fill in mapped_parameter_count slots. They index the context,
// where parameters are stored in reverse order, at
//   MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS + parameter_count - 1.
// The mapped parameters thus need to get indices
//   MIN_CONTEXT_SLOTS + parameter_count - 1 ..
//   MIN_CONTEXT_SLOTS + parameter_count - mapped_parameter_count,
// and we loop from right to left.
Label parameters_loop, parameters_test;
__ mov(r6, r1);
__ ldr(r9, MemOperand(sp, 0 * kPointerSize));
__ add(r9, r9, Operand(Smi::FromInt(Context::MIN_CONTEXT_SLOTS)));
__ sub(r9, r9, Operand(r1));
__ LoadRoot(r7, Heap::kTheHoleValueRootIndex);
__ add(r3, r4, Operand(r6, LSL, 1));
__ add(r3, r3, Operand(kParameterMapHeaderSize));
// r6 = loop variable (tagged)
// r9 = mapping index (tagged)
// r3 = address of backing store (tagged)
// r4 = address of parameter map (tagged)
// r5 = temporary scratch (e.g., for address calculation)
// r7 = the hole value
__ jmp(&parameters_test);
__ bind(&parameters_loop);
__ sub(r6, r6, Operand(Smi::FromInt(1)));
__ mov(r5, Operand(r6, LSL, 1));
__ add(r5, r5, Operand(kParameterMapHeaderSize - kHeapObjectTag));
__ str(r9, MemOperand(r4, r5));
__ sub(r5, r5, Operand(kParameterMapHeaderSize - FixedArray::kHeaderSize));
__ str(r7, MemOperand(r3, r5));
__ add(r9, r9, Operand(Smi::FromInt(1)));
__ bind(&parameters_test);
__ cmp(r6, Operand(Smi::FromInt(0)));
__ b(ne, &parameters_loop);
__ bind(&skip_parameter_map);
// r2 = argument count (tagged)
// r3 = address of backing store (tagged)
// r5 = scratch
// Copy arguments header and remaining slots (if there are any).
__ LoadRoot(r5, Heap::kFixedArrayMapRootIndex);
__ str(r5, FieldMemOperand(r3, FixedArray::kMapOffset));
__ str(r2, FieldMemOperand(r3, FixedArray::kLengthOffset));
Label arguments_loop, arguments_test;
__ mov(r9, r1);
__ ldr(r4, MemOperand(sp, 1 * kPointerSize));
__ sub(r4, r4, Operand(r9, LSL, 1));
__ jmp(&arguments_test);
__ bind(&arguments_loop);
__ sub(r4, r4, Operand(kPointerSize));
__ ldr(r6, MemOperand(r4, 0));
__ add(r5, r3, Operand(r9, LSL, 1));
__ str(r6, FieldMemOperand(r5, FixedArray::kHeaderSize));
__ add(r9, r9, Operand(Smi::FromInt(1)));
__ bind(&arguments_test);
__ cmp(r9, Operand(r2));
__ b(lt, &arguments_loop);
// Return and remove the on-stack parameters.
__ add(sp, sp, Operand(3 * kPointerSize));
__ Ret();
// Do the runtime call to allocate the arguments object.
// r2 = argument count (tagged)
__ bind(&runtime);
__ str(r2, MemOperand(sp, 0 * kPointerSize)); // Patch argument count.
__ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);
}
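
A worked example of the parameter-map index arithmetic used by GenerateNewNonStrictFast above, with hypothetical counts (an editorial sketch, not part of the diff):

// Hypothetical counts: parameter_count = 3, mapped_parameter_count = 2,
// and assume Context::MIN_CONTEXT_SLOTS = 5.
var MIN_CONTEXT_SLOTS = 5;
var parameterCount = 3;
var mappedParameterCount = 2;
var first = MIN_CONTEXT_SLOTS + parameterCount - 1;                    // 7
var last = MIN_CONTEXT_SLOTS + parameterCount - mappedParameterCount;  // 6
// The loop writes mapping indices 7 and 6 into the parameter map, right
// to left; the matching backing-store slots get the-hole, because those
// values live in the context.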
void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
// sp[0] : number of parameters
// sp[4] : receiver displacement
// sp[8] : function
// Check if the calling frame is an arguments adaptor frame.
Label adaptor_frame, try_allocate, runtime;
__ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
@ -4179,31 +3958,40 @@ void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
__ mov(r1, Operand(r1, LSR, kSmiTagSize));
__ add(r1, r1, Operand(FixedArray::kHeaderSize / kPointerSize));
__ bind(&add_arguments_object);
__ add(r1, r1, Operand(Heap::kArgumentsObjectSizeStrict / kPointerSize));
__ add(r1, r1, Operand(GetArgumentsObjectSize() / kPointerSize));
// Do the allocation of both objects in one go.
__ AllocateInNewSpace(r1,
r0,
r2,
r3,
&runtime,
static_cast<AllocationFlags>(TAG_OBJECT |
SIZE_IN_WORDS));
__ AllocateInNewSpace(
r1,
r0,
r2,
r3,
&runtime,
static_cast<AllocationFlags>(TAG_OBJECT | SIZE_IN_WORDS));
// Get the arguments boilerplate from the current (global) context.
__ ldr(r4, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
__ ldr(r4, FieldMemOperand(r4, GlobalObject::kGlobalContextOffset));
__ ldr(r4, MemOperand(r4, Context::SlotOffset(
Context::STRICT_MODE_ARGUMENTS_BOILERPLATE_INDEX)));
__ ldr(r4, MemOperand(r4,
Context::SlotOffset(GetArgumentsBoilerplateIndex())));
// Copy the JS object part.
__ CopyFields(r0, r4, r3.bit(), JSObject::kHeaderSize / kPointerSize);
if (type_ == NEW_NON_STRICT) {
// Set up the callee in-object property.
STATIC_ASSERT(Heap::kArgumentsCalleeIndex == 1);
__ ldr(r3, MemOperand(sp, 2 * kPointerSize));
const int kCalleeOffset = JSObject::kHeaderSize +
Heap::kArgumentsCalleeIndex * kPointerSize;
__ str(r3, FieldMemOperand(r0, kCalleeOffset));
}
// Get the length (smi tagged) and set that as an in-object property too.
STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
__ ldr(r1, MemOperand(sp, 0 * kPointerSize));
__ str(r1, FieldMemOperand(r0, JSObject::kHeaderSize +
Heap::kArgumentsLengthIndex * kPointerSize));
// If there are no actual arguments, we're done.
Label done;
@ -4215,13 +4003,12 @@ void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
// Set up the elements pointer in the allocated arguments object and
// initialize the header in the elements fixed array.
__ add(r4, r0, Operand(Heap::kArgumentsObjectSizeStrict));
__ add(r4, r0, Operand(GetArgumentsObjectSize()));
__ str(r4, FieldMemOperand(r0, JSObject::kElementsOffset));
__ LoadRoot(r3, Heap::kFixedArrayMapRootIndex);
__ str(r3, FieldMemOperand(r4, FixedArray::kMapOffset));
__ str(r1, FieldMemOperand(r4, FixedArray::kLengthOffset));
// Untag the length for the loop.
__ mov(r1, Operand(r1, LSR, kSmiTagSize));
__ mov(r1, Operand(r1, LSR, kSmiTagSize)); // Untag the length for the loop.
// Copy the fixed array slots.
Label loop;
@ -4244,7 +4031,7 @@ void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
// Do the runtime call to allocate the arguments object.
__ bind(&runtime);
__ TailCallRuntime(Runtime::kNewStrictArgumentsFast, 3, 1);
__ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);
}

View File

@ -230,17 +230,17 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
// function, receiver address, parameter count.
// The stub will rewrite the receiver and parameter count if the previous
// stack frame was an arguments adaptor frame.
ArgumentsAccessStub::Type type;
if (is_strict_mode()) {
type = ArgumentsAccessStub::NEW_STRICT;
} else if (function()->has_duplicate_parameters()) {
type = ArgumentsAccessStub::NEW_NON_STRICT_SLOW;
} else {
type = ArgumentsAccessStub::NEW_NON_STRICT_FAST;
}
ArgumentsAccessStub stub(type);
ArgumentsAccessStub stub(
is_strict_mode() ? ArgumentsAccessStub::NEW_STRICT
: ArgumentsAccessStub::NEW_NON_STRICT);
__ CallStub(&stub);
Variable* arguments_shadow = scope()->arguments_shadow();
if (arguments_shadow != NULL) {
// Duplicate the value; move-to-slot operation might clobber registers.
__ mov(r3, r0);
Move(arguments_shadow->AsSlot(), r3, r1, r2);
}
Move(arguments->AsSlot(), r0, r1, r2);
}
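
The selection logic removed above sent functions with duplicate formal parameters to the slow stub, since their arguments cannot use the fast mapped layout. A sloppy-mode sketch (strict mode forbids duplicate parameters outright):

// Unique parameter names: was eligible for NEW_NON_STRICT_FAST.
function fast(a, b) { return arguments[0]; }

// Duplicate parameter names (has_duplicate_parameters() is true): was
// dispatched to NEW_NON_STRICT_SLOW, which tail-calls
// Runtime::kNewArgumentsFast unconditionally.
function slow(a, a) { return arguments[1]; }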
@ -383,7 +383,7 @@ void FullCodeGenerator::TestContext::Plug(Slot* slot) const {
// For simplicity we always test the accumulator register.
codegen()->Move(result_register(), slot);
codegen()->PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
codegen()->DoTest(true_label_, false_label_, fall_through_);
codegen()->DoTest(this);
}
@ -417,7 +417,7 @@ void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
if (true_label_ != fall_through_) __ b(true_label_);
} else {
__ LoadRoot(result_register(), index);
codegen()->DoTest(true_label_, false_label_, fall_through_);
codegen()->DoTest(this);
}
}
@ -464,7 +464,7 @@ void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
} else {
// For simplicity we always test the accumulator register.
__ mov(result_register(), Operand(lit));
codegen()->DoTest(true_label_, false_label_, fall_through_);
codegen()->DoTest(this);
}
}
@ -500,7 +500,7 @@ void FullCodeGenerator::TestContext::DropAndPlug(int count,
__ Drop(count);
__ Move(result_register(), reg);
codegen()->PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
codegen()->DoTest(true_label_, false_label_, fall_through_);
codegen()->DoTest(this);
}
@ -578,7 +578,8 @@ void FullCodeGenerator::TestContext::Plug(bool flag) const {
}
void FullCodeGenerator::DoTest(Label* if_true,
void FullCodeGenerator::DoTest(Expression* condition,
Label* if_true,
Label* if_false,
Label* fall_through) {
if (CpuFeatures::IsSupported(VFP3)) {
@ -1255,12 +1256,13 @@ void FullCodeGenerator::EmitDynamicLoadFromSlotFastCase(
void FullCodeGenerator::EmitVariableLoad(Variable* var) {
// Three cases: non-this global variables, lookup slots, and all other
// types of slots.
// Four cases: non-this global variables, lookup slots, all other
// types of slots, and parameters that rewrite to explicit property
// accesses on the arguments object.
Slot* slot = var->AsSlot();
ASSERT((var->is_global() && !var->is_this()) == (slot == NULL));
Property* property = var->AsProperty();
if (slot == NULL) {
if (var->is_global() && !var->is_this()) {
Comment cmnt(masm_, "Global variable");
// Use inline caching. Variable name is passed in r2 and the global
// object (receiver) in r0.
@ -1270,7 +1272,7 @@ void FullCodeGenerator::EmitVariableLoad(Variable* var) {
EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT, AstNode::kNoNumber);
context()->Plug(r0);
} else if (slot->type() == Slot::LOOKUP) {
} else if (slot != NULL && slot->type() == Slot::LOOKUP) {
Label done, slow;
// Generate code for loading from variables potentially shadowed
@ -1286,7 +1288,7 @@ void FullCodeGenerator::EmitVariableLoad(Variable* var) {
context()->Plug(r0);
} else {
} else if (slot != NULL) {
Comment cmnt(masm_, (slot->type() == Slot::CONTEXT)
? "Context slot"
: "Stack slot");
@ -1302,6 +1304,32 @@ void FullCodeGenerator::EmitVariableLoad(Variable* var) {
} else {
context()->Plug(slot);
}
} else {
Comment cmnt(masm_, "Rewritten parameter");
ASSERT_NOT_NULL(property);
// Rewritten parameter accesses are of the form "slot[literal]".
// Assert that the object is in a slot.
Variable* object_var = property->obj()->AsVariableProxy()->AsVariable();
ASSERT_NOT_NULL(object_var);
Slot* object_slot = object_var->AsSlot();
ASSERT_NOT_NULL(object_slot);
// Load the object.
Move(r1, object_slot);
// Assert that the key is a smi.
Literal* key_literal = property->key()->AsLiteral();
ASSERT_NOT_NULL(key_literal);
ASSERT(key_literal->handle()->IsSmi());
// Load the key.
__ mov(r0, Operand(key_literal->handle()));
// Call keyed load IC. It has arguments key and receiver in r0 and r1.
Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
EmitCallIC(ic, RelocInfo::CODE_TARGET, GetPropertyId(property));
context()->Plug(r0);
}
}
@ -1542,7 +1570,7 @@ void FullCodeGenerator::VisitAssignment(Assignment* expr) {
}
// Left-hand side can only be a property, a global or a (parameter or local)
// slot.
// slot. Variables with rewrite to .arguments are treated as KEYED_PROPERTY.
enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
LhsKind assign_type = VARIABLE;
Property* property = expr->target()->AsProperty();
@ -1568,13 +1596,27 @@ void FullCodeGenerator::VisitAssignment(Assignment* expr) {
break;
case KEYED_PROPERTY:
if (expr->is_compound()) {
VisitForStackValue(property->obj());
VisitForAccumulatorValue(property->key());
if (property->is_arguments_access()) {
VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
__ ldr(r0, EmitSlotSearch(obj_proxy->var()->AsSlot(), r0));
__ push(r0);
__ mov(r0, Operand(property->key()->AsLiteral()->handle()));
} else {
VisitForStackValue(property->obj());
VisitForAccumulatorValue(property->key());
}
__ ldr(r1, MemOperand(sp, 0));
__ push(r0);
} else {
VisitForStackValue(property->obj());
VisitForStackValue(property->key());
if (property->is_arguments_access()) {
VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
__ ldr(r1, EmitSlotSearch(obj_proxy->var()->AsSlot(), r0));
__ mov(r0, Operand(property->key()->AsLiteral()->handle()));
__ Push(r1, r0);
} else {
VisitForStackValue(property->obj());
VisitForStackValue(property->key());
}
}
break;
}
@ -1781,7 +1823,7 @@ void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
}
// Left-hand side can only be a property, a global or a (parameter or local)
// slot.
// slot. Variables with rewrite to .arguments are treated as KEYED_PROPERTY.
enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
LhsKind assign_type = VARIABLE;
Property* prop = expr->AsProperty();
@ -1812,10 +1854,20 @@ void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
}
case KEYED_PROPERTY: {
__ push(r0); // Preserve value.
VisitForStackValue(prop->obj());
VisitForAccumulatorValue(prop->key());
__ mov(r1, r0);
__ pop(r2);
if (prop->is_synthetic()) {
ASSERT(prop->obj()->AsVariableProxy() != NULL);
ASSERT(prop->key()->AsLiteral() != NULL);
{ AccumulatorValueContext for_object(this);
EmitVariableLoad(prop->obj()->AsVariableProxy()->var());
}
__ mov(r2, r0);
__ mov(r1, Operand(prop->key()->AsLiteral()->handle()));
} else {
VisitForStackValue(prop->obj());
VisitForAccumulatorValue(prop->key());
__ mov(r1, r0);
__ pop(r2);
}
__ pop(r0); // Restore value.
Handle<Code> ic = is_strict_mode()
? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
@ -1831,6 +1883,8 @@ void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
void FullCodeGenerator::EmitVariableAssignment(Variable* var,
Token::Value op) {
// Left-hand sides that rewrite to explicit property accesses do not reach
// here.
ASSERT(var != NULL);
ASSERT(var->is_global() || var->AsSlot() != NULL);
@ -3770,7 +3824,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
}
// Expression can only be a property, a global or a (parameter or local)
// slot.
// slot. Variables with rewrite to .arguments are treated as KEYED_PROPERTY.
enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
LhsKind assign_type = VARIABLE;
Property* prop = expr->expression()->AsProperty();
@ -3798,8 +3852,15 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
__ push(r0);
EmitNamedPropertyLoad(prop);
} else {
VisitForStackValue(prop->obj());
VisitForAccumulatorValue(prop->key());
if (prop->is_arguments_access()) {
VariableProxy* obj_proxy = prop->obj()->AsVariableProxy();
__ ldr(r0, EmitSlotSearch(obj_proxy->var()->AsSlot(), r0));
__ push(r0);
__ mov(r0, Operand(prop->key()->AsLiteral()->handle()));
} else {
VisitForStackValue(prop->obj());
VisitForAccumulatorValue(prop->key());
}
__ ldr(r1, MemOperand(sp, 0));
__ push(r0);
EmitKeyedPropertyLoad(prop);

View File

@ -887,162 +887,6 @@ void LoadIC::GenerateMiss(MacroAssembler* masm) {
}
static MemOperand GenerateMappedArgumentsLookup(MacroAssembler* masm,
Register object,
Register key,
Register scratch1,
Register scratch2,
Register scratch3,
Label* unmapped_case,
Label* slow_case) {
Heap* heap = masm->isolate()->heap();
// Check that the receiver isn't a smi.
__ JumpIfSmi(object, slow_case);
// Check that the key is a positive smi.
__ tst(key, Operand(0x80000001));
__ b(ne, slow_case);
// Load the elements into scratch1 and check its map.
Handle<Map> arguments_map(heap->non_strict_arguments_elements_map());
__ ldr(scratch1, FieldMemOperand(object, JSObject::kElementsOffset));
__ CheckMap(scratch1, scratch2, arguments_map, slow_case, DONT_DO_SMI_CHECK);
// Check if element is in the range of mapped arguments. If not, jump
// to the unmapped lookup with the parameter map in scratch1.
__ ldr(scratch2, FieldMemOperand(scratch1, FixedArray::kLengthOffset));
__ sub(scratch2, scratch2, Operand(Smi::FromInt(2)));
__ cmp(key, Operand(scratch2));
__ b(cs, unmapped_case);
// Load element index and check whether it is the hole.
const int kOffset =
FixedArray::kHeaderSize + 2 * kPointerSize - kHeapObjectTag;
__ mov(scratch3, Operand(kPointerSize >> 1));
__ mul(scratch3, key, scratch3);
__ add(scratch3, scratch3, Operand(kOffset));
__ ldr(scratch2, MemOperand(scratch1, scratch3));
__ LoadRoot(scratch3, Heap::kTheHoleValueRootIndex);
__ cmp(scratch2, scratch3);
__ b(eq, unmapped_case);
// Load value from context and return it. We can reuse scratch1 because
// we do not jump to the unmapped lookup (which requires the parameter
// map in scratch1).
__ ldr(scratch1, FieldMemOperand(scratch1, FixedArray::kHeaderSize));
__ mov(scratch3, Operand(kPointerSize >> 1));
__ mul(scratch3, scratch2, scratch3);
__ add(scratch3, scratch3, Operand(Context::kHeaderSize - kHeapObjectTag));
return MemOperand(scratch1, scratch3);
}
static MemOperand GenerateUnmappedArgumentsLookup(MacroAssembler* masm,
Register key,
Register parameter_map,
Register scratch,
Label* slow_case) {
// Element is in arguments backing store, which is referenced by the
// second element of the parameter_map. The parameter_map register
// must be loaded with the parameter map of the arguments object and is
// overwritten.
const int kBackingStoreOffset = FixedArray::kHeaderSize + kPointerSize;
Register backing_store = parameter_map;
__ ldr(backing_store, FieldMemOperand(parameter_map, kBackingStoreOffset));
__ ldr(scratch, FieldMemOperand(backing_store, FixedArray::kLengthOffset));
__ cmp(key, Operand(scratch));
__ b(cs, slow_case);
__ mov(scratch, Operand(kPointerSize >> 1));
__ mul(scratch, key, scratch);
__ add(scratch,
scratch,
Operand(FixedArray::kHeaderSize - kHeapObjectTag));
return MemOperand(backing_store, scratch);
}
void KeyedLoadIC::GenerateNonStrictArguments(MacroAssembler* masm) {
// ---------- S t a t e --------------
// -- lr : return address
// -- r0 : key
// -- r1 : receiver
// -----------------------------------
Label slow, notin;
MemOperand mapped_location =
GenerateMappedArgumentsLookup(masm, r1, r0, r2, r3, r4, &notin, &slow);
__ ldr(r0, mapped_location);
__ Ret();
__ bind(&notin);
// The unmapped lookup expects that the parameter map is in r2.
MemOperand unmapped_location =
GenerateUnmappedArgumentsLookup(masm, r0, r2, r3, &slow);
__ ldr(r2, unmapped_location);
__ LoadRoot(r3, Heap::kTheHoleValueRootIndex);
__ cmp(r2, r3);
__ b(eq, &slow);
__ mov(r0, r2);
__ Ret();
__ bind(&slow);
GenerateMiss(masm, false);
}
void KeyedStoreIC::GenerateNonStrictArguments(MacroAssembler* masm) {
// ---------- S t a t e --------------
// -- r0 : value
// -- r1 : key
// -- r2 : receiver
// -- lr : return address
// -----------------------------------
Label slow, notin;
MemOperand mapped_location =
GenerateMappedArgumentsLookup(masm, r2, r1, r3, r4, r5, &notin, &slow);
__ str(r0, mapped_location);
__ Ret();
__ bind(&notin);
// The unmapped lookup expects that the parameter map is in r3.
MemOperand unmapped_location =
GenerateUnmappedArgumentsLookup(masm, r1, r3, r4, &slow);
__ str(r0, unmapped_location);
__ Ret();
__ bind(&slow);
GenerateMiss(masm, false);
}
void KeyedCallIC::GenerateNonStrictArguments(MacroAssembler* masm,
int argc) {
// ----------- S t a t e -------------
// -- r2 : name
// -- lr : return address
// -----------------------------------
Label slow, notin;
// Load receiver.
__ ldr(r1, MemOperand(sp, argc * kPointerSize));
MemOperand mapped_location =
GenerateMappedArgumentsLookup(masm, r1, r2, r3, r4, r5, &notin, &slow);
__ ldr(r1, mapped_location);
GenerateFunctionTailCall(masm, argc, &slow, r3);
__ bind(&notin);
// The unmapped lookup expects that the parameter map is in r3.
MemOperand unmapped_location =
GenerateUnmappedArgumentsLookup(masm, r2, r3, r4, &slow);
__ ldr(r1, unmapped_location);
__ LoadRoot(r3, Heap::kTheHoleValueRootIndex);
__ cmp(r1, r3);
__ b(eq, &slow);
GenerateFunctionTailCall(masm, argc, &slow, r3);
__ bind(&slow);
GenerateMiss(masm, argc);
}
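
The mapped/unmapped split implemented by the removed lookups above is observable from JS: deleting an index punches a hole in the parameter map, after which accesses fall through to the backing store. A sloppy-mode sketch:

function h(a) {
  arguments[0] = 1;      // mapped: writes through to the slot holding a
  delete arguments[0];   // unmaps index 0 (the-hole in the parameter map)
  arguments[0] = 2;      // unmapped: stored in the backing store only
  return a;              // 1: writes after the delete no longer alias a
}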
Object* KeyedLoadIC_Miss(Arguments args);
void KeyedLoadIC::GenerateMiss(MacroAssembler* masm, bool force_generic) {
// ---------- S t a t e --------------
// -- lr : return address

View File

@ -2674,7 +2674,6 @@ void LCodeGen::DoLoadKeyedSpecializedArrayElement(
case JSObject::FAST_DOUBLE_ELEMENTS:
case JSObject::FAST_ELEMENTS:
case JSObject::DICTIONARY_ELEMENTS:
case JSObject::NON_STRICT_ARGUMENTS_ELEMENTS:
UNREACHABLE();
break;
}
@ -3479,7 +3478,6 @@ void LCodeGen::DoStoreKeyedSpecializedArrayElement(
case JSObject::FAST_DOUBLE_ELEMENTS:
case JSObject::FAST_ELEMENTS:
case JSObject::DICTIONARY_ELEMENTS:
case JSObject::NON_STRICT_ARGUMENTS_ELEMENTS:
UNREACHABLE();
break;
}

View File

@ -3429,7 +3429,6 @@ static bool IsElementTypeSigned(JSObject::ElementsKind elements_kind) {
case JSObject::FAST_ELEMENTS:
case JSObject::FAST_DOUBLE_ELEMENTS:
case JSObject::DICTIONARY_ELEMENTS:
case JSObject::NON_STRICT_ARGUMENTS_ELEMENTS:
UNREACHABLE();
return false;
}
@ -3515,7 +3514,6 @@ void KeyedLoadStubCompiler::GenerateLoadExternalArray(
case JSObject::FAST_ELEMENTS:
case JSObject::FAST_DOUBLE_ELEMENTS:
case JSObject::DICTIONARY_ELEMENTS:
case JSObject::NON_STRICT_ARGUMENTS_ELEMENTS:
UNREACHABLE();
break;
}
@ -3857,7 +3855,6 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray(
case JSObject::FAST_ELEMENTS:
case JSObject::FAST_DOUBLE_ELEMENTS:
case JSObject::DICTIONARY_ELEMENTS:
case JSObject::NON_STRICT_ARGUMENTS_ELEMENTS:
UNREACHABLE();
break;
}
@ -3922,7 +3919,6 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray(
case JSObject::FAST_ELEMENTS:
case JSObject::FAST_DOUBLE_ELEMENTS:
case JSObject::DICTIONARY_ELEMENTS:
case JSObject::NON_STRICT_ARGUMENTS_ELEMENTS:
UNREACHABLE();
break;
}
@ -4061,7 +4057,6 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray(
case JSObject::FAST_ELEMENTS:
case JSObject::FAST_DOUBLE_ELEMENTS:
case JSObject::DICTIONARY_ELEMENTS:
case JSObject::NON_STRICT_ARGUMENTS_ELEMENTS:
UNREACHABLE();
break;
}

View File

@ -587,7 +587,7 @@ bool CountOperation::IsInlineable() const {
void Property::RecordTypeFeedback(TypeFeedbackOracle* oracle) {
// Record type feedback from the oracle in the AST.
is_monomorphic_ = oracle->LoadIsMonomorphic(this);
is_monomorphic_ = oracle->LoadIsMonomorphicNormal(this);
if (key()->IsPropertyName()) {
if (oracle->LoadIsBuiltin(this, Builtins::kLoadIC_ArrayLength)) {
is_array_length_ = true;
@ -614,7 +614,7 @@ void Property::RecordTypeFeedback(TypeFeedbackOracle* oracle) {
void Assignment::RecordTypeFeedback(TypeFeedbackOracle* oracle) {
Property* prop = target()->AsProperty();
ASSERT(prop != NULL);
is_monomorphic_ = oracle->StoreIsMonomorphic(this);
is_monomorphic_ = oracle->StoreIsMonomorphicNormal(this);
if (prop->key()->IsPropertyName()) {
Literal* lit_key = prop->key()->AsLiteral();
ASSERT(lit_key != NULL && lit_key->handle()->IsString());
@ -629,7 +629,7 @@ void Assignment::RecordTypeFeedback(TypeFeedbackOracle* oracle) {
void CountOperation::RecordTypeFeedback(TypeFeedbackOracle* oracle) {
is_monomorphic_ = oracle->StoreIsMonomorphic(this);
is_monomorphic_ = oracle->StoreIsMonomorphicNormal(this);
if (is_monomorphic_) {
// Record receiver type for monomorphic keyed loads.
monomorphic_receiver_type_ = oracle->StoreMonomorphicReceiverType(this);

View File

@ -210,7 +210,7 @@ class Expression: public AstNode {
kTest
};
Expression() : id_(GetNextId()) {}
Expression() : id_(GetNextId()), test_id_(GetNextId()) {}
virtual int position() const {
UNREACHABLE();
@ -262,9 +262,11 @@ class Expression: public AstNode {
}
unsigned id() const { return id_; }
unsigned test_id() const { return test_id_; }
private:
unsigned id_;
unsigned test_id_;
};
@ -1030,7 +1032,16 @@ class VariableProxy: public Expression {
DECLARE_NODE_TYPE(VariableProxy)
// Type testing & conversion
Variable* AsVariable() { return (this == NULL) ? NULL : var_; }
virtual Property* AsProperty() {
return var_ == NULL ? NULL : var_->AsProperty();
}
Variable* AsVariable() {
if (this == NULL || var_ == NULL) return NULL;
Expression* rewrite = var_->rewrite();
if (rewrite == NULL || rewrite->AsSlot() != NULL) return var_;
return NULL;
}
virtual bool IsValidLeftHandSide() {
return var_ == NULL ? true : var_->IsValidLeftHandSide();
@ -1159,7 +1170,8 @@ class Property: public Expression {
is_array_length_(false),
is_string_length_(false),
is_string_access_(false),
is_function_prototype_(false) { }
is_function_prototype_(false),
is_arguments_access_(false) { }
DECLARE_NODE_TYPE(Property)
@ -1175,6 +1187,13 @@ class Property: public Expression {
bool IsStringAccess() const { return is_string_access_; }
bool IsFunctionPrototype() const { return is_function_prototype_; }
// Marks that this is actually an argument rewritten to a keyed property
// accessing the argument through the arguments shadow object.
void set_is_arguments_access(bool is_arguments_access) {
is_arguments_access_ = is_arguments_access;
}
bool is_arguments_access() const { return is_arguments_access_; }
// Type feedback information.
void RecordTypeFeedback(TypeFeedbackOracle* oracle);
virtual bool IsMonomorphic() { return is_monomorphic_; }
@ -1196,6 +1215,7 @@ class Property: public Expression {
bool is_string_length_ : 1;
bool is_string_access_ : 1;
bool is_function_prototype_ : 1;
bool is_arguments_access_ : 1;
Handle<Map> monomorphic_receiver_type_;
};
@ -1628,8 +1648,7 @@ class FunctionLiteral: public Expression {
int num_parameters,
int start_position,
int end_position,
bool is_expression,
bool has_duplicate_parameters)
bool is_expression)
: name_(name),
scope_(scope),
body_(body),
@ -1641,12 +1660,10 @@ class FunctionLiteral: public Expression {
num_parameters_(num_parameters),
start_position_(start_position),
end_position_(end_position),
is_expression_(is_expression),
function_token_position_(RelocInfo::kNoPosition),
inferred_name_(HEAP->empty_string()),
is_expression_(is_expression),
pretenure_(false),
has_duplicate_parameters_(has_duplicate_parameters) {
}
pretenure_(false) { }
DECLARE_NODE_TYPE(FunctionLiteral)
@ -1686,8 +1703,6 @@ class FunctionLiteral: public Expression {
void set_pretenure(bool value) { pretenure_ = value; }
virtual bool IsInlineable() const;
bool has_duplicate_parameters() { return has_duplicate_parameters_; }
private:
Handle<String> name_;
Scope* scope_;
@ -1699,11 +1714,10 @@ class FunctionLiteral: public Expression {
int num_parameters_;
int start_position_;
int end_position_;
bool is_expression_;
int function_token_position_;
Handle<String> inferred_name_;
bool is_expression_;
bool pretenure_;
bool has_duplicate_parameters_;
};

View File

@ -1055,21 +1055,6 @@ void Genesis::InitializeGlobal(Handle<GlobalObject> inner_global,
#endif
}
{ // --- aliased_arguments_boilerplate_
Handle<Map> old_map(global_context()->arguments_boilerplate()->map());
Handle<Map> new_map = factory->CopyMapDropTransitions(old_map);
new_map->set_pre_allocated_property_fields(2);
Handle<JSObject> result = factory->NewJSObjectFromMap(new_map);
new_map->set_elements_kind(JSObject::NON_STRICT_ARGUMENTS_ELEMENTS);
// Set up a well-formed parameter map to make assertions happy.
Handle<FixedArray> elements = factory->NewFixedArray(2);
elements->set_map(heap->non_strict_arguments_elements_map());
elements->set(0, *factory->NewFixedArray(0));
elements->set(1, *factory->NewFixedArray(0));
result->set_elements(*elements);
global_context()->set_aliased_arguments_boilerplate(*result);
}
{ // --- strict mode arguments boilerplate
const PropertyAttributes attributes =
static_cast<PropertyAttributes>(DONT_ENUM | DONT_DELETE | READ_ONLY);

View File

@ -1351,9 +1351,6 @@ static void Generate_KeyedLoadIC_IndexedInterceptor(MacroAssembler* masm) {
KeyedLoadIC::GenerateIndexedInterceptor(masm);
}
static void Generate_KeyedLoadIC_NonStrictArguments(MacroAssembler* masm) {
KeyedLoadIC::GenerateNonStrictArguments(masm);
}
static void Generate_StoreIC_Initialize(MacroAssembler* masm) {
StoreIC::GenerateInitialize(masm);
@ -1444,9 +1441,6 @@ static void Generate_KeyedStoreIC_Initialize_Strict(MacroAssembler* masm) {
KeyedStoreIC::GenerateInitialize(masm);
}
static void Generate_KeyedStoreIC_NonStrictArguments(MacroAssembler* masm) {
KeyedStoreIC::GenerateNonStrictArguments(masm);
}
#ifdef ENABLE_DEBUGGER_SUPPORT
static void Generate_LoadIC_DebugBreak(MacroAssembler* masm) {

View File

@ -131,8 +131,6 @@ enum BuiltinExtraArguments {
V(KeyedLoadIC_String, KEYED_LOAD_IC, MEGAMORPHIC, \
Code::kNoExtraICState) \
V(KeyedLoadIC_IndexedInterceptor, KEYED_LOAD_IC, MEGAMORPHIC, \
Code::kNoExtraICState) \
V(KeyedLoadIC_NonStrictArguments, KEYED_LOAD_IC, MEGAMORPHIC, \
Code::kNoExtraICState) \
\
V(StoreIC_Initialize, STORE_IC, UNINITIALIZED, \
@ -165,8 +163,6 @@ enum BuiltinExtraArguments {
kStrictMode) \
V(KeyedStoreIC_Generic_Strict, KEYED_STORE_IC, MEGAMORPHIC, \
kStrictMode) \
V(KeyedStoreIC_NonStrictArguments, KEYED_STORE_IC, MEGAMORPHIC, \
Code::kNoExtraICState) \
\
/* Uses KeyedLoadIC_Initialize; must be after in list. */ \
V(FunctionCall, BUILTIN, UNINITIALIZED, \

View File

@ -632,8 +632,7 @@ class ArgumentsAccessStub: public CodeStub {
public:
enum Type {
READ_ELEMENT,
NEW_NON_STRICT_FAST,
NEW_NON_STRICT_SLOW,
NEW_NON_STRICT,
NEW_STRICT
};
@ -647,9 +646,20 @@ class ArgumentsAccessStub: public CodeStub {
void Generate(MacroAssembler* masm);
void GenerateReadElement(MacroAssembler* masm);
void GenerateNewStrict(MacroAssembler* masm);
void GenerateNewNonStrictFast(MacroAssembler* masm);
void GenerateNewNonStrictSlow(MacroAssembler* masm);
void GenerateNewObject(MacroAssembler* masm);
int GetArgumentsBoilerplateIndex() const {
return (type_ == NEW_STRICT)
? Context::STRICT_MODE_ARGUMENTS_BOILERPLATE_INDEX
: Context::ARGUMENTS_BOILERPLATE_INDEX;
}
int GetArgumentsObjectSize() const {
if (type_ == NEW_STRICT)
return Heap::kArgumentsObjectSizeStrict;
else
return Heap::kArgumentsObjectSize;
}
const char* GetName() { return "ArgumentsAccessStub"; }

View File

@ -209,14 +209,9 @@ void ArgumentsAccessStub::Generate(MacroAssembler* masm) {
case READ_ELEMENT:
GenerateReadElement(masm);
break;
case NEW_NON_STRICT_FAST:
GenerateNewNonStrictFast(masm);
break;
case NEW_NON_STRICT_SLOW:
GenerateNewNonStrictSlow(masm);
break;
case NEW_NON_STRICT:
case NEW_STRICT:
GenerateNewStrict(masm);
GenerateNewObject(masm);
break;
}
}

View File

@ -747,8 +747,6 @@ void Compiler::SetFunctionInfo(Handle<SharedFunctionInfo> function_info,
*lit->this_property_assignments());
function_info->set_allows_lazy_compilation(lit->AllowsLazyCompilation());
function_info->set_strict_mode(lit->strict_mode());
function_info->set_uses_arguments(lit->scope()->arguments() != NULL);
function_info->set_has_duplicate_parameters(lit->has_duplicate_parameters());
}

View File

@ -74,10 +74,8 @@ void Context::set_global_proxy(JSObject* object) {
}
Handle<Object> Context::Lookup(Handle<String> name,
ContextLookupFlags flags,
int* index_,
PropertyAttributes* attributes) {
Handle<Object> Context::Lookup(Handle<String> name, ContextLookupFlags flags,
int* index_, PropertyAttributes* attributes) {
Isolate* isolate = GetIsolate();
Handle<Context> context(this, isolate);
@ -100,35 +98,53 @@ Handle<Object> Context::Lookup(Handle<String> name,
// Check extension/with/global object.
if (context->has_extension()) {
Handle<JSObject> extension = Handle<JSObject>(context->extension(),
isolate);
// Context extension objects need to behave as if they have no
// prototype. So even if we want to follow prototype chains, we
// need to only do a local lookup for context extension objects.
if ((flags & FOLLOW_PROTOTYPE_CHAIN) == 0 ||
extension->IsJSContextExtensionObject()) {
*attributes = extension->GetLocalPropertyAttribute(*name);
} else {
*attributes = extension->GetPropertyAttribute(*name);
}
if (*attributes != ABSENT) {
if (FLAG_trace_contexts) {
PrintF("=> found property in context object %p\n",
reinterpret_cast<void*>(*extension));
if (context->IsCatchContext()) {
// Catch contexts have the variable name in the extension slot.
if (name->Equals(String::cast(context->extension()))) {
if (FLAG_trace_contexts) {
PrintF("=> found in catch context\n");
}
*index_ = Context::THROWN_OBJECT_INDEX;
*attributes = NONE;
return context;
}
} else {
// Global, function, and with contexts may have an object in the
// extension slot.
Handle<JSObject> extension(JSObject::cast(context->extension()),
isolate);
// Context extension objects need to behave as if they have no
// prototype. So even if we want to follow prototype chains, we
// need to only do a local lookup for context extension objects.
if ((flags & FOLLOW_PROTOTYPE_CHAIN) == 0 ||
extension->IsJSContextExtensionObject()) {
*attributes = extension->GetLocalPropertyAttribute(*name);
} else {
*attributes = extension->GetPropertyAttribute(*name);
}
if (*attributes != ABSENT) {
// property found
if (FLAG_trace_contexts) {
PrintF("=> found property in context object %p\n",
reinterpret_cast<void*>(*extension));
}
return extension;
}
return extension;
}
}
// Only functions can have locals, parameters, and a function name.
if (context->IsFunctionContext()) {
// We may have context-local slots. Check locals in the context.
// we have context-local slots
// check non-parameter locals in context
Handle<SerializedScopeInfo> scope_info(
context->closure()->shared()->scope_info(), isolate);
Variable::Mode mode;
int index = scope_info->ContextSlotIndex(*name, &mode);
ASSERT(index < 0 || index >= MIN_CONTEXT_SLOTS);
if (index >= 0) {
// slot found
if (FLAG_trace_contexts) {
PrintF("=> found local in context slot %d (mode = %d)\n",
index, mode);
@ -141,28 +157,39 @@ Handle<Object> Context::Lookup(Handle<String> name,
// declared variables that were introduced through declaration nodes)
// must not appear here.
switch (mode) {
case Variable::INTERNAL: // Fall through.
case Variable::VAR:
*attributes = NONE;
break;
case Variable::CONST:
*attributes = READ_ONLY;
break;
case Variable::DYNAMIC:
case Variable::DYNAMIC_GLOBAL:
case Variable::DYNAMIC_LOCAL:
case Variable::TEMPORARY:
UNREACHABLE();
break;
case Variable::INTERNAL: // fall through
case Variable::VAR: *attributes = NONE; break;
case Variable::CONST: *attributes = READ_ONLY; break;
case Variable::DYNAMIC: UNREACHABLE(); break;
case Variable::DYNAMIC_GLOBAL: UNREACHABLE(); break;
case Variable::DYNAMIC_LOCAL: UNREACHABLE(); break;
case Variable::TEMPORARY: UNREACHABLE(); break;
}
return context;
}
// Check the slot corresponding to the intermediate context holding
// only the function name variable.
// check parameter locals in context
int param_index = scope_info->ParameterIndex(*name);
if (param_index >= 0) {
// slot found.
int index = scope_info->ContextSlotIndex(
isolate->heap()->arguments_shadow_symbol(), NULL);
ASSERT(index >= 0); // arguments must exist and be in the heap context
Handle<JSObject> arguments(JSObject::cast(context->get(index)),
isolate);
if (FLAG_trace_contexts) {
PrintF("=> found parameter %d in arguments object\n", param_index);
}
*index_ = param_index;
*attributes = NONE;
return arguments;
}
// check intermediate context (holding only the function name variable)
if (follow_context_chain) {
int index = scope_info->FunctionContextSlotIndex(*name);
if (index >= 0) {
// slot found
if (FLAG_trace_contexts) {
PrintF("=> found intermediate function in context slot %d\n",
index);
@ -182,6 +209,7 @@ Handle<Object> Context::Lookup(Handle<String> name,
}
} while (follow_context_chain);
// slot not found
if (FLAG_trace_contexts) {
PrintF("=> no property/slot found\n");
}
@ -196,8 +224,8 @@ bool Context::GlobalIfNotShadowedByEval(Handle<String> name) {
// before the global context and check that there are no context
// extension objects (conservative check for with statements).
while (!context->IsGlobalContext()) {
// Check if the context is a catch or with context, or has called
// non-strict eval.
// Check if the context is a catch or with context, or has introduced
// bindings by calling non-strict eval.
if (context->has_extension()) return false;
// Not a with context so it must be a function context.

View File

@ -88,8 +88,6 @@ enum ContextLookupFlags {
V(JS_ARRAY_MAP_INDEX, Map, js_array_map)\
V(REGEXP_RESULT_MAP_INDEX, Map, regexp_result_map)\
V(ARGUMENTS_BOILERPLATE_INDEX, JSObject, arguments_boilerplate) \
V(ALIASED_ARGUMENTS_BOILERPLATE_INDEX, JSObject, \
aliased_arguments_boilerplate) \
V(STRICT_MODE_ARGUMENTS_BOILERPLATE_INDEX, JSObject, \
strict_mode_arguments_boilerplate) \
V(MESSAGE_LISTENERS_INDEX, JSObject, message_listeners) \
@ -183,15 +181,20 @@ class Context: public FixedArray {
CLOSURE_INDEX,
FCONTEXT_INDEX,
PREVIOUS_INDEX,
// The extension slot is used for either the global object (in global
// contexts), eval extension object (function contexts), subject of with
// (with contexts), or the variable name (catch contexts).
EXTENSION_INDEX,
GLOBAL_INDEX,
MIN_CONTEXT_SLOTS,
// This slot holds the thrown value in catch contexts.
THROWN_OBJECT_INDEX = MIN_CONTEXT_SLOTS,
// These slots are only in global contexts.
GLOBAL_PROXY_INDEX = MIN_CONTEXT_SLOTS,
SECURITY_TOKEN_INDEX,
ARGUMENTS_BOILERPLATE_INDEX,
ALIASED_ARGUMENTS_BOILERPLATE_INDEX,
STRICT_MODE_ARGUMENTS_BOILERPLATE_INDEX,
JS_ARRAY_MAP_INDEX,
REGEXP_RESULT_MAP_INDEX,
@ -268,9 +271,9 @@ class Context: public FixedArray {
}
void set_previous(Context* context) { set(PREVIOUS_INDEX, context); }
bool has_extension() { return unchecked_extension() != NULL; }
JSObject* extension() { return JSObject::cast(unchecked_extension()); }
void set_extension(JSObject* object) { set(EXTENSION_INDEX, object); }
bool has_extension() { return extension() != NULL; }
Object* extension() { return get(EXTENSION_INDEX); }
void set_extension(Object* object) { set(EXTENSION_INDEX, object); }
GlobalObject* global() {
Object* result = get(GLOBAL_INDEX);
@ -300,6 +303,10 @@ class Context: public FixedArray {
Map* map = this->map();
return map == map->GetHeap()->catch_context_map();
}
bool IsWithContext() {
Map* map = this->map();
return map == map->GetHeap()->with_context_map();
}
// Tells whether the global context is marked with out of memory.
inline bool has_out_of_memory();
@ -388,7 +395,6 @@ class Context: public FixedArray {
private:
// Unchecked access to the slots.
Object* unchecked_previous() { return get(PREVIOUS_INDEX); }
Object* unchecked_extension() { return get(EXTENSION_INDEX); }
#ifdef DEBUG
// Bootstrapping-aware type checks.

View File

@ -0,0 +1,273 @@
// Copyright 2006-2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// TODO(cira): Rename v8Locale into LocaleInfo once we have stable API.
/**
* LocaleInfo class is an aggregate class of all i18n API calls.
* @param {Object} settings - localeID and regionID to create LocaleInfo from.
* {Array.<string>|string} settings.localeID -
* Unicode identifier of the locale.
* See http://unicode.org/reports/tr35/#BCP_47_Conformance
* {string} settings.regionID - ISO3166 region ID with addition of
* invalid, undefined and reserved region codes.
* @constructor
*/
v8Locale = function(settings) {
native function NativeJSLocale();
// Assume the user wanted to do v8Locale("sr");
if (typeof(settings) === "string") {
settings = {'localeID': settings};
}
var properties = NativeJSLocale(
v8Locale.__createSettingsOrDefault(settings, {'localeID': 'root'}));
// Keep the resolved ICU locale ID around to avoid resolving localeID to
// ICU locale ID every time BreakIterator, Collator and so forth are called.
this.__icuLocaleID = properties.icuLocaleID;
this.options = {'localeID': properties.localeID,
'regionID': properties.regionID};
};
/**
* Clones existing locale with possible overrides for some of the options.
* @param {!Object} settings - overrides for current locale settings.
* @returns {Object} - new LocaleInfo object.
*/
v8Locale.prototype.derive = function(settings) {
return new v8Locale(
v8Locale.__createSettingsOrDefault(settings, this.options));
};
/**
* v8BreakIterator class implements locale-aware segmentation.
* It is not part of the EcmaScript proposal.
* @param {Object} locale - locale object to pass to break
* iterator implementation.
* @param {string} type - type of segmentation:
* - character
* - word
* - sentence
* - line
* @private
* @constructor
*/
v8Locale.v8BreakIterator = function(locale, type) {
native function NativeJSBreakIterator();
locale = v8Locale.__createLocaleOrDefault(locale);
// BCP47 ID would work in this case, but we use ICU locale for consistency.
var iterator = NativeJSBreakIterator(locale.__icuLocaleID, type);
iterator.type = type;
return iterator;
};
/**
* Type of the break we encountered during the previous iteration.
* @type {Enum}
*/
v8Locale.v8BreakIterator.BreakType = {
'unknown': -1,
'none': 0,
'number': 100,
'word': 200,
'kana': 300,
'ideo': 400
};
/**
* Creates new v8BreakIterator based on current locale.
* @param {string} - type of segmentation. See constructor.
* @returns {Object} - new v8BreakIterator object.
*/
v8Locale.prototype.v8CreateBreakIterator = function(type) {
return new v8Locale.v8BreakIterator(this, type);
};
// TODO(jungshik): Set |collator.options| to actually recognized / resolved
// values.
/**
* Collator class implements locale-aware sorting.
* @param {Object} locale - locale object to pass to collator implementation.
* @param {Object} settings - collation flags:
* - ignoreCase
* - ignoreAccents
* - numeric
* @private
* @constructor
*/
v8Locale.Collator = function(locale, settings) {
native function NativeJSCollator();
locale = v8Locale.__createLocaleOrDefault(locale);
var collator = NativeJSCollator(
locale.__icuLocaleID, v8Locale.__createSettingsOrDefault(settings, {}));
return collator;
};
/**
* Creates new Collator based on current locale.
* @param {Object} - collation flags. See constructor.
* @returns {Object} - new Collator object.
*/
v8Locale.prototype.createCollator = function(settings) {
return new v8Locale.Collator(this, settings);
};
/**
* DateTimeFormat class implements locale-aware date and time formatting.
* Constructor is not part of public API.
* @param {Object} locale - locale object to pass to formatter.
* @param {Object} settings - formatting flags:
* - skeleton
* - dateType
* - timeType
* - calendar
* @private
* @constructor
*/
v8Locale.__DateTimeFormat = function(locale, settings) {
native function NativeJSDateTimeFormat();
settings = v8Locale.__createSettingsOrDefault(settings, {});
var cleanSettings = {};
if (settings.hasOwnProperty('skeleton')) {
cleanSettings['skeleton'] = settings['skeleton'];
} else {
cleanSettings = {};
if (settings.hasOwnProperty('dateType')) {
var dt = settings['dateType'];
if (!/^short|medium|long|full$/.test(dt)) dt = 'short';
cleanSettings['dateType'] = dt;
}
if (settings.hasOwnProperty('timeType')) {
var tt = settings['timeType'];
if (!/^short|medium|long|full$/.test(tt)) tt = 'short';
cleanSettings['timeType'] = tt;
}
}
// Default is to show short date and time.
if (!cleanSettings.hasOwnProperty('skeleton') &&
!cleanSettings.hasOwnProperty('dateType') &&
!cleanSettings.hasOwnProperty('timeType')) {
cleanSettings = {'dateType': 'short',
'timeType': 'short'};
}
locale = v8Locale.__createLocaleOrDefault(locale);
var formatter = NativeJSDateTimeFormat(locale.__icuLocaleID, cleanSettings);
// NativeJSDateTimeFormat creates formatter.options for us, we just need
// to append actual settings to it.
for (var key in cleanSettings) {
formatter.options[key] = cleanSettings[key];
}
/**
* Clones existing date time format with possible overrides for some
* of the options.
* @param {!Object} overrideSettings - overrides for current format settings.
* @returns {Object} - new DateTimeFormat object.
* @public
*/
formatter.derive = function(overrideSettings) {
// To remove a setting, the user can specify undefined as its value. We'll
// remove it from the map in that case.
for (var prop in overrideSettings) {
if (settings.hasOwnProperty(prop) && !overrideSettings[prop]) {
delete settings[prop];
}
}
return new v8Locale.__DateTimeFormat(
locale, v8Locale.__createSettingsOrDefault(overrideSettings, settings));
};
return formatter;
};
/**
* Creates new DateTimeFormat based on current locale.
* @param {Object} - formatting flags. See constructor.
* @returns {Object} - new DateTimeFormat object.
*/
v8Locale.prototype.createDateTimeFormat = function(settings) {
return new v8Locale.__DateTimeFormat(this, settings);
};
/**
* Merges user settings and defaults.
* Settings that are not of object type are rejected.
* Actual property values are not validated, but whitespace is trimmed if they
* are strings.
* @param {!Object} settings - user provided settings.
* @param {!Object} defaults - default values for this type of settings.
* @returns {Object} - valid settings object.
* @private
*/
v8Locale.__createSettingsOrDefault = function(settings, defaults) {
if (!settings || typeof(settings) !== 'object') {
return defaults;
}
for (var key in defaults) {
if (!settings.hasOwnProperty(key)) {
settings[key] = defaults[key];
}
}
// Clean up settings.
for (var key in settings) {
// Trim whitespace.
if (typeof(settings[key]) === "string") {
settings[key] = settings[key].trim();
}
// Remove all properties that are set to undefined/null. This allows the
// derive method to remove a setting we don't need anymore.
if (!settings[key]) {
delete settings[key];
}
}
return settings;
};
/**
* If locale is valid (defined and of v8Locale type) we return it. If not
* we create default locale and return it.
* @param {!Object} locale - user provided locale.
* @returns {Object} - v8Locale object.
* @private
*/
v8Locale.__createLocaleOrDefault = function(locale) {
if (!locale || !(locale instanceof v8Locale)) {
return new v8Locale();
} else {
return locale;
}
};
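
A brief usage sketch of the API defined in this file, assuming an environment where the i18n extension is installed (results depend on the ICU data available at runtime):

var locale = new v8Locale('sr');               // or new v8Locale({localeID: 'sr'})
var enUS = locale.derive({localeID: 'en-US'});
var formatter = enUS.createDateTimeFormat({dateType: 'short',
                                           timeType: 'short'});
var collator = enUS.createCollator({ignoreCase: true});
var words = enUS.v8CreateBreakIterator('word');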

View File

@ -250,10 +250,11 @@ Handle<Context> Factory::NewFunctionContext(int length,
Handle<Context> Factory::NewCatchContext(Handle<Context> previous,
Handle<JSObject> extension) {
Handle<String> name,
Handle<Object> thrown_object) {
CALL_HEAP_FUNCTION(
isolate(),
isolate()->heap()->AllocateCatchContext(*previous, *extension),
isolate()->heap()->AllocateCatchContext(*previous, *name, *thrown_object),
Context);
}

View File

@ -151,7 +151,8 @@ class Factory {
// Create a catch context.
Handle<Context> NewCatchContext(Handle<Context> previous,
Handle<JSObject> extension);
Handle<String> name,
Handle<Object> thrown_object);
// Create a 'with' context.
Handle<Context> NewWithContext(Handle<Context> previous,

View File

@ -441,7 +441,7 @@ void FullCodeGenerator::TestContext::Plug(Register reg) const {
// For simplicity we always test the accumulator register.
__ Move(result_register(), reg);
codegen()->PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
codegen()->DoTest(true_label_, false_label_, fall_through_);
codegen()->DoTest(this);
}
@ -463,7 +463,7 @@ void FullCodeGenerator::TestContext::PlugTOS() const {
// For simplicity we always test the accumulator register.
__ pop(result_register());
codegen()->PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
codegen()->DoTest(true_label_, false_label_, fall_through_);
codegen()->DoTest(this);
}
@ -513,6 +513,14 @@ void FullCodeGenerator::TestContext::PrepareTest(
}
void FullCodeGenerator::DoTest(const TestContext* context) {
DoTest(context->condition(),
context->true_label(),
context->false_label(),
context->fall_through());
}
void FullCodeGenerator::VisitDeclarations(
ZoneList<Declaration*>* declarations) {
int length = declarations->length();
@ -734,9 +742,9 @@ void FullCodeGenerator::VisitLogicalExpression(BinaryOperation* expr) {
Label discard, restore;
PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
if (is_logical_and) {
DoTest(&discard, &restore, &restore);
DoTest(left, &discard, &restore, &restore);
} else {
DoTest(&restore, &discard, &restore);
DoTest(left, &restore, &discard, &restore);
}
__ bind(&restore);
__ pop(result_register());
@ -753,9 +761,9 @@ void FullCodeGenerator::VisitLogicalExpression(BinaryOperation* expr) {
Label discard;
PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
if (is_logical_and) {
DoTest(&discard, &done, &discard);
DoTest(left, &discard, &done, &discard);
} else {
DoTest(&done, &discard, &discard);
DoTest(left, &done, &discard, &discard);
}
__ bind(&discard);
__ Drop(1);
@ -1099,9 +1107,7 @@ void FullCodeGenerator::VisitTryCatchStatement(TryCatchStatement* stmt) {
{ Comment cmnt(masm_, "[ Extend catch context");
__ Push(stmt->name());
__ push(result_register());
__ CallRuntime(Runtime::kCreateCatchExtensionObject, 2);
__ push(result_register());
__ CallRuntime(Runtime::kPushCatchContext, 1);
__ CallRuntime(Runtime::kPushCatchContext, 2);
StoreToFrameField(StandardFrameConstants::kContextOffset,
context_register());
}

View File

@ -113,6 +113,7 @@ class FullCodeGenerator: public AstVisitor {
class TryFinally;
class Finally;
class ForIn;
class TestContext;
class NestedStatement BASE_EMBEDDED {
public:
@ -298,7 +299,11 @@ class FullCodeGenerator: public AstVisitor {
// Helper function to convert a pure value into a test context. The value
// is expected on the stack or the accumulator, depending on the platform.
// See the platform-specific implementation for details.
void DoTest(Label* if_true, Label* if_false, Label* fall_through);
void DoTest(Expression* condition,
Label* if_true,
Label* if_false,
Label* fall_through);
void DoTest(const TestContext* context);
// Helper function to split control flow and avoid a branch to the
// fall-through label if it is set up.
@ -347,7 +352,7 @@ class FullCodeGenerator: public AstVisitor {
Label* if_true,
Label* if_false,
Label* fall_through) {
TestContext context(this, if_true, if_false, fall_through);
TestContext context(this, expr, if_true, if_false, fall_through);
VisitInCurrentContext(expr);
}
@ -670,11 +675,13 @@ class FullCodeGenerator: public AstVisitor {
class TestContext : public ExpressionContext {
public:
explicit TestContext(FullCodeGenerator* codegen,
Label* true_label,
Label* false_label,
Label* fall_through)
TestContext(FullCodeGenerator* codegen,
Expression* condition,
Label* true_label,
Label* false_label,
Label* fall_through)
: ExpressionContext(codegen),
condition_(condition),
true_label_(true_label),
false_label_(false_label),
fall_through_(fall_through) { }
@ -684,6 +691,7 @@ class FullCodeGenerator: public AstVisitor {
return reinterpret_cast<const TestContext*>(context);
}
Expression* condition() const { return condition_; }
Label* true_label() const { return true_label_; }
Label* false_label() const { return false_label_; }
Label* fall_through() const { return fall_through_; }
@ -704,6 +712,7 @@ class FullCodeGenerator: public AstVisitor {
virtual bool IsTest() const { return true; }
private:
Expression* condition_;
Label* true_label_;
Label* false_label_;
Label* fall_through_;


@ -507,8 +507,7 @@ Handle<Object> SetElement(Handle<JSObject> object,
}
}
CALL_HEAP_FUNCTION(object->GetIsolate(),
object->SetElement(index, *value, strict_mode, true),
Object);
object->SetElement(index, *value, strict_mode), Object);
}


@ -159,9 +159,9 @@ class JSObjectsCluster BASE_EMBEDDED {
// Their actual value is irrelevant for us.
switch (special) {
case ROOTS: return HEAP->result_symbol();
case GLOBAL_PROPERTY: return HEAP->catch_var_symbol();
case CODE: return HEAP->code_symbol();
case SELF: return HEAP->this_symbol();
case GLOBAL_PROPERTY: return HEAP->code_symbol();
case CODE: return HEAP->arguments_shadow_symbol();
case SELF: return HEAP->catch_var_symbol();
default:
UNREACHABLE();
return NULL;


@ -1849,12 +1849,6 @@ bool Heap::CreateInitialMaps() {
}
set_external_float_array_map(Map::cast(obj));
{ MaybeObject* maybe_obj =
AllocateMap(FIXED_ARRAY_TYPE, kVariableSizeSentinel);
if (!maybe_obj->ToObject(&obj)) return false;
}
set_non_strict_arguments_elements_map(Map::cast(obj));
{ MaybeObject* maybe_obj = AllocateMap(EXTERNAL_DOUBLE_ARRAY_TYPE,
ExternalArray::kAlignedSize);
if (!maybe_obj->ToObject(&obj)) return false;
@ -3942,9 +3936,12 @@ MaybeObject* Heap::AllocateFunctionContext(int length, JSFunction* function) {
MaybeObject* Heap::AllocateCatchContext(Context* previous,
JSObject* extension) {
String* name,
Object* thrown_object) {
STATIC_ASSERT(Context::MIN_CONTEXT_SLOTS == Context::THROWN_OBJECT_INDEX);
Object* result;
{ MaybeObject* maybe_result = AllocateFixedArray(Context::MIN_CONTEXT_SLOTS);
{ MaybeObject* maybe_result =
AllocateFixedArray(Context::MIN_CONTEXT_SLOTS + 1);
if (!maybe_result->ToObject(&result)) return maybe_result;
}
Context* context = reinterpret_cast<Context*>(result);
@ -3952,8 +3949,9 @@ MaybeObject* Heap::AllocateCatchContext(Context* previous,
context->set_closure(previous->closure());
context->set_fcontext(previous->fcontext());
context->set_previous(previous);
context->set_extension(extension);
context->set_extension(name);
context->set_global(previous->global());
context->set(Context::THROWN_OBJECT_INDEX, thrown_object);
return context;
}
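
The version restored here gives the catch context one slot more than the standard header: the catch variable's name goes into the extension slot and the thrown object into the extra slot, which is what the STATIC_ASSERT pinning THROWN_OBJECT_INDEX to MIN_CONTEXT_SLOTS guarantees. A minimal layout sketch, assuming the conventional five header slots; the slot numbering is illustrative, while the slot contents follow the set_* calls above:

// Catch context as a fixed array of MIN_CONTEXT_SLOTS + 1 slots:
//   slots[0] = closure        (copied from previous->closure())
//   slots[1] = fcontext       (copied from previous->fcontext())
//   slots[2] = previous
//   slots[3] = extension      = name of the catch variable
//   slots[4] = global         (copied from previous->global())
//   slots[5] = thrown_object  (THROWN_OBJECT_INDEX == MIN_CONTEXT_SLOTS)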


@ -107,7 +107,6 @@ inline Heap* _inline_get_heap_();
V(Map, external_unsigned_int_array_map, ExternalUnsignedIntArrayMap) \
V(Map, external_float_array_map, ExternalFloatArrayMap) \
V(Map, external_double_array_map, ExternalDoubleArrayMap) \
V(Map, non_strict_arguments_elements_map, NonStrictArgumentsElementsMap) \
V(Map, function_context_map, FunctionContextMap) \
V(Map, catch_context_map, CatchContextMap) \
V(Map, with_context_map, WithContextMap) \
@ -144,6 +143,7 @@ inline Heap* _inline_get_heap_();
V(StringImpl_symbol, "StringImpl") \
V(arguments_symbol, "arguments") \
V(Arguments_symbol, "Arguments") \
V(arguments_shadow_symbol, ".arguments") \
V(call_symbol, "call") \
V(apply_symbol, "apply") \
V(caller_symbol, "caller") \
@ -648,7 +648,8 @@ class Heap {
// Allocate a catch context.
MUST_USE_RESULT MaybeObject* AllocateCatchContext(Context* previous,
JSObject* extension);
String* name,
Object* thrown_object);
// Allocate a 'with' context.
MUST_USE_RESULT MaybeObject* AllocateWithContext(Context* previous,
JSObject* extension);


@ -1389,7 +1389,6 @@ void HLoadKeyedSpecializedArrayElement::PrintDataTo(
case JSObject::FAST_ELEMENTS:
case JSObject::FAST_DOUBLE_ELEMENTS:
case JSObject::DICTIONARY_ELEMENTS:
case JSObject::NON_STRICT_ARGUMENTS_ELEMENTS:
UNREACHABLE();
break;
}
@ -1475,7 +1474,6 @@ void HStoreKeyedSpecializedArrayElement::PrintDataTo(
case JSObject::FAST_ELEMENTS:
case JSObject::FAST_DOUBLE_ELEMENTS:
case JSObject::DICTIONARY_ELEMENTS:
case JSObject::NON_STRICT_ARGUMENTS_ELEMENTS:
UNREACHABLE();
break;
}


@ -1286,7 +1286,12 @@ class SparseSet {
: capacity_(capacity),
length_(0),
dense_(zone->NewArray<int>(capacity)),
sparse_(zone->NewArray<int>(capacity)) {}
sparse_(zone->NewArray<int>(capacity)) {
#ifndef NVALGRIND
// Initialize the sparse array to make valgrind happy.
memset(sparse_, 0, sizeof(sparse_[0]) * capacity);
#endif
}
bool Contains(int n) const {
ASSERT(0 <= n && n < capacity_);
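
The memset added to the constructor above is there because this class is a sparse set in the Briggs and Torczon style: Contains() may read a sparse_ entry that was never written, and the dense_ cross-check makes that read safe, but Valgrind cannot distinguish the deliberate uninitialized read from a bug. A compilable sketch of the idea, with illustrative names rather than V8's exact code:

#include <cassert>

class SparseSetSketch {
 public:
  explicit SparseSetSketch(int capacity)
      : capacity_(capacity),
        length_(0),
        dense_(new int[capacity]),
        sparse_(new int[capacity]) {}
  ~SparseSetSketch() {
    delete[] dense_;
    delete[] sparse_;
  }
  SparseSetSketch(const SparseSetSketch&) = delete;
  SparseSetSketch& operator=(const SparseSetSketch&) = delete;

  bool Contains(int n) const {
    assert(0 <= n && n < capacity_);
    int d = sparse_[n];  // May be uninitialized garbage; validated next line.
    return 0 <= d && d < length_ && dense_[d] == n;
  }

  void Add(int n) {
    if (Contains(n)) return;
    dense_[length_] = n;   // Record the member in insertion order...
    sparse_[n] = length_;  // ...and point its sparse slot at that record.
    ++length_;
  }

 private:
  int capacity_;
  int length_;
  int* dense_;
  int* sparse_;
};
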
@ -2007,9 +2012,10 @@ FunctionState::FunctionState(HGraphBuilder* owner,
HBasicBlock* if_false = owner->graph()->CreateBasicBlock();
if_true->MarkAsInlineReturnTarget();
if_false->MarkAsInlineReturnTarget();
Expression* cond = TestContext::cast(owner->ast_context())->condition();
// The AstContext constructor pushed on the context stack. This newed
// instance is the reason that AstContext can't be BASE_EMBEDDED.
test_context_ = new TestContext(owner, if_true, if_false);
test_context_ = new TestContext(owner, cond, if_true, if_false);
} else {
function_return_ = owner->graph()->CreateBasicBlock();
function_return()->MarkAsInlineReturnTarget();
@ -2179,7 +2185,7 @@ void HGraphBuilder::VisitForTypeOf(Expression* expr) {
void HGraphBuilder::VisitForControl(Expression* expr,
HBasicBlock* true_block,
HBasicBlock* false_block) {
TestContext for_test(this, true_block, false_block);
TestContext for_test(this, expr, true_block, false_block);
Visit(expr);
}
@ -2380,13 +2386,18 @@ void HGraphBuilder::SetupScope(Scope* scope) {
// Handle the arguments and arguments shadow variables specially (they do
// not have declarations).
if (scope->arguments() != NULL) {
if (!scope->arguments()->IsStackAllocated()) {
if (!scope->arguments()->IsStackAllocated() ||
(scope->arguments_shadow() != NULL &&
!scope->arguments_shadow()->IsStackAllocated())) {
return Bailout("context-allocated arguments");
}
HArgumentsObject* object = new(zone()) HArgumentsObject;
AddInstruction(object);
graph()->SetArgumentsObject(object);
environment()->Bind(scope->arguments(), object);
if (scope->arguments_shadow() != NULL) {
environment()->Bind(scope->arguments_shadow(), object);
}
}
}
@ -3509,20 +3520,6 @@ void HGraphBuilder::HandleCompoundAssignment(Assignment* expr) {
} else if (var->IsStackAllocated()) {
Bind(var, Top());
} else if (var->IsContextSlot()) {
// Bail out if we try to mutate a parameter value in a function using
// the arguments object. We do not (yet) correctly handle the
// arguments property of the function.
if (info()->scope()->arguments() != NULL) {
// Parameters will rewrite to context slots. We have no direct way
// to detect that the variable is a parameter.
int count = info()->scope()->num_parameters();
for (int i = 0; i < count; ++i) {
if (var == info()->scope()->parameter(i)) {
Bailout("assignment to parameter, function uses arguments object");
}
}
}
HValue* context = BuildContextChainWalk(var);
int index = var->AsSlot()->index();
HStoreContextSlot* instr =
@ -3646,20 +3643,6 @@ void HGraphBuilder::VisitAssignment(Assignment* expr) {
} else if (var->IsContextSlot()) {
ASSERT(var->mode() != Variable::CONST);
// Bail out if we try to mutate a parameter value in a function using
// the arguments object. We do not (yet) correctly handle the
// arguments property of the function.
if (info()->scope()->arguments() != NULL) {
// Parameters will rewrite to context slots. We have no direct way
// to detect that the variable is a parameter.
int count = info()->scope()->num_parameters();
for (int i = 0; i < count; ++i) {
if (var == info()->scope()->parameter(i)) {
Bailout("assignment to parameter, function uses arguments object");
}
}
}
CHECK_ALIVE(VisitForValue(expr->value()));
HValue* context = BuildContextChainWalk(var);
int index = var->AsSlot()->index();
@ -3925,7 +3908,6 @@ HInstruction* HGraphBuilder::BuildStoreKeyedSpecializedArrayElement(
case JSObject::FAST_ELEMENTS:
case JSObject::FAST_DOUBLE_ELEMENTS:
case JSObject::DICTIONARY_ELEMENTS:
case JSObject::NON_STRICT_ARGUMENTS_ELEMENTS:
UNREACHABLE();
break;
}
@ -4822,7 +4804,7 @@ void HGraphBuilder::VisitDelete(UnaryOperation* expr) {
// Result of deleting parameters is false, even when they rewrite
// to accesses on the arguments object.
ast_context()->ReturnValue(graph()->GetConstantFalse());
} else {
CHECK_ALIVE(VisitForValue(prop->obj()));
CHECK_ALIVE(VisitForValue(prop->key()));
HValue* key = Pop();
@ -5007,20 +4989,6 @@ void HGraphBuilder::VisitCountOperation(CountOperation* expr) {
} else if (var->IsStackAllocated()) {
Bind(var, after);
} else if (var->IsContextSlot()) {
// Bail out if we try to mutate a parameter value in a function using
// the arguments object. We do not (yet) correctly handle the
// arguments property of the function.
if (info()->scope()->arguments() != NULL) {
// Parameters will rewrite to context slots. We have no direct way
// to detect that the variable is a parameter.
int count = info()->scope()->num_parameters();
for (int i = 0; i < count; ++i) {
if (var == info()->scope()->parameter(i)) {
Bailout("assignment to parameter, function uses arguments object");
}
}
}
HValue* context = BuildContextChainWalk(var);
int index = var->AsSlot()->index();
HStoreContextSlot* instr =


@ -551,9 +551,11 @@ class ValueContext: public AstContext {
class TestContext: public AstContext {
public:
TestContext(HGraphBuilder* owner,
Expression* condition,
HBasicBlock* if_true,
HBasicBlock* if_false)
: AstContext(owner, Expression::kTest),
condition_(condition),
if_true_(if_true),
if_false_(if_false) {
}
@ -566,6 +568,7 @@ class TestContext: public AstContext {
return reinterpret_cast<TestContext*>(context);
}
Expression* condition() const { return condition_; }
HBasicBlock* if_true() const { return if_true_; }
HBasicBlock* if_false() const { return if_false_; }
@ -574,6 +577,7 @@ class TestContext: public AstContext {
// control flow.
void BuildBranch(HValue* value);
Expression* condition_;
HBasicBlock* if_true_;
HBasicBlock* if_false_;
};


@ -2898,259 +2898,16 @@ void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
}
void ArgumentsAccessStub::GenerateNewNonStrictSlow(MacroAssembler* masm) {
void ArgumentsAccessStub::GenerateNewObject(MacroAssembler* masm) {
// esp[0] : return address
// esp[4] : number of parameters
// esp[8] : receiver displacement
// esp[12] : function
// esp[16] : function
// Check if the calling frame is an arguments adaptor frame.
Label runtime;
__ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
__ mov(ecx, Operand(edx, StandardFrameConstants::kContextOffset));
__ cmp(Operand(ecx), Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
__ j(not_equal, &runtime, Label::kNear);
// Patch the arguments.length and the parameters pointer.
__ mov(ecx, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset));
__ mov(Operand(esp, 1 * kPointerSize), ecx);
__ lea(edx, Operand(edx, ecx, times_2,
StandardFrameConstants::kCallerSPOffset));
__ mov(Operand(esp, 2 * kPointerSize), edx);
__ bind(&runtime);
__ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);
}
void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) {
// esp[0] : return address
// esp[4] : number of parameters (tagged)
// esp[8] : receiver displacement
// esp[12] : function
// ebx = parameter count (tagged)
__ mov(ebx, Operand(esp, 1 * kPointerSize));
// Check if the calling frame is an arguments adaptor frame.
// TODO(rossberg): Factor out some of the bits that are shared with the other
// Generate* functions.
Label runtime;
Label adaptor_frame, try_allocate;
__ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
__ mov(ecx, Operand(edx, StandardFrameConstants::kContextOffset));
__ cmp(Operand(ecx), Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
__ j(equal, &adaptor_frame, Label::kNear);
// No adaptor, parameter count = argument count.
__ mov(ecx, ebx);
__ jmp(&try_allocate, Label::kNear);
// We have an adaptor frame. Patch the parameters pointer.
__ bind(&adaptor_frame);
__ mov(ecx, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset));
__ lea(edx, Operand(edx, ecx, times_2,
StandardFrameConstants::kCallerSPOffset));
__ mov(Operand(esp, 2 * kPointerSize), edx);
// ebx = parameter count (tagged)
// ecx = argument count (tagged)
// esp[4] = parameter count (tagged)
// esp[8] = address of receiver argument
// Compute the mapped parameter count = min(ebx, ecx) in ebx.
__ cmp(ebx, Operand(ecx));
__ j(less_equal, &try_allocate, Label::kNear);
__ mov(ebx, ecx);
__ bind(&try_allocate);
// Save mapped parameter count.
__ push(ebx);
// Compute the sizes of backing store, parameter map, and arguments object.
// 1. Parameter map, has 2 extra words containing context and backing store.
const int kParameterMapHeaderSize =
FixedArray::kHeaderSize + 2 * kPointerSize;
Label no_parameter_map;
__ test(ebx, Operand(ebx));
__ j(zero, &no_parameter_map, Label::kNear);
__ lea(ebx, Operand(ebx, times_2, kParameterMapHeaderSize));
__ bind(&no_parameter_map);
// 2. Backing store.
__ lea(ebx, Operand(ebx, ecx, times_2, FixedArray::kHeaderSize));
// 3. Arguments object.
__ add(Operand(ebx), Immediate(Heap::kArgumentsObjectSize));
// Do the allocation of all three objects in one go.
__ AllocateInNewSpace(ebx, eax, edx, edi, &runtime, TAG_OBJECT);
// eax = address of new object(s) (tagged)
// ecx = argument count (tagged)
// esp[0] = mapped parameter count (tagged)
// esp[8] = parameter count (tagged)
// esp[12] = address of receiver argument
// Get the arguments boilerplate from the current (global) context into edi.
Label has_mapped_parameters, copy;
__ mov(edi, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
__ mov(edi, FieldOperand(edi, GlobalObject::kGlobalContextOffset));
__ mov(ebx, Operand(esp, 0 * kPointerSize));
__ test(ebx, Operand(ebx));
__ j(not_zero, &has_mapped_parameters, Label::kNear);
__ mov(edi, Operand(edi,
Context::SlotOffset(Context::ARGUMENTS_BOILERPLATE_INDEX)));
__ jmp(&copy, Label::kNear);
__ bind(&has_mapped_parameters);
__ mov(edi, Operand(edi,
Context::SlotOffset(Context::ALIASED_ARGUMENTS_BOILERPLATE_INDEX)));
__ bind(&copy);
// eax = address of new object (tagged)
// ebx = mapped parameter count (tagged)
// ecx = argument count (tagged)
// edi = address of boilerplate object (tagged)
// esp[0] = mapped parameter count (tagged)
// esp[8] = parameter count (tagged)
// esp[12] = address of receiver argument
// Copy the JS object part.
for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) {
__ mov(edx, FieldOperand(edi, i));
__ mov(FieldOperand(eax, i), edx);
}
// Setup the callee in-object property.
STATIC_ASSERT(Heap::kArgumentsCalleeIndex == 1);
__ mov(edx, Operand(esp, 4 * kPointerSize));
__ mov(FieldOperand(eax, JSObject::kHeaderSize +
Heap::kArgumentsCalleeIndex * kPointerSize),
edx);
// Use the length (smi tagged) and set that as an in-object property too.
STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
__ mov(FieldOperand(eax, JSObject::kHeaderSize +
Heap::kArgumentsLengthIndex * kPointerSize),
ecx);
// Setup the elements pointer in the allocated arguments object.
// If we allocated a parameter map, edi will point there, otherwise to the
// backing store.
__ lea(edi, Operand(eax, Heap::kArgumentsObjectSize));
__ mov(FieldOperand(eax, JSObject::kElementsOffset), edi);
// eax = address of new object (tagged)
// ebx = mapped parameter count (tagged)
// ecx = argument count (tagged)
// edi = address of parameter map or backing store (tagged)
// esp[0] = mapped parameter count (tagged)
// esp[8] = parameter count (tagged)
// esp[12] = address of receiver argument
// Free a register.
__ push(eax);
// Initialize parameter map. If there are no mapped arguments, we're done.
Label skip_parameter_map;
__ test(ebx, Operand(ebx));
__ j(zero, &skip_parameter_map);
__ mov(FieldOperand(edi, FixedArray::kMapOffset),
Immediate(FACTORY->non_strict_arguments_elements_map()));
__ lea(eax, Operand(ebx, reinterpret_cast<intptr_t>(Smi::FromInt(2))));
__ mov(FieldOperand(edi, FixedArray::kLengthOffset), eax);
__ mov(FieldOperand(edi, FixedArray::kHeaderSize + 0 * kPointerSize), esi);
__ lea(eax, Operand(edi, ebx, times_2, kParameterMapHeaderSize));
__ mov(FieldOperand(edi, FixedArray::kHeaderSize + 1 * kPointerSize), eax);
// Copy the parameter slots and the holes in the arguments.
// We need to fill in mapped_parameter_count slots. They index the context,
// where parameters are stored in reverse order, at
// MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1
// The mapped parameters thus need to get indices
// MIN_CONTEXT_SLOTS+parameter_count-1 ..
// MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count
// We loop from right to left.
Label parameters_loop, parameters_test;
__ push(ecx);
__ mov(eax, Operand(esp, 2 * kPointerSize));
__ mov(ebx, Immediate(Smi::FromInt(Context::MIN_CONTEXT_SLOTS)));
__ add(ebx, Operand(esp, 4 * kPointerSize));
__ sub(ebx, Operand(eax));
__ mov(ecx, FACTORY->the_hole_value());
__ mov(edx, edi);
__ lea(edi, Operand(edi, eax, times_2, kParameterMapHeaderSize));
// eax = loop variable (tagged)
// ebx = mapping index (tagged)
// ecx = the hole value
// edx = address of parameter map (tagged)
// edi = address of backing store (tagged)
// esp[0] = argument count (tagged)
// esp[4] = address of new object (tagged)
// esp[8] = mapped parameter count (tagged)
// esp[16] = parameter count (tagged)
// esp[20] = address of receiver argument
__ jmp(&parameters_test, Label::kNear);
__ bind(&parameters_loop);
__ sub(Operand(eax), Immediate(Smi::FromInt(1)));
__ mov(FieldOperand(edx, eax, times_2, kParameterMapHeaderSize), ebx);
__ mov(FieldOperand(edi, eax, times_2, FixedArray::kHeaderSize), ecx);
__ add(Operand(ebx), Immediate(Smi::FromInt(1)));
__ bind(&parameters_test);
__ test(eax, Operand(eax));
__ j(not_zero, &parameters_loop, Label::kNear);
__ pop(ecx);
__ bind(&skip_parameter_map);
// ecx = argument count (tagged)
// edi = address of backing store (tagged)
// esp[0] = address of new object (tagged)
// esp[4] = mapped parameter count (tagged)
// esp[12] = parameter count (tagged)
// esp[16] = address of receiver argument
// Copy arguments header and remaining slots (if there are any).
__ mov(FieldOperand(edi, FixedArray::kMapOffset),
Immediate(FACTORY->fixed_array_map()));
__ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx);
Label arguments_loop, arguments_test;
__ mov(ebx, Operand(esp, 1 * kPointerSize));
__ mov(edx, Operand(esp, 4 * kPointerSize));
__ sub(Operand(edx), ebx); // Is there a smarter way to do negative scaling?
__ sub(Operand(edx), ebx);
__ jmp(&arguments_test, Label::kNear);
__ bind(&arguments_loop);
__ sub(Operand(edx), Immediate(kPointerSize));
__ mov(eax, Operand(edx, 0));
__ mov(FieldOperand(edi, ebx, times_2, FixedArray::kHeaderSize), eax);
__ add(Operand(ebx), Immediate(Smi::FromInt(1)));
__ bind(&arguments_test);
__ cmp(ebx, Operand(ecx));
__ j(less, &arguments_loop, Label::kNear);
// Restore.
__ pop(eax); // Address of arguments object.
__ pop(ebx); // Parameter count.
// Return and remove the on-stack parameters.
__ ret(3 * kPointerSize);
// Do the runtime call to allocate the arguments object.
__ bind(&runtime);
__ pop(eax); // Remove saved parameter count.
__ mov(Operand(esp, 1 * kPointerSize), ecx); // Patch argument count.
__ TailCallRuntime(Runtime::kNewStrictArgumentsFast, 3, 1);
}
void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
// esp[0] : return address
// esp[4] : number of parameters
// esp[8] : receiver displacement
// esp[12] : function
// The displacement is used for skipping the return address and the
// frame pointer on the stack. It is the offset of the last
// parameter (if any) relative to the frame pointer.
static const int kDisplacement = 2 * kPointerSize;
// Check if the calling frame is an arguments adaptor frame.
Label adaptor_frame, try_allocate, runtime;
@ -3167,8 +2924,7 @@ void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
__ bind(&adaptor_frame);
__ mov(ecx, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset));
__ mov(Operand(esp, 1 * kPointerSize), ecx);
__ lea(edx, Operand(edx, ecx, times_2,
StandardFrameConstants::kCallerSPOffset));
__ lea(edx, Operand(edx, ecx, times_2, kDisplacement));
__ mov(Operand(esp, 2 * kPointerSize), edx);
// Try the new space allocation. Start out with computing the size of
@ -3179,7 +2935,7 @@ void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
__ j(zero, &add_arguments_object, Label::kNear);
__ lea(ecx, Operand(ecx, times_2, FixedArray::kHeaderSize));
__ bind(&add_arguments_object);
__ add(Operand(ecx), Immediate(Heap::kArgumentsObjectSizeStrict));
__ add(Operand(ecx), Immediate(GetArgumentsObjectSize()));
// Do the allocation of both objects in one go.
__ AllocateInNewSpace(ecx, eax, edx, ebx, &runtime, TAG_OBJECT);
@ -3187,9 +2943,8 @@ void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
// Get the arguments boilerplate from the current (global) context.
__ mov(edi, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
__ mov(edi, FieldOperand(edi, GlobalObject::kGlobalContextOffset));
const int offset =
Context::SlotOffset(Context::STRICT_MODE_ARGUMENTS_BOILERPLATE_INDEX);
__ mov(edi, Operand(edi, offset));
__ mov(edi, Operand(edi,
Context::SlotOffset(GetArgumentsBoilerplateIndex())));
// Copy the JS object part.
for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) {
@ -3197,11 +2952,20 @@ void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
__ mov(FieldOperand(eax, i), ebx);
}
if (type_ == NEW_NON_STRICT) {
// Setup the callee in-object property.
STATIC_ASSERT(Heap::kArgumentsCalleeIndex == 1);
__ mov(ebx, Operand(esp, 3 * kPointerSize));
__ mov(FieldOperand(eax, JSObject::kHeaderSize +
Heap::kArgumentsCalleeIndex * kPointerSize),
ebx);
}
// Get the length (smi tagged) and set that as an in-object property too.
STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
__ mov(ecx, Operand(esp, 1 * kPointerSize));
__ mov(FieldOperand(eax, JSObject::kHeaderSize +
Heap::kArgumentsLengthIndex * kPointerSize),
ecx);
// If there are no actual arguments, we're done.
@ -3214,10 +2978,10 @@ void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
// Setup the elements pointer in the allocated arguments object and
// initialize the header in the elements fixed array.
__ lea(edi, Operand(eax, Heap::kArgumentsObjectSizeStrict));
__ lea(edi, Operand(eax, GetArgumentsObjectSize()));
__ mov(FieldOperand(eax, JSObject::kElementsOffset), edi);
__ mov(FieldOperand(edi, FixedArray::kMapOffset),
Immediate(FACTORY->fixed_array_map()));
Immediate(masm->isolate()->factory()->fixed_array_map()));
__ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx);
// Untag the length for the loop below.
@ -3239,7 +3003,7 @@ void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
// Do the runtime call to allocate the arguments object.
__ bind(&runtime);
__ TailCallRuntime(Runtime::kNewStrictArgumentsFast, 3, 1);
__ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);
}
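
The fast-path stub deleted above built the aliased ("mapped") arguments object for non-strict functions: a parameter map whose first two header words hold the context and the backing store, then one slot per mapped parameter holding a context index, with the hole marking entries that must fall through to the backing store. A minimal sketch of how a load resolves against that layout, using plain ints instead of V8's tagged values (the -1 hole marker and the type names are illustrative):

#include <cstddef>
#include <vector>

struct MappedArguments {
  std::vector<int>* context;       // Context slots; mapped entries alias these.
  std::vector<int> parameter_map;  // Context index per parameter, or -1 (hole).
  std::vector<int> backing_store;  // Values for unmapped arguments.
};

// Mapped slots read through the context; holes and indices past the map
// read the backing store, mirroring the mapped/unmapped split above.
int LoadArgument(const MappedArguments& args, size_t index) {
  if (index < args.parameter_map.size() && args.parameter_map[index] != -1) {
    return (*args.context)[args.parameter_map[index]];
  }
  return args.backing_store[index];
}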


@ -218,21 +218,20 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
Operand(ebp, StandardFrameConstants::kCallerSPOffset + offset));
__ push(edx);
__ SafePush(Immediate(Smi::FromInt(scope()->num_parameters())));
// Arguments to ArgumentsAccessStub and/or New...:
// Arguments to ArgumentsAccessStub:
// function, receiver address, parameter count.
// The stub will rewrite receiver and parameter count if the previous
// stack frame was an arguments adapter frame.
ArgumentsAccessStub::Type type;
if (is_strict_mode()) {
type = ArgumentsAccessStub::NEW_STRICT;
} else if (function()->has_duplicate_parameters()) {
type = ArgumentsAccessStub::NEW_NON_STRICT_SLOW;
} else {
type = ArgumentsAccessStub::NEW_NON_STRICT_FAST;
}
ArgumentsAccessStub stub(type);
ArgumentsAccessStub stub(
is_strict_mode() ? ArgumentsAccessStub::NEW_STRICT
: ArgumentsAccessStub::NEW_NON_STRICT);
__ CallStub(&stub);
Variable* arguments_shadow = scope()->arguments_shadow();
if (arguments_shadow != NULL) {
__ mov(ecx, eax); // Duplicate result.
Move(arguments_shadow->AsSlot(), ecx, ebx, edx);
}
Move(arguments->AsSlot(), eax, ebx, edx);
}
@ -375,7 +374,7 @@ void FullCodeGenerator::TestContext::Plug(Slot* slot) const {
// For simplicity we always test the accumulator register.
codegen()->Move(result_register(), slot);
codegen()->PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
codegen()->DoTest(true_label_, false_label_, fall_through_);
codegen()->DoTest(this);
}
@ -449,7 +448,7 @@ void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
} else {
// For simplicity we always test the accumulator register.
__ mov(result_register(), lit);
codegen()->DoTest(true_label_, false_label_, fall_through_);
codegen()->DoTest(this);
}
}
@ -485,7 +484,7 @@ void FullCodeGenerator::TestContext::DropAndPlug(int count,
__ Drop(count);
__ Move(result_register(), reg);
codegen()->PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
codegen()->DoTest(true_label_, false_label_, fall_through_);
codegen()->DoTest(this);
}
@ -562,7 +561,8 @@ void FullCodeGenerator::TestContext::Plug(bool flag) const {
}
void FullCodeGenerator::DoTest(Label* if_true,
void FullCodeGenerator::DoTest(Expression* condition,
Label* if_true,
Label* if_false,
Label* fall_through) {
ToBooleanStub stub;
@ -1209,12 +1209,13 @@ void FullCodeGenerator::EmitDynamicLoadFromSlotFastCase(
void FullCodeGenerator::EmitVariableLoad(Variable* var) {
// Three cases: non-this global variables, lookup slots, and all other
// types of slots.
// Four cases: non-this global variables, lookup slots, all other
// types of slots, and parameters that rewrite to explicit property
// accesses on the arguments object.
Slot* slot = var->AsSlot();
ASSERT((var->is_global() && !var->is_this()) == (slot == NULL));
Property* property = var->AsProperty();
if (slot == NULL) {
if (var->is_global() && !var->is_this()) {
Comment cmnt(masm_, "Global variable");
// Use inline caching. Variable name is passed in ecx and the global
// object on the stack.
@ -1224,7 +1225,7 @@ void FullCodeGenerator::EmitVariableLoad(Variable* var) {
EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT, AstNode::kNoNumber);
context()->Plug(eax);
} else if (slot->type() == Slot::LOOKUP) {
} else if (slot != NULL && slot->type() == Slot::LOOKUP) {
Label done, slow;
// Generate code for loading from variables potentially shadowed
@ -1240,7 +1241,7 @@ void FullCodeGenerator::EmitVariableLoad(Variable* var) {
context()->Plug(eax);
} else {
} else if (slot != NULL) {
Comment cmnt(masm_, (slot->type() == Slot::CONTEXT)
? "Context slot"
: "Stack slot");
@ -1258,6 +1259,36 @@ void FullCodeGenerator::EmitVariableLoad(Variable* var) {
} else {
context()->Plug(slot);
}
} else {
Comment cmnt(masm_, "Rewritten parameter");
ASSERT_NOT_NULL(property);
// Rewritten parameter accesses are of the form "slot[literal]".
// Assert that the object is in a slot.
Variable* object_var = property->obj()->AsVariableProxy()->AsVariable();
ASSERT_NOT_NULL(object_var);
Slot* object_slot = object_var->AsSlot();
ASSERT_NOT_NULL(object_slot);
// Load the object.
MemOperand object_loc = EmitSlotSearch(object_slot, eax);
__ mov(edx, object_loc);
// Assert that the key is a smi.
Literal* key_literal = property->key()->AsLiteral();
ASSERT_NOT_NULL(key_literal);
ASSERT(key_literal->handle()->IsSmi());
// Load the key.
__ SafeSet(eax, Immediate(key_literal->handle()));
// Do a keyed property load.
Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
EmitCallIC(ic, RelocInfo::CODE_TARGET, GetPropertyId(property));
// Drop key and object left on the stack by IC.
context()->Plug(eax);
}
}
@ -1490,7 +1521,7 @@ void FullCodeGenerator::VisitAssignment(Assignment* expr) {
}
// Left-hand side can only be a property, a global or a (parameter or local)
// slot.
// slot. Variables with rewrite to .arguments are treated as KEYED_PROPERTY.
enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
LhsKind assign_type = VARIABLE;
Property* property = expr->target()->AsProperty();
@ -1516,13 +1547,29 @@ void FullCodeGenerator::VisitAssignment(Assignment* expr) {
break;
case KEYED_PROPERTY: {
if (expr->is_compound()) {
VisitForStackValue(property->obj());
VisitForAccumulatorValue(property->key());
if (property->is_arguments_access()) {
VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
MemOperand slot_operand =
EmitSlotSearch(obj_proxy->var()->AsSlot(), ecx);
__ push(slot_operand);
__ SafeSet(eax, Immediate(property->key()->AsLiteral()->handle()));
} else {
VisitForStackValue(property->obj());
VisitForAccumulatorValue(property->key());
}
__ mov(edx, Operand(esp, 0));
__ push(eax);
} else {
VisitForStackValue(property->obj());
VisitForStackValue(property->key());
if (property->is_arguments_access()) {
VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
MemOperand slot_operand =
EmitSlotSearch(obj_proxy->var()->AsSlot(), ecx);
__ push(slot_operand);
__ SafePush(Immediate(property->key()->AsLiteral()->handle()));
} else {
VisitForStackValue(property->obj());
VisitForStackValue(property->key());
}
}
break;
}
@ -1726,7 +1773,7 @@ void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
}
// Left-hand side can only be a property, a global or a (parameter or local)
// slot.
// slot. Variables with rewrite to .arguments are treated as KEYED_PROPERTY.
enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
LhsKind assign_type = VARIABLE;
Property* prop = expr->AsProperty();
@ -1786,6 +1833,8 @@ void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
void FullCodeGenerator::EmitVariableAssignment(Variable* var,
Token::Value op) {
// Left-hand sides that rewrite to explicit property accesses do not reach
// here.
ASSERT(var != NULL);
ASSERT(var->is_global() || var->AsSlot() != NULL);
@ -3738,7 +3787,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
}
// Expression can only be a property, a global or a (parameter or local)
// slot.
// slot. Variables with rewrite to .arguments are treated as KEYED_PROPERTY.
enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
LhsKind assign_type = VARIABLE;
Property* prop = expr->expression()->AsProperty();
@ -3765,8 +3814,16 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
__ push(eax);
EmitNamedPropertyLoad(prop);
} else {
VisitForStackValue(prop->obj());
VisitForAccumulatorValue(prop->key());
if (prop->is_arguments_access()) {
VariableProxy* obj_proxy = prop->obj()->AsVariableProxy();
MemOperand slot_operand =
EmitSlotSearch(obj_proxy->var()->AsSlot(), ecx);
__ push(slot_operand);
__ SafeSet(eax, Immediate(prop->key()->AsLiteral()->handle()));
} else {
VisitForStackValue(prop->obj());
VisitForAccumulatorValue(prop->key());
}
__ mov(edx, Operand(esp, 0));
__ push(eax);
EmitKeyedPropertyLoad(prop);


@ -465,76 +465,6 @@ static void GenerateKeyStringCheck(MacroAssembler* masm,
}
static Operand GenerateMappedArgumentsLookup(MacroAssembler* masm,
Register object,
Register key,
Register scratch1,
Register scratch2,
Label* unmapped_case,
Label* slow_case) {
Heap* heap = masm->isolate()->heap();
Factory* factory = masm->isolate()->factory();
// Check that the receiver isn't a smi.
__ JumpIfSmi(object, slow_case);
// Check that the key is a positive smi.
__ test(key, Immediate(0x80000001));
__ j(not_zero, slow_case);
// Load the elements into scratch1 and check its map.
Handle<Map> arguments_map(heap->non_strict_arguments_elements_map());
__ mov(scratch1, FieldOperand(object, JSObject::kElementsOffset));
__ CheckMap(scratch1, arguments_map, slow_case, DONT_DO_SMI_CHECK);
// Check if element is in the range of mapped arguments. If not, jump
// to the unmapped lookup with the parameter map in scratch1.
__ mov(scratch2, FieldOperand(scratch1, FixedArray::kLengthOffset));
__ sub(Operand(scratch2), Immediate(Smi::FromInt(2)));
__ cmp(key, Operand(scratch2));
__ j(greater_equal, unmapped_case);
// Load element index and check whether it is the hole.
const int kHeaderSize = FixedArray::kHeaderSize + 2 * kPointerSize;
__ mov(scratch2, FieldOperand(scratch1,
key,
times_half_pointer_size,
kHeaderSize));
__ cmp(scratch2, factory->the_hole_value());
__ j(equal, unmapped_case);
// Load value from context and return it. We can reuse scratch1 because
// we do not jump to the unmapped lookup (which requires the parameter
// map in scratch1).
const int kContextOffset = FixedArray::kHeaderSize;
__ mov(scratch1, FieldOperand(scratch1, kContextOffset));
return FieldOperand(scratch1,
scratch2,
times_half_pointer_size,
Context::kHeaderSize);
}
static Operand GenerateUnmappedArgumentsLookup(MacroAssembler* masm,
Register key,
Register parameter_map,
Register scratch,
Label* slow_case) {
// Element is in arguments backing store, which is referenced by the
// second element of the parameter_map.
const int kBackingStoreOffset = FixedArray::kHeaderSize + kPointerSize;
Register backing_store = parameter_map;
__ mov(backing_store, FieldOperand(parameter_map, kBackingStoreOffset));
__ mov(scratch, FieldOperand(backing_store, FixedArray::kLengthOffset));
__ cmp(key, Operand(scratch));
__ j(greater_equal, slow_case);
return FieldOperand(backing_store,
key,
times_half_pointer_size,
FixedArray::kHeaderSize);
}
void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- eax : key
@ -769,54 +699,6 @@ void KeyedLoadIC::GenerateIndexedInterceptor(MacroAssembler* masm) {
}
void KeyedLoadIC::GenerateNonStrictArguments(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- eax : key
// -- edx : receiver
// -- esp[0] : return address
// -----------------------------------
Label slow, notin;
Factory* factory = masm->isolate()->factory();
Operand mapped_location =
GenerateMappedArgumentsLookup(masm, edx, eax, ebx, ecx, &notin, &slow);
__ mov(eax, mapped_location);
__ Ret();
__ bind(&notin);
// The unmapped lookup expects that the parameter map is in ebx.
Operand unmapped_location =
GenerateUnmappedArgumentsLookup(masm, eax, ebx, ecx, &slow);
__ cmp(unmapped_location, factory->the_hole_value());
__ j(equal, &slow);
__ mov(eax, unmapped_location);
__ Ret();
__ bind(&slow);
GenerateMiss(masm, false);
}
void KeyedStoreIC::GenerateNonStrictArguments(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- eax : value
// -- ecx : key
// -- edx : receiver
// -- esp[0] : return address
// -----------------------------------
Label slow, notin;
Operand mapped_location =
GenerateMappedArgumentsLookup(masm, edx, ecx, ebx, edi, &notin, &slow);
__ mov(mapped_location, eax);
__ Ret();
__ bind(&notin);
// The unmapped lookup expects that the parameter map is in ebx.
Operand unmapped_location =
GenerateUnmappedArgumentsLookup(masm, ecx, ebx, edi, &slow);
__ mov(unmapped_location, eax);
__ Ret();
__ bind(&slow);
GenerateMiss(masm, false);
}
void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
StrictModeFlag strict_mode) {
// ----------- S t a t e -------------
@ -1264,35 +1146,6 @@ void KeyedCallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
}
void KeyedCallIC::GenerateNonStrictArguments(MacroAssembler* masm,
int argc) {
// ----------- S t a t e -------------
// -- ecx : name
// -- esp[0] : return address
// -- esp[(argc - n) * 4] : arg[n] (zero-based)
// -- ...
// -- esp[(argc + 1) * 4] : receiver
// -----------------------------------
Label slow, notin;
Factory* factory = masm->isolate()->factory();
__ mov(edx, Operand(esp, (argc + 1) * kPointerSize));
Operand mapped_location =
GenerateMappedArgumentsLookup(masm, edx, ecx, ebx, eax, &notin, &slow);
__ mov(edi, mapped_location);
GenerateFunctionTailCall(masm, argc, &slow);
__ bind(&notin);
// The unmapped lookup expects that the parameter map is in ebx.
Operand unmapped_location =
GenerateUnmappedArgumentsLookup(masm, ecx, ebx, eax, &slow);
__ cmp(unmapped_location, factory->the_hole_value());
__ j(equal, &slow);
__ mov(edi, unmapped_location);
GenerateFunctionTailCall(masm, argc, &slow);
__ bind(&slow);
GenerateMiss(masm, argc);
}
void KeyedCallIC::GenerateNormal(MacroAssembler* masm, int argc) {
// ----------- S t a t e -------------
// -- ecx : name


@ -2504,7 +2504,6 @@ void LCodeGen::DoLoadKeyedSpecializedArrayElement(
case JSObject::FAST_ELEMENTS:
case JSObject::FAST_DOUBLE_ELEMENTS:
case JSObject::DICTIONARY_ELEMENTS:
case JSObject::NON_STRICT_ARGUMENTS_ELEMENTS:
UNREACHABLE();
break;
}
@ -3241,7 +3240,6 @@ void LCodeGen::DoStoreKeyedSpecializedArrayElement(
case JSObject::FAST_ELEMENTS:
case JSObject::FAST_DOUBLE_ELEMENTS:
case JSObject::DICTIONARY_ELEMENTS:
case JSObject::NON_STRICT_ARGUMENTS_ELEMENTS:
UNREACHABLE();
break;
}


@ -793,35 +793,18 @@ MaybeObject* KeyedCallIC::LoadFunction(State state,
return TypeError("non_object_property_call", object, key);
}
if (FLAG_use_ic && state != MEGAMORPHIC && object->IsHeapObject()) {
if (FLAG_use_ic && state != MEGAMORPHIC && !object->IsAccessCheckNeeded()) {
int argc = target()->arguments_count();
InLoopFlag in_loop = target()->ic_in_loop();
Heap* heap = Handle<HeapObject>::cast(object)->GetHeap();
Map* map = heap->non_strict_arguments_elements_map();
if (object->IsJSObject() &&
Handle<JSObject>::cast(object)->elements()->map() == map) {
MaybeObject* maybe_code = isolate()->stub_cache()->ComputeCallArguments(
argc, in_loop, Code::KEYED_CALL_IC);
Object* code;
if (maybe_code->ToObject(&code)) {
set_target(Code::cast(code));
MaybeObject* maybe_code = isolate()->stub_cache()->ComputeCallMegamorphic(
argc, in_loop, Code::KEYED_CALL_IC, Code::kNoExtraICState);
Object* code;
if (maybe_code->ToObject(&code)) {
set_target(Code::cast(code));
#ifdef DEBUG
TraceIC(
"KeyedCallIC", key, state, target(), in_loop ? " (in-loop)" : "");
#endif
}
} else if (FLAG_use_ic && state != MEGAMORPHIC &&
!object->IsAccessCheckNeeded()) {
MaybeObject* maybe_code = isolate()->stub_cache()->ComputeCallMegamorphic(
argc, in_loop, Code::KEYED_CALL_IC, Code::kNoExtraICState);
Object* code;
if (maybe_code->ToObject(&code)) {
set_target(Code::cast(code));
#ifdef DEBUG
TraceIC(
"KeyedCallIC", key, state, target(), in_loop ? " (in-loop)" : "");
#endif
}
}
}
@ -1254,13 +1237,9 @@ MaybeObject* KeyedLoadIC::Load(State state,
}
} else if (object->IsJSObject()) {
JSObject* receiver = JSObject::cast(*object);
Heap* heap = Handle<JSObject>::cast(object)->GetHeap();
Map* elements_map = Handle<JSObject>::cast(object)->elements()->map();
if (elements_map == heap->non_strict_arguments_elements_map()) {
stub = non_strict_arguments_stub();
} else if (receiver->HasIndexedInterceptor()) {
if (receiver->HasIndexedInterceptor()) {
stub = indexed_interceptor_stub();
} else if (key->IsSmi() && (target() != non_strict_arguments_stub())) {
} else if (key->IsSmi()) {
MaybeObject* maybe_stub = ComputeStub(receiver,
false,
kNonStrictMode,
@ -1830,21 +1809,15 @@ MaybeObject* KeyedStoreIC::Store(State state,
Code* stub = (strict_mode == kStrictMode)
? generic_stub_strict()
: generic_stub();
if (object->IsJSObject()) {
JSObject* receiver = JSObject::cast(*object);
Heap* heap = Handle<JSObject>::cast(object)->GetHeap();
Map* elements_map = Handle<JSObject>::cast(object)->elements()->map();
if (elements_map == heap->non_strict_arguments_elements_map()) {
stub = non_strict_arguments_stub();
} else if (!force_generic) {
if (key->IsSmi() && (target() != non_strict_arguments_stub())) {
MaybeObject* maybe_stub = ComputeStub(receiver,
true,
strict_mode,
stub);
stub = maybe_stub->IsFailure() ?
NULL : Code::cast(maybe_stub->ToObjectUnchecked());
}
if (!force_generic) {
if (object->IsJSObject() && key->IsSmi()) {
JSObject* receiver = JSObject::cast(*object);
MaybeObject* maybe_stub = ComputeStub(receiver,
true,
strict_mode,
stub);
stub = maybe_stub->IsFailure() ?
NULL : Code::cast(maybe_stub->ToObjectUnchecked());
}
}
if (stub != NULL) set_target(stub);


@ -283,7 +283,6 @@ class KeyedCallIC: public CallICBase {
static void GenerateMiss(MacroAssembler* masm, int argc);
static void GenerateMegamorphic(MacroAssembler* masm, int argc);
static void GenerateNormal(MacroAssembler* masm, int argc);
static void GenerateNonStrictArguments(MacroAssembler* masm, int argc);
};
@ -409,8 +408,8 @@ class KeyedLoadIC: public KeyedIC {
}
static void GenerateGeneric(MacroAssembler* masm);
static void GenerateString(MacroAssembler* masm);
static void GenerateIndexedInterceptor(MacroAssembler* masm);
static void GenerateNonStrictArguments(MacroAssembler* masm);
// Bit mask to be tested against bit field for the cases when
// generic stub should go into slow case.
@ -468,10 +467,6 @@ class KeyedLoadIC: public KeyedIC {
return isolate()->builtins()->builtin(
Builtins::kKeyedLoadIC_IndexedInterceptor);
}
Code* non_strict_arguments_stub() {
return isolate()->builtins()->builtin(
Builtins::kKeyedLoadIC_NonStrictArguments);
}
static void Clear(Address address, Code* target);
@ -572,7 +567,6 @@ class KeyedStoreIC: public KeyedIC {
static void GenerateRuntimeSetProperty(MacroAssembler* masm,
StrictModeFlag strict_mode);
static void GenerateGeneric(MacroAssembler* masm, StrictModeFlag strict_mode);
static void GenerateNonStrictArguments(MacroAssembler* masm);
virtual MaybeObject* GetFastElementStubWithoutMapCheck(
bool is_js_array);
@ -631,10 +625,6 @@ class KeyedStoreIC: public KeyedIC {
return isolate()->builtins()->builtin(
Builtins::kKeyedStoreIC_Generic_Strict);
}
Code* non_strict_arguments_stub() {
return isolate()->builtins()->builtin(
Builtins::kKeyedStoreIC_NonStrictArguments);
}
static void Clear(Address address, Code* target);


@ -184,7 +184,6 @@ int ElementsKindToShiftSize(JSObject::ElementsKind elements_kind) {
return 3;
case JSObject::FAST_ELEMENTS:
case JSObject::DICTIONARY_ELEMENTS:
case JSObject::NON_STRICT_ARGUMENTS_ELEMENTS:
return kPointerSizeLog2;
}
UNREACHABLE();


@ -869,18 +869,6 @@ class MarkingVisitor : public ObjectVisitor {
StaticMarkingVisitor::VisitPointers(heap_, start, end);
}
void VisitCodeTarget(Heap* heap, RelocInfo* rinfo) {
StaticMarkingVisitor::VisitCodeTarget(heap, rinfo);
}
void VisitGlobalPropertyCell(Heap* heap, RelocInfo* rinfo) {
StaticMarkingVisitor::VisitGlobalPropertyCell(heap, rinfo);
}
void VisitDebugTarget(Heap* heap, RelocInfo* rinfo) {
StaticMarkingVisitor::VisitDebugTarget(heap, rinfo);
}
private:
Heap* heap_;
};


@ -190,7 +190,7 @@ function FormatMessage(message) {
proto_object_or_null: ["Object prototype may only be an Object or null"],
property_desc_object: ["Property description must be an object: ", "%0"],
redefine_disallowed: ["Cannot redefine property: ", "%0"],
define_disallowed: ["Cannot define property, object is not extensible: ", "%0"],
define_disallowed: ["Cannot define property:", "%0", ", object is not extensible."],
non_extensible_proto: ["%0", " is not extensible"],
handler_non_object: ["Proxy.", "%0", " called with non-object as handler"],
handler_trap_missing: ["Proxy handler ", "%0", " has no '", "%1", "' trap"],


@ -3098,15 +3098,8 @@ MaybeObject* KeyedLoadStubCompiler::CompileLoadElement(Map* receiver_map) {
// -- a0 : key
// -- a1 : receiver
// -----------------------------------
MaybeObject* maybe_stub;
if (receiver_map->has_fast_elements()) {
maybe_stub = KeyedLoadFastElementStub().TryGetCode();
} else {
ASSERT(receiver_map->has_external_array_elements());
JSObject::ElementsKind elements_kind = receiver_map->elements_kind();
maybe_stub = KeyedLoadExternalArrayStub(elements_kind).TryGetCode();
}
Code* stub;
MaybeObject* maybe_stub = ComputeSharedKeyedLoadElementStub(receiver_map);
if (!maybe_stub->To(&stub)) return maybe_stub;
__ DispatchMap(a1,
a2,
@ -3188,8 +3181,7 @@ MaybeObject* KeyedStoreStubCompiler::CompileStoreField(JSObject* object,
}
MaybeObject* KeyedStoreStubCompiler::CompileStoreElement(
Map* receiver_map) {
MaybeObject* KeyedStoreStubCompiler::CompileStoreElement(Map* receiver_map) {
// ----------- S t a t e -------------
// -- a0 : value
// -- a1 : key
@ -3197,16 +3189,8 @@ MaybeObject* KeyedStoreStubCompiler::CompileStoreElement(
// -- ra : return address
// -- a3 : scratch
// -----------------------------------
MaybeObject* maybe_stub;
if (receiver_map->has_fast_elements()) {
bool is_js_array = receiver_map->instance_type() == JS_ARRAY_TYPE;
maybe_stub = KeyedStoreFastElementStub(is_js_array).TryGetCode();
} else {
ASSERT(receiver_map->has_external_array_elements());
JSObject::ElementsKind elements_kind = receiver_map->elements_kind();
maybe_stub = KeyedStoreExternalArrayStub(elements_kind).TryGetCode();
}
Code* stub;
MaybeObject* maybe_stub = ComputeSharedKeyedStoreElementStub(receiver_map);
if (!maybe_stub->To(&stub)) return maybe_stub;
__ DispatchMap(a2,
a3,


@ -645,8 +645,8 @@ bool Object::IsHashTable() {
bool Object::IsDictionary() {
return IsHashTable() &&
this != HeapObject::cast(this)->GetHeap()->symbol_table();
return IsHashTable() && this !=
HeapObject::cast(this)->GetHeap()->symbol_table();
}
@ -3288,22 +3288,13 @@ BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_expression,
kIsExpressionBit)
BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_toplevel,
kIsTopLevelBit)
BOOL_GETTER(SharedFunctionInfo,
compiler_hints,
BOOL_GETTER(SharedFunctionInfo, compiler_hints,
has_only_simple_this_property_assignments,
kHasOnlySimpleThisPropertyAssignments)
BOOL_ACCESSORS(SharedFunctionInfo,
compiler_hints,
allows_lazy_compilation,
kAllowLazyCompilation)
BOOL_ACCESSORS(SharedFunctionInfo,
compiler_hints,
uses_arguments,
kUsesArguments)
BOOL_ACCESSORS(SharedFunctionInfo,
compiler_hints,
has_duplicate_parameters,
kHasDuplicateParameters)
#if V8_HOST_ARCH_32_BIT
@ -3387,10 +3378,18 @@ void SharedFunctionInfo::set_construction_count(int value) {
}
BOOL_ACCESSORS(SharedFunctionInfo,
compiler_hints,
live_objects_may_exist,
kLiveObjectsMayExist)
bool SharedFunctionInfo::live_objects_may_exist() {
return (compiler_hints() & (1 << kLiveObjectsMayExist)) != 0;
}
void SharedFunctionInfo::set_live_objects_may_exist(bool value) {
if (value) {
set_compiler_hints(compiler_hints() | (1 << kLiveObjectsMayExist));
} else {
set_compiler_hints(compiler_hints() & ~(1 << kLiveObjectsMayExist));
}
}
bool SharedFunctionInfo::IsInobjectSlackTrackingInProgress() {
@ -3398,10 +3397,9 @@ bool SharedFunctionInfo::IsInobjectSlackTrackingInProgress() {
}
BOOL_GETTER(SharedFunctionInfo,
compiler_hints,
optimization_disabled,
kOptimizationDisabled)
bool SharedFunctionInfo::optimization_disabled() {
return BooleanBit::get(compiler_hints(), kOptimizationDisabled);
}
void SharedFunctionInfo::set_optimization_disabled(bool disable) {
@ -3416,10 +3414,16 @@ void SharedFunctionInfo::set_optimization_disabled(bool disable) {
}
BOOL_ACCESSORS(SharedFunctionInfo,
compiler_hints,
strict_mode,
kStrictModeFunction)
bool SharedFunctionInfo::strict_mode() {
return BooleanBit::get(compiler_hints(), kStrictModeFunction);
}
void SharedFunctionInfo::set_strict_mode(bool value) {
set_compiler_hints(BooleanBit::set(compiler_hints(),
kStrictModeFunction,
value));
}
bool SharedFunctionInfo::native() {

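This revert trades the BOOL_ACCESSORS and BOOL_GETTER macro invocations for hand-written getters and setters, but both forms reduce to the same single-bit pattern over the compiler_hints word. A self-contained sketch of that pattern with illustrative helper names (V8's BooleanBit helpers play the same role):

// Read one flag bit out of a packed hints word.
static inline bool GetHintBit(int hints, int bit) {
  return (hints & (1 << bit)) != 0;
}

// Return the hints word with one flag bit set or cleared.
static inline int SetHintBit(int hints, int bit, bool value) {
  return value ? (hints | (1 << bit)) : (hints & ~(1 << bit));
}

Under that reading, strict_mode() above is GetHintBit(compiler_hints(), kStrictModeFunction), and set_strict_mode(value) stores the result of SetHintBit back into compiler_hints.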

@ -351,15 +351,6 @@ void JSObject::PrintElements(FILE* out) {
case DICTIONARY_ELEMENTS:
elements()->Print(out);
break;
case NON_STRICT_ARGUMENTS_ELEMENTS: {
FixedArray* p = FixedArray::cast(elements());
for (int i = 2; i < p->length(); i++) {
PrintF(out, " %d: ", i);
p->get(i)->ShortPrint(out);
PrintF(out, "\n");
}
break;
}
default:
UNREACHABLE();
break;

File diff suppressed because it is too large.


@ -1438,7 +1438,6 @@ class JSObject: public JSReceiver {
// The "slow" kind.
DICTIONARY_ELEMENTS,
NON_STRICT_ARGUMENTS_ELEMENTS,
// The "fast" kind for external arrays
EXTERNAL_BYTE_ELEMENTS,
EXTERNAL_UNSIGNED_BYTE_ELEMENTS,
@ -1476,16 +1475,13 @@ class JSObject: public JSReceiver {
//
// In the fast mode elements is a FixedArray and so each element can
// be quickly accessed. This fact is used in the generated code. The
// elements array can have one of three maps in this mode:
// fixed_array_map, non_strict_arguments_elements_map or
// fixed_cow_array_map (for copy-on-write arrays). In the latter case
// the elements array may be shared by a few objects and so before
// writing to any element the array must be copied. Use
// EnsureWritableFastElements in this case.
// elements array can have one of the two maps in this mode:
// fixed_array_map or fixed_cow_array_map (for copy-on-write
// arrays). In the latter case the elements array may be shared by a
// few objects and so before writing to any element the array must
// be copied. Use EnsureWritableFastElements in this case.
//
// In the slow mode the elements is either a NumberDictionary, an
// ExternalArray, or a FixedArray parameter map for a (non-strict)
// arguments object.
// In the slow mode elements is either a NumberDictionary or an ExternalArray.
DECL_ACCESSORS(elements, HeapObject)
inline void initialize_elements();
MUST_USE_RESULT inline MaybeObject* ResetElements();
@ -1503,12 +1499,9 @@ class JSObject: public JSReceiver {
inline bool HasExternalUnsignedIntElements();
inline bool HasExternalFloatElements();
inline bool HasExternalDoubleElements();
bool HasFastArgumentsElements();
bool HasDictionaryArgumentsElements();
inline bool AllowsSetElementsLength();
inline NumberDictionary* element_dictionary(); // Gets slow elements.
// Requires: HasFastElements().
// Requires: this->HasFastElements().
MUST_USE_RESULT inline MaybeObject* EnsureWritableFastElements();
// Collects elements starting at index 0.
@ -1694,11 +1687,7 @@ class JSObject: public JSReceiver {
MUST_USE_RESULT MaybeObject* SetFastElement(uint32_t index,
Object* value,
StrictModeFlag strict_mode,
bool check_prototype);
MUST_USE_RESULT MaybeObject* SetDictionaryElement(uint32_t index,
Object* value,
StrictModeFlag strict_mode,
bool check_prototype);
bool check_prototype = true);
MUST_USE_RESULT MaybeObject* SetFastDoubleElement(
uint32_t index,
@ -1711,7 +1700,7 @@ class JSObject: public JSReceiver {
MUST_USE_RESULT MaybeObject* SetElement(uint32_t index,
Object* value,
StrictModeFlag strict_mode,
bool check_prototype);
bool check_prototype = true);
// Returns the index'th element.
// The undefined object if index is out of bounds.
@ -1723,9 +1712,6 @@ class JSObject: public JSReceiver {
// failed.
MaybeObject* GetExternalElement(uint32_t index);
// Replace the elements' backing store with fast elements of the given
// capacity. Update the length for JSArrays. Returns the new backing
// store.
MUST_USE_RESULT MaybeObject* SetFastElementsCapacityAndLength(int capacity,
int length);
MUST_USE_RESULT MaybeObject* SetFastDoubleElementsCapacityAndLength(
@ -1855,9 +1841,6 @@ class JSObject: public JSReceiver {
MUST_USE_RESULT MaybeObject* NormalizeProperties(
PropertyNormalizationMode mode,
int expected_additional_properties);
// Convert and update the elements backing store to be a NumberDictionary
// dictionary. Returns the backing after conversion.
MUST_USE_RESULT MaybeObject* NormalizeElements();
MUST_USE_RESULT MaybeObject* UpdateMapCodeCache(String* name, Code* code);
@ -2003,17 +1986,6 @@ class JSObject: public JSReceiver {
DeleteMode mode);
MUST_USE_RESULT MaybeObject* DeleteElementWithInterceptor(uint32_t index);
MUST_USE_RESULT MaybeObject* DeleteFastElement(uint32_t index);
MUST_USE_RESULT MaybeObject* DeleteDictionaryElement(uint32_t index,
DeleteMode mode);
bool ReferencesObjectFromElements(FixedArray* elements,
ElementsKind kind,
Object* object);
bool HasElementInElements(FixedArray* elements,
ElementsKind kind,
uint32_t index);
// Returns true if most of the elements backing storage is used.
bool HasDenseElements();
@ -3922,8 +3894,6 @@ class Map: public HeapObject {
(bit_field2() & kElementsKindMask) >> kElementsKindShift);
}
// Tells whether the instance has fast elements.
// Equivalent to instance->GetElementsKind() == FAST_ELEMENTS.
inline bool has_fast_elements() {
return elements_kind() == JSObject::FAST_ELEMENTS;
}
@ -4505,7 +4475,9 @@ class SharedFunctionInfo: public HeapObject {
// False if there are definitely no live objects created from this function.
// True if live objects _may_ exist (existence not guaranteed).
// May go back from true to false after GC.
DECL_BOOLEAN_ACCESSORS(live_objects_may_exist)
inline bool live_objects_may_exist();
inline void set_live_objects_may_exist(bool value);
// [instance class name]: class name for instances.
DECL_ACCESSORS(instance_class_name, Object)
@ -4596,7 +4568,8 @@ class SharedFunctionInfo: public HeapObject {
// Indicates if this function can be lazy compiled.
// This is used to determine if we can safely flush code from a function
// when doing GC if we expect that the function will no longer be used.
DECL_BOOLEAN_ACCESSORS(allows_lazy_compilation)
inline bool allows_lazy_compilation();
inline void set_allows_lazy_compilation(bool flag);
// Indicates how many full GCs this function has survived with assigned
// code object. Used to determine when it is relatively safe to flush
@ -4610,16 +4583,12 @@ class SharedFunctionInfo: public HeapObject {
// shared function info. If a function is repeatedly optimized or if
// we cannot optimize the function we disable optimization to avoid
// spending time attempting to optimize it again.
DECL_BOOLEAN_ACCESSORS(optimization_disabled)
inline bool optimization_disabled();
inline void set_optimization_disabled(bool value);
// Indicates whether the function is a strict mode function.
DECL_BOOLEAN_ACCESSORS(strict_mode)
// False if the function definitely does not allocate an arguments object.
DECL_BOOLEAN_ACCESSORS(uses_arguments)
// True if the function has any duplicated parameter names.
DECL_BOOLEAN_ACCESSORS(has_duplicate_parameters)
inline bool strict_mode();
inline void set_strict_mode(bool value);
// Indicates whether the function is a native function.
// These need special treatment in .call and .apply since
@ -4811,21 +4780,15 @@ class SharedFunctionInfo: public HeapObject {
static const int kStartPositionMask = ~((1 << kStartPositionShift) - 1);
// Bit positions in compiler_hints.
static const int kCodeAgeSize = 3;
static const int kCodeAgeMask = (1 << kCodeAgeSize) - 1;
enum CompilerHints {
kHasOnlySimpleThisPropertyAssignments,
kAllowLazyCompilation,
kLiveObjectsMayExist,
kCodeAgeShift,
kOptimizationDisabled = kCodeAgeShift + kCodeAgeSize,
kStrictModeFunction,
kUsesArguments,
kHasDuplicateParameters,
kNative,
kBoundFunction
};
static const int kHasOnlySimpleThisPropertyAssignments = 0;
static const int kAllowLazyCompilation = 1;
static const int kLiveObjectsMayExist = 2;
static const int kCodeAgeShift = 3;
static const int kCodeAgeMask = 0x7;
static const int kOptimizationDisabled = 6;
static const int kStrictModeFunction = 7;
static const int kNative = 8;
static const int kBoundFunction = 9;
private:
#if V8_HOST_ARCH_32_BIT

View File

@ -657,7 +657,6 @@ FunctionLiteral* Parser::DoParseProgram(Handle<String> source,
0,
0,
source->length(),
false,
false);
} else if (stack_overflow_) {
isolate()->StackOverflow();
@ -3550,7 +3549,6 @@ FunctionLiteral* Parser::ParseFunctionLiteral(Handle<String> var_name,
int end_pos;
bool only_simple_this_property_assignments;
Handle<FixedArray> this_property_assignments;
bool has_duplicate_parameters = false;
// Parse function body.
{ LexicalScope lexical_scope(this, scope, isolate());
top_scope_->SetScopeName(name);
@ -3574,7 +3572,6 @@ FunctionLiteral* Parser::ParseFunctionLiteral(Handle<String> var_name,
name_loc = scanner().location();
}
if (!dupe_loc.IsValid() && top_scope_->IsDeclared(param_name)) {
has_duplicate_parameters = true;
dupe_loc = scanner().location();
}
if (!reserved_loc.IsValid() && is_reserved) {
@ -3710,8 +3707,7 @@ FunctionLiteral* Parser::ParseFunctionLiteral(Handle<String> var_name,
num_parameters,
start_pos,
end_pos,
(function_name->length() > 0),
has_duplicate_parameters);
(function_name->length() > 0));
function_literal->set_function_token_position(function_token_position);
if (fni_ != NULL && !is_named) fni_->AddFunction(function_literal);

View File

@ -1370,6 +1370,10 @@ void JsonAstBuilder::VisitThrow(Throw* expr) {
void JsonAstBuilder::VisitProperty(Property* expr) {
TagScope tag(this, "Property");
{
AttributesScope attributes(this);
AddAttribute("type", expr->is_synthetic() ? "SYNTHETIC" : "NORMAL");
}
Visit(expr->obj());
Visit(expr->key());
}

View File

@ -185,7 +185,7 @@ void RuntimeProfiler::AttemptOnStackReplacement(JSFunction* function) {
// We are not prepared to do OSR for a function that already has an
// allocated arguments object. The optimized code would bypass it for
// arguments accesses, which is unsound. Don't try OSR.
if (shared->uses_arguments()) return;
if (shared->scope_info()->HasArgumentsShadow()) return;
// We're using on-stack replacement: patch the unoptimized code so that
// any back edge in any unoptimized frame will trigger on-stack
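As an illustration of what this check filters out: any sloppy-mode function that touches 'arguments' allocates an arguments object and is skipped by OSR. A minimal sketch (the function itself is hypothetical):

function sum() {
  var total = 0;
  // This loop's back edges would normally trigger OSR, but the use of
  // 'arguments' makes AttemptOnStackReplacement return early instead.
  for (var i = 0; i < arguments.length; i++) {
    total += arguments[i];
  }
  return total;
}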

View File

@ -614,31 +614,6 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetHandler) {
}
RUNTIME_FUNCTION(MaybeObject*, Runtime_CreateCatchExtensionObject) {
ASSERT(args.length() == 2);
CONVERT_CHECKED(String, key, args[0]);
Object* value = args[1];
ASSERT(!value->IsFailure());
// Create a catch context extension object.
JSFunction* constructor =
isolate->context()->global_context()->
context_extension_function();
Object* object;
{ MaybeObject* maybe_object = isolate->heap()->AllocateJSObject(constructor);
if (!maybe_object->ToObject(&object)) return maybe_object;
}
// Assign the exception value to the catch variable and make sure
// that the catch variable is DontDelete.
{ MaybeObject* maybe_value =
// Passing non-strict per ECMA-262 5th Ed. 12.14. Catch, bullet #4.
JSObject::cast(object)->SetProperty(
key, value, DONT_DELETE, kNonStrictMode);
if (!maybe_value->ToObject(&value)) return maybe_value;
}
return object;
}
RUNTIME_FUNCTION(MaybeObject*, Runtime_ClassOf) {
NoHandleAllocation ha;
ASSERT(args.length() == 1);
@ -901,13 +876,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetOwnProperty) {
ASSERT(proto->IsJSGlobalObject());
holder = Handle<JSObject>(JSObject::cast(proto));
}
FixedArray* elements = FixedArray::cast(holder->elements());
NumberDictionary* dictionary = NULL;
if (elements->map() == heap->non_strict_arguments_elements_map()) {
dictionary = NumberDictionary::cast(elements->get(1));
} else {
dictionary = NumberDictionary::cast(elements);
}
NumberDictionary* dictionary = holder->element_dictionary();
int entry = dictionary->FindEntry(index);
ASSERT(entry != NumberDictionary::kNotFound);
PropertyDetails details = dictionary->DetailsAt(entry);
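For reference, one observable case this lookup serves is property reflection on a mapped arguments object whose elements have gone to dictionary mode. A hedged JavaScript sketch:

function f(a) {
  // Elements of a non-strict arguments object sit behind a parameter map,
  // but reflection still reports them as ordinary data properties.
  var desc = Object.getOwnPropertyDescriptor(arguments, 0);
  return desc.value;
}
f(7);  // 7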
@ -1263,6 +1232,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DeclareContextSlot) {
// Declarations are always done in the function context.
context = Handle<Context>(context->fcontext());
ASSERT(context->IsFunctionContext() || context->IsGlobalContext());
int index;
PropertyAttributes attributes;
@ -1316,7 +1286,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DeclareContextSlot) {
Handle<JSObject> context_ext;
if (context->has_extension()) {
// The function context's extension context exists - use it.
context_ext = Handle<JSObject>(context->extension());
context_ext = Handle<JSObject>(JSObject::cast(context->extension()));
} else {
// The function context's extension context does not exist - allocate
// it.
@ -3975,6 +3945,29 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DefineOrRedefineDataProperty) {
}
// Special case for elements if any of the flags are true.
// If elements are in the fast case we always implicitly assume that:
// DONT_DELETE: false, DONT_ENUM: false, READ_ONLY: false.
static MaybeObject* NormalizeObjectSetElement(Isolate* isolate,
Handle<JSObject> js_object,
uint32_t index,
Handle<Object> value,
PropertyAttributes attr) {
// Normalize the elements to enable attributes on the property.
NormalizeElements(js_object);
Handle<NumberDictionary> dictionary(js_object->element_dictionary());
// Make sure that we never go back to fast case.
dictionary->set_requires_slow_elements();
PropertyDetails details = PropertyDetails(attr, NORMAL);
Handle<NumberDictionary> extended_dictionary =
NumberDictionarySet(dictionary, index, value, details);
if (*extended_dictionary != *dictionary) {
js_object->set_elements(*extended_dictionary);
}
return *value;
}
MaybeObject* Runtime::SetObjectProperty(Isolate* isolate,
Handle<Object> object,
Handle<Object> key,
@ -4010,6 +4003,10 @@ MaybeObject* Runtime::SetObjectProperty(Isolate* isolate,
return *value;
}
if (((attr & (DONT_DELETE | DONT_ENUM | READ_ONLY)) != 0)) {
return NormalizeObjectSetElement(isolate, js_object, index, value, attr);
}
Handle<Object> result = SetElement(js_object, index, value, strict_mode);
if (result.is_null()) return Failure::Exception();
return *value;
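In observable terms, defining an element with any non-default attribute forces the receiver out of fast elements and pins it in dictionary mode, which is what NormalizeObjectSetElement implements above. A minimal sketch:

var a = [10, 20, 30];            // fast elements: all attributes are default
Object.defineProperty(a, 1, {    // non-default attributes take the slow path
  value: 42,
  writable: false
});
a[1] = 99;                       // ignored in sloppy mode: element is read-only
// a[1] is still 42, and the array stays in dictionary mode from here on.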
@ -4018,6 +4015,13 @@ MaybeObject* Runtime::SetObjectProperty(Isolate* isolate,
if (key->IsString()) {
Handle<Object> result;
if (Handle<String>::cast(key)->AsArrayIndex(&index)) {
if (((attr & (DONT_DELETE | DONT_ENUM | READ_ONLY)) != 0)) {
return NormalizeObjectSetElement(isolate,
js_object,
index,
value,
attr);
}
result = SetElement(js_object, index, value, strict_mode);
} else {
Handle<String> key_string = Handle<String>::cast(key);
@ -4035,7 +4039,7 @@ MaybeObject* Runtime::SetObjectProperty(Isolate* isolate,
Handle<String> name = Handle<String>::cast(converted);
if (name->AsArrayIndex(&index)) {
return js_object->SetElement(index, *value, strict_mode, true);
return js_object->SetElement(index, *value, strict_mode);
} else {
return js_object->SetProperty(*name, *value, attr, strict_mode);
}
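The split above hinges on whether the key canonicalizes to an array index. A small sketch of the observable difference:

var o = {};
o["3"] = "element";   // "3" is a canonical array index: stored as element 3
o["03"] = "named";    // "03" is not canonical: stored as a named property
o[3] === o["3"];      // true: the numeric key and the canonical string
                      // address the same element slot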
@ -4063,12 +4067,12 @@ MaybeObject* Runtime::ForceSetObjectProperty(Isolate* isolate,
return *value;
}
return js_object->SetElement(index, *value, kNonStrictMode, true);
return js_object->SetElement(index, *value, kNonStrictMode);
}
if (key->IsString()) {
if (Handle<String>::cast(key)->AsArrayIndex(&index)) {
return js_object->SetElement(index, *value, kNonStrictMode, true);
return js_object->SetElement(index, *value, kNonStrictMode);
} else {
Handle<String> key_string = Handle<String>::cast(key);
key_string->TryFlatten();
@ -4085,7 +4089,7 @@ MaybeObject* Runtime::ForceSetObjectProperty(Isolate* isolate,
Handle<String> name = Handle<String>::cast(converted);
if (name->AsArrayIndex(&index)) {
return js_object->SetElement(index, *value, kNonStrictMode, true);
return js_object->SetElement(index, *value, kNonStrictMode);
} else {
return js_object->SetLocalPropertyIgnoreAttributes(*name, *value, attr);
}
@ -7339,103 +7343,6 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DateYMDFromTime) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_NewArgumentsFast) {
HandleScope scope(isolate);
ASSERT(args.length() == 3);
Handle<JSFunction> callee = args.at<JSFunction>(0);
Object** parameters = reinterpret_cast<Object**>(args[1]);
const int argument_count = Smi::cast(args[2])->value();
Handle<JSObject> result =
isolate->factory()->NewArgumentsObject(callee, argument_count);
// Allocate the elements if needed.
int parameter_count = callee->shared()->formal_parameter_count();
if (argument_count > 0) {
if (parameter_count > 0) {
int mapped_count = Min(argument_count, parameter_count);
Handle<FixedArray> parameter_map =
isolate->factory()->NewFixedArray(mapped_count + 2, NOT_TENURED);
parameter_map->set_map(
isolate->heap()->non_strict_arguments_elements_map());
Handle<Map> old_map(result->map());
Handle<Map> new_map =
isolate->factory()->CopyMapDropTransitions(old_map);
new_map->set_elements_kind(JSObject::NON_STRICT_ARGUMENTS_ELEMENTS);
result->set_map(*new_map);
result->set_elements(*parameter_map);
// Store the context and the arguments array at the beginning of the
// parameter map.
Handle<Context> context(isolate->context());
Handle<FixedArray> arguments =
isolate->factory()->NewFixedArray(argument_count, NOT_TENURED);
parameter_map->set(0, *context);
parameter_map->set(1, *arguments);
// Loop over the actual parameters backwards.
int index = argument_count - 1;
while (index >= mapped_count) {
// These go directly in the arguments array and have no
// corresponding slot in the parameter map.
arguments->set(index, *(parameters - index - 1));
--index;
}
ScopeInfo<> scope_info(callee->shared()->scope_info());
while (index >= 0) {
// Detect duplicate names to the right in the parameter list.
Handle<String> name = scope_info.parameter_name(index);
int context_slot_count = scope_info.number_of_context_slots();
bool duplicate = false;
for (int j = index + 1; j < parameter_count; ++j) {
if (scope_info.parameter_name(j).is_identical_to(name)) {
duplicate = true;
break;
}
}
if (duplicate) {
// This goes directly in the arguments array with a hole in the
// parameter map.
arguments->set(index, *(parameters - index - 1));
parameter_map->set_the_hole(index + 2);
} else {
// The context index goes in the parameter map with a hole in the
// arguments array.
int context_index = -1;
for (int j = Context::MIN_CONTEXT_SLOTS;
j < context_slot_count;
++j) {
if (scope_info.context_slot_name(j).is_identical_to(name)) {
context_index = j;
break;
}
}
ASSERT(context_index >= 0);
arguments->set_the_hole(index);
parameter_map->set(index + 2, Smi::FromInt(context_index));
}
--index;
}
} else {
// If there is no aliasing, the arguments object elements are not
// special in any way.
Handle<FixedArray> elements =
isolate->factory()->NewFixedArray(argument_count, NOT_TENURED);
result->set_elements(*elements);
for (int i = 0; i < argument_count; ++i) {
elements->set(i, *(parameters - i - 1));
}
}
}
return *result;
}
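The parameter map built above gives non-strict arguments their aliasing behavior, with holes for parameters that are duplicated later in the list. A sloppy-mode sketch of both cases:

function f(a, b) {
  arguments[0] = 42;   // writes through the parameter map into 'a'
  b = "hi";            // parameter writes are visible the other way, too
  return [a, arguments[1]];
}
f(1, 2);               // [42, "hi"]

function g(x, x) {     // duplicate name: only the last 'x' is mapped
  arguments[0] = 99;   // index 0 has a hole in the parameter map...
  return x;
}
g(1, 2);               // 2: ...so the write does not reach 'x'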
RUNTIME_FUNCTION(MaybeObject*, Runtime_NewStrictArgumentsFast) {
NoHandleAllocation ha;
ASSERT(args.length() == 3);
@ -7877,7 +7784,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_CompileForOnStackReplacement) {
CONVERT_ARG_CHECKED(JSFunction, function, 0);
// We're not prepared to handle a function with arguments object.
ASSERT(!function->shared()->uses_arguments());
ASSERT(!function->shared()->scope_info()->HasArgumentsShadow());
// We have hit a back edge in an unoptimized frame for a function that was
// selected for on-stack replacement. Find the unoptimized code object.
@ -8054,12 +7961,14 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_PushWithContext) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_PushCatchContext) {
NoHandleAllocation ha;
ASSERT(args.length() == 1);
JSObject* extension_object = JSObject::cast(args[0]);
ASSERT(args.length() == 2);
String* name = String::cast(args[0]);
Object* thrown_object = args[1];
Context* context;
MaybeObject* maybe_context =
isolate->heap()->AllocateCatchContext(isolate->context(),
extension_object);
name,
thrown_object);
if (!maybe_context->To(&context)) return maybe_context;
isolate->set_context(context);
return context;
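A catch context now carries the variable name and the thrown object directly, instead of allocating a context extension object. The observable scoping is unchanged; a brief sketch:

var e = "outer";
try {
  throw "inner";
} catch (e) {
  // 'e' is bound in its own catch context and shadows the outer binding;
  // e === "inner" here, with no JSContextExtensionObject involved.
}
// e === "outer" again after the catch block.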
@ -8840,8 +8749,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_PushIfAbsent) {
}
Object* obj;
// Strict not needed. Used for cycle detection in Array join implementation.
{ MaybeObject* maybe_obj =
array->SetFastElement(length, element, kNonStrictMode, true);
{ MaybeObject* maybe_obj = array->SetFastElement(length, element,
kNonStrictMode);
if (!maybe_obj->ToObject(&obj)) return maybe_obj;
}
return isolate->heap()->true_value();
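Runtime_PushIfAbsent backs the visited-stack that Array.prototype.join uses to break cycles, which is why the strict-mode flag is irrelevant here. For example:

var a = [1, 2];
a.push(a);       // the array now contains itself
a.join(",");     // "1,2," — the cyclic element is found on the visited
                 // stack and stringifies as the empty string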
@ -10152,14 +10061,18 @@ static bool CopyContextLocalsToScopeObject(
int context_index = serialized_scope_info->ContextSlotIndex(
*scope_info.context_slot_name(i), NULL);
RETURN_IF_EMPTY_HANDLE_VALUE(
isolate,
SetProperty(scope_object,
scope_info.context_slot_name(i),
Handle<Object>(context->get(context_index), isolate),
NONE,
kNonStrictMode),
false);
// Don't include the arguments shadow (.arguments) context variable.
if (*scope_info.context_slot_name(i) !=
isolate->heap()->arguments_shadow_symbol()) {
RETURN_IF_EMPTY_HANDLE_VALUE(
isolate,
SetProperty(scope_object,
scope_info.context_slot_name(i),
Handle<Object>(context->get(context_index), isolate),
NONE,
kNonStrictMode),
false);
}
}
return true;
@ -10254,6 +10167,29 @@ static Handle<JSObject> MaterializeClosure(Isolate* isolate,
Handle<JSObject> closure_scope =
isolate->factory()->NewJSObject(isolate->object_function());
// Check whether the arguments shadow object exists.
int arguments_shadow_index =
shared->scope_info()->ContextSlotIndex(
isolate->heap()->arguments_shadow_symbol(), NULL);
if (arguments_shadow_index >= 0) {
// In this case all the arguments are available in the arguments shadow
// object.
Handle<JSObject> arguments_shadow(
JSObject::cast(context->get(arguments_shadow_index)));
for (int i = 0; i < scope_info.number_of_parameters(); ++i) {
// We don't expect exception-throwing getters on the arguments shadow.
Object* element = arguments_shadow->GetElement(i)->ToObjectUnchecked();
RETURN_IF_EMPTY_HANDLE_VALUE(
isolate,
SetProperty(closure_scope,
scope_info.parameter_name(i),
Handle<Object>(element, isolate),
NONE,
kNonStrictMode),
Handle<JSObject>());
}
}
// Fill all context locals to the context extension.
if (!CopyContextLocalsToScopeObject(isolate,
serialized_scope_info, scope_info,
@ -10285,6 +10221,23 @@ static Handle<JSObject> MaterializeClosure(Isolate* isolate,
}
// Create a plain JSObject which materializes the scope for the specified
// catch context.
static Handle<JSObject> MaterializeCatchScope(Isolate* isolate,
Handle<Context> context) {
ASSERT(context->IsCatchContext());
Handle<String> name(String::cast(context->extension()));
Handle<Object> thrown_object(context->get(Context::THROWN_OBJECT_INDEX));
Handle<JSObject> catch_scope =
isolate->factory()->NewJSObject(isolate->object_function());
RETURN_IF_EMPTY_HANDLE_VALUE(
isolate,
SetProperty(catch_scope, name, thrown_object, NONE, kNonStrictMode),
Handle<JSObject>());
return catch_scope;
}
// Iterate over the actual scopes visible from a stack frame. All scopes are
// backed by an actual context except the local scope, which is inserted
// "artifically" in the context chain.
@ -10295,10 +10248,6 @@ class ScopeIterator {
ScopeTypeLocal,
ScopeTypeWith,
ScopeTypeClosure,
// Every catch block contains an implicit with block (its parameter is
// a JSContextExtensionObject) that extends the current scope with a
// variable holding the exception object. Such with blocks are treated
// as scopes of their own type.
ScopeTypeCatch
};
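To make the scope types concrete, here is a sketch of the chain the iterator would report at a break point (innermost first):

function outer() {
  var local = 1;
  with ({ w: 2 }) {
    try {
      throw 3;
    } catch (c) {
      debugger;  // visible scopes: Catch (c), With (w), Local (local), Global
    }
  }
}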
@ -10322,8 +10271,8 @@ class ScopeIterator {
} else if (context_->IsFunctionContext()) {
at_local_ = true;
} else if (context_->closure() != *function_) {
// The context_ is a with block from the outer function.
ASSERT(context_->has_extension());
// The context_ is a with or catch block from the outer function.
ASSERT(context_->IsWithContext() || context_->IsCatchContext());
at_local_ = true;
}
}
@ -10375,15 +10324,10 @@ class ScopeIterator {
if (context_->IsFunctionContext()) {
return ScopeTypeClosure;
}
ASSERT(context_->has_extension());
// Current scope is either an explicit with statement or a with statement
// implicitly generated for a catch block.
// If the extension object here is a JSContextExtensionObject then the
// current with statement is one from a catch block; otherwise it's a
// regular with statement.
if (context_->extension()->IsJSContextExtensionObject()) {
if (context_->IsCatchContext()) {
return ScopeTypeCatch;
}
ASSERT(context_->IsWithContext());
return ScopeTypeWith;
}
@ -10392,20 +10336,17 @@ class ScopeIterator {
switch (Type()) {
case ScopeIterator::ScopeTypeGlobal:
return Handle<JSObject>(CurrentContext()->global());
break;
case ScopeIterator::ScopeTypeLocal:
// Materialize the content of the local scope into a JSObject.
return MaterializeLocalScope(isolate_, frame_);
break;
case ScopeIterator::ScopeTypeWith:
case ScopeIterator::ScopeTypeCatch:
// Return the with object.
return Handle<JSObject>(CurrentContext()->extension());
break;
return Handle<JSObject>(JSObject::cast(CurrentContext()->extension()));
case ScopeIterator::ScopeTypeCatch:
return MaterializeCatchScope(isolate_, CurrentContext());
case ScopeIterator::ScopeTypeClosure:
// Materialize the content of the closure scope into a JSObject.
return MaterializeClosure(isolate_, CurrentContext());
break;
}
UNREACHABLE();
return Handle<JSObject>();
@ -10436,8 +10377,7 @@ class ScopeIterator {
if (!CurrentContext().is_null()) {
CurrentContext()->Print();
if (CurrentContext()->has_extension()) {
Handle<JSObject> extension =
Handle<JSObject>(CurrentContext()->extension());
Handle<Object> extension(CurrentContext()->extension());
if (extension->IsJSContextExtensionObject()) {
extension->Print();
}
@ -10446,34 +10386,27 @@ class ScopeIterator {
break;
}
case ScopeIterator::ScopeTypeWith: {
case ScopeIterator::ScopeTypeWith:
PrintF("With:\n");
Handle<JSObject> extension =
Handle<JSObject>(CurrentContext()->extension());
extension->Print();
CurrentContext()->extension()->Print();
break;
}
case ScopeIterator::ScopeTypeCatch: {
case ScopeIterator::ScopeTypeCatch:
PrintF("Catch:\n");
Handle<JSObject> extension =
Handle<JSObject>(CurrentContext()->extension());
extension->Print();
CurrentContext()->extension()->Print();
CurrentContext()->get(Context::THROWN_OBJECT_INDEX)->Print();
break;
}
case ScopeIterator::ScopeTypeClosure: {
case ScopeIterator::ScopeTypeClosure:
PrintF("Closure:\n");
CurrentContext()->Print();
if (CurrentContext()->has_extension()) {
Handle<JSObject> extension =
Handle<JSObject>(CurrentContext()->extension());
Handle<Object> extension(CurrentContext()->extension());
if (extension->IsJSContextExtensionObject()) {
extension->Print();
}
}
break;
}
default:
UNREACHABLE();
@ -10952,10 +10885,17 @@ static Handle<Context> CopyWithContextChain(Isolate* isolate,
HandleScope scope(isolate);
Handle<Context> previous(current->previous());
Handle<Context> new_previous = CopyWithContextChain(isolate, previous, base);
Handle<JSObject> extension(JSObject::cast(current->extension()));
Handle<Context> new_current = current->IsCatchContext()
? isolate->factory()->NewCatchContext(new_previous, extension)
: isolate->factory()->NewWithContext(new_previous, extension);
Handle<Context> new_current;
if (current->IsCatchContext()) {
Handle<String> name(String::cast(current->extension()));
Handle<Object> thrown_object(current->get(Context::THROWN_OBJECT_INDEX));
new_current =
isolate->factory()->NewCatchContext(new_previous, name, thrown_object);
} else {
Handle<JSObject> extension(JSObject::cast(current->extension()));
new_current =
isolate->factory()->NewWithContext(new_previous, extension);
}
return scope.CloseAndEscape(new_current);
}

View File

@ -82,7 +82,6 @@ namespace internal {
F(GetFunctionDelegate, 1, 1) \
F(GetConstructorDelegate, 1, 1) \
F(NewArgumentsFast, 3, 1) \
F(NewStrictArgumentsFast, 3, 1) \
F(LazyCompile, 1, 1) \
F(LazyRecompile, 1, 1) \
F(NotifyDeoptimized, 1, 1) \
@ -285,9 +284,6 @@ namespace internal {
F(IsJSProxy, 1, 1) \
F(GetHandler, 1, 1) \
\
/* Catch context extension objects */ \
F(CreateCatchExtensionObject, 2, 1) \
\
/* Statements */ \
F(NewClosure, 3, 1) \
F(NewObject, 1, 1) \
@ -302,7 +298,7 @@ namespace internal {
/* Contexts */ \
F(NewFunctionContext, 1, 1) \
F(PushWithContext, 1, 1) \
F(PushCatchContext, 1, 1) \
F(PushCatchContext, 2, 1) \
F(DeleteContextSlot, 2, 1) \
F(LoadContextSlot, 2, 2) \
F(LoadContextSlotNoReferenceError, 2, 2) \

View File

@ -117,6 +117,11 @@ class SerializedScopeInfo : public FixedArray {
// Is this scope a strict mode scope?
bool IsStrictMode();
// Does this scope have an arguments shadow?
bool HasArgumentsShadow() {
return StackSlotIndex(GetHeap()->arguments_shadow_symbol()) >= 0;
}
// Return the number of stack slots for code.
int NumberOfStackSlots();

View File

@ -157,6 +157,27 @@ Scope::Scope(Scope* inner_scope, Handle<SerializedScopeInfo> scope_info)
}
AddInnerScope(inner_scope);
// This scope's arguments shadow (if present) is context-allocated if an inner
// scope accesses this one's parameters. Allocate the arguments_shadow_
// variable if necessary.
Isolate* isolate = Isolate::Current();
Variable::Mode mode;
int arguments_shadow_index =
scope_info_->ContextSlotIndex(
isolate->heap()->arguments_shadow_symbol(), &mode);
if (arguments_shadow_index >= 0) {
ASSERT(mode == Variable::INTERNAL);
arguments_shadow_ = new Variable(
this,
isolate->factory()->arguments_shadow_symbol(),
Variable::INTERNAL,
true,
Variable::ARGUMENTS);
arguments_shadow_->set_rewrite(
new Slot(arguments_shadow_, Slot::CONTEXT, arguments_shadow_index));
arguments_shadow_->set_is_used(true);
}
}
@ -170,6 +191,7 @@ void Scope::SetDefaults(Type type,
receiver_ = NULL;
function_ = NULL;
arguments_ = NULL;
arguments_shadow_ = NULL;
illegal_redecl_ = NULL;
scope_inside_with_ = false;
scope_contains_with_ = false;
@ -277,33 +299,52 @@ Variable* Scope::LocalLookup(Handle<String> name) {
if (result != NULL || !resolved()) {
return result;
}
// If the scope is resolved, we can find a variable in serialized scope
// info.
//
// We should never lookup 'arguments' in this scope as it is implicitly
// present in every scope.
// If the scope is resolved, we can find a variable in serialized scope info.
// We should never lookup 'arguments' in this scope
// as it is implicitly present in any scope.
ASSERT(*name != *FACTORY->arguments_symbol());
// There should be no local slot with the given name.
// Assert that there is no local slot with the given name.
ASSERT(scope_info_->StackSlotIndex(*name) < 0);
// Check context slot lookup.
Variable::Mode mode;
int index = scope_info_->ContextSlotIndex(*name, &mode);
if (index < 0) {
// Check parameters.
mode = Variable::VAR;
index = scope_info_->ParameterIndex(*name);
if (index < 0) {
// Check the function name.
index = scope_info_->FunctionContextSlotIndex(*name);
if (index < 0) return NULL;
}
if (index >= 0) {
Variable* var =
variables_.Declare(this, name, mode, true, Variable::NORMAL);
var->set_rewrite(new Slot(var, Slot::CONTEXT, index));
return var;
}
Variable* var =
variables_.Declare(this, name, mode, true, Variable::NORMAL);
var->set_rewrite(new Slot(var, Slot::CONTEXT, index));
return var;
index = scope_info_->ParameterIndex(*name);
if (index >= 0) {
// ".arguments" must be present in context slots.
ASSERT(arguments_shadow_ != NULL);
Variable* var =
variables_.Declare(this, name, Variable::VAR, true, Variable::NORMAL);
Property* rewrite =
new Property(new VariableProxy(arguments_shadow_),
new Literal(Handle<Object>(Smi::FromInt(index))),
RelocInfo::kNoPosition,
Property::SYNTHETIC);
rewrite->set_is_arguments_access(true);
var->set_rewrite(rewrite);
return var;
}
index = scope_info_->FunctionContextSlotIndex(*name);
if (index >= 0) {
// Check that there is no local slot with the given name.
ASSERT(scope_info_->StackSlotIndex(*name) < 0);
Variable* var =
variables_.Declare(this, name, Variable::VAR, true, Variable::NORMAL);
var->set_rewrite(new Slot(var, Slot::CONTEXT, index));
return var;
}
return NULL;
}
@ -911,17 +952,36 @@ void Scope::AllocateParameterLocals() {
Variable* arguments = LocalLookup(FACTORY->arguments_symbol());
ASSERT(arguments != NULL); // functions have 'arguments' declared implicitly
bool uses_nonstrict_arguments = false;
// Parameters are rewritten to arguments[i] if 'arguments' is used in
// a non-strict mode function. Strict mode code doesn't alias arguments.
bool rewrite_parameters = false;
if (MustAllocate(arguments) && !HasArgumentsParameter()) {
// 'arguments' is used. Unless there is also a parameter called
// 'arguments', we must be conservative and allocate all parameters to
// the context assuming they will be captured by the arguments object.
// If we have a parameter named 'arguments', a (new) value is always
// assigned to it via the function invocation. Then 'arguments' denotes
// that specific parameter value and cannot be used to access the
// parameters, which is why we don't need to allocate an arguments
// object in that case.
// 'arguments', we must be conservative and access all parameters via
// the arguments object: The i'th parameter is rewritten into
// '.arguments[i]' (*). If we have a parameter named 'arguments', a
// (new) value is always assigned to it via the function
// invocation. Then 'arguments' denotes that specific parameter value
// and cannot be used to access the parameters, which is why we don't
// need to rewrite in that case.
//
// (*) Instead of having a parameter called 'arguments', we may have an
// assignment to 'arguments' in the function body, at some arbitrary
// point in time (possibly through an 'eval()' call!). After that
// assignment any re-write of parameters would be invalid (was bug
// 881452). Thus, we introduce a shadow '.arguments'
// variable which also points to the arguments object. For rewrites we
// use '.arguments' which remains valid even if we assign to
// 'arguments'. To summarize: If we need to rewrite, we allocate an
// 'arguments' object dynamically upon function invocation. The compiler
// introduces 2 local variables 'arguments' and '.arguments', both of
// which originally point to the arguments object that was
// allocated. All parameters are rewritten into property accesses via
// the '.arguments' variable. Thus, any changes to properties of
// 'arguments' are reflected in the variables and vice versa. If the
// 'arguments' variable is changed, '.arguments' still points to the
// correct arguments object and the rewrites still work.
// We are using 'arguments'. Tell the code generator that it needs to
// allocate the arguments object by setting 'arguments_'.
@ -930,31 +990,75 @@ void Scope::AllocateParameterLocals() {
// In strict mode 'arguments' does not alias formal parameters.
// Therefore in strict mode we allocate parameters as if 'arguments'
// were not used.
uses_nonstrict_arguments = !is_strict_mode();
rewrite_parameters = !is_strict_mode();
}
// The same parameter may occur multiple times in the parameters_ list.
// If it does, and if it is not copied into the context object, it must
// receive the highest parameter index for that parameter; thus iteration
// order is relevant!
for (int i = params_.length() - 1; i >= 0; --i) {
Variable* var = params_[i];
ASSERT(var->scope() == this);
if (uses_nonstrict_arguments) {
// Give the parameter a use from an inner scope, to force allocation
// to the context.
var->MarkAsAccessedFromInnerScope();
if (rewrite_parameters) {
// We also need the '.arguments' shadow variable. Declare it and create
// and bind the corresponding proxy. It's ok to declare it only now
// because it's a local variable that is allocated after the parameters
// have been allocated.
//
// Note: This is "almost" at temporary variable but we cannot use
// NewTemporary() because the mode needs to be INTERNAL since this
// variable may be allocated in the heap-allocated context (temporaries
// are never allocated in the context).
arguments_shadow_ = new Variable(this,
FACTORY->arguments_shadow_symbol(),
Variable::INTERNAL,
true,
Variable::ARGUMENTS);
arguments_shadow_->set_is_used(true);
temps_.Add(arguments_shadow_);
// Allocate the parameters by rewriting them into '.arguments[i]' accesses.
for (int i = 0; i < params_.length(); i++) {
Variable* var = params_[i];
ASSERT(var->scope() == this);
if (MustAllocate(var)) {
if (MustAllocateInContext(var)) {
// It is ok to set this only now, because arguments is a local
// variable that is allocated after the parameters have been
// allocated.
arguments_shadow_->MarkAsAccessedFromInnerScope();
}
Property* rewrite =
new Property(new VariableProxy(arguments_shadow_),
new Literal(Handle<Object>(Smi::FromInt(i))),
RelocInfo::kNoPosition,
Property::SYNTHETIC);
rewrite->set_is_arguments_access(true);
var->set_rewrite(rewrite);
}
}
if (MustAllocate(var)) {
if (MustAllocateInContext(var)) {
ASSERT(var->rewrite() == NULL || var->IsContextSlot());
if (var->rewrite() == NULL) {
AllocateHeapSlot(var);
}
} else {
ASSERT(var->rewrite() == NULL || var->IsParameter());
if (var->rewrite() == NULL) {
} else {
// The arguments object is not used, so we can access parameters directly.
// The same parameter may occur multiple times in the parameters_ list.
// If it does, and if it is not copied into the context object, it must
// receive the highest parameter index for that parameter; thus iteration
// order is relevant!
for (int i = 0; i < params_.length(); i++) {
Variable* var = params_[i];
ASSERT(var->scope() == this);
if (MustAllocate(var)) {
if (MustAllocateInContext(var)) {
ASSERT(var->rewrite() == NULL ||
(var->AsSlot() != NULL &&
var->AsSlot()->type() == Slot::CONTEXT));
if (var->rewrite() == NULL) {
// Only set the heap allocation if the parameter has not
// been allocated yet.
AllocateHeapSlot(var);
}
} else {
ASSERT(var->rewrite() == NULL ||
(var->AsSlot() != NULL &&
var->AsSlot()->type() == Slot::PARAMETER));
// Set the parameter index always, even if the parameter
// was seen before! (We need to access the actual parameter
// supplied for the last occurrence of a multiply declared
// parameter.)
var->set_rewrite(new Slot(var, Slot::PARAMETER, i));
}
}
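The '.arguments' shadow introduced above is what keeps rewritten parameter accesses valid even after user code clobbers 'arguments' (the situation from bug 881452). A sloppy-mode sketch of the behavior being preserved:

function f(a) {
  var original = arguments;
  arguments = null;    // user code may overwrite the 'arguments' binding...
  original[0] = 42;    // ...but the original object still aliases 'a',
  return a;            // so this returns 42
}
f(1);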
@ -966,9 +1070,8 @@ void Scope::AllocateParameterLocals() {
void Scope::AllocateNonParameterLocal(Variable* var) {
ASSERT(var->scope() == this);
ASSERT(var->rewrite() == NULL ||
!var->IsVariable(FACTORY->result_symbol()) ||
var->AsSlot() == NULL ||
var->AsSlot()->type() != Slot::LOCAL);
(!var->IsVariable(FACTORY->result_symbol())) ||
(var->AsSlot() == NULL || var->AsSlot()->type() != Slot::LOCAL));
if (var->rewrite() == NULL && MustAllocate(var)) {
if (MustAllocateInContext(var)) {
AllocateHeapSlot(var);

View File

@ -251,8 +251,13 @@ class Scope: public ZoneObject {
int num_parameters() const { return params_.length(); }
// The local variable 'arguments' if we need to allocate it; NULL otherwise.
// If arguments() exist, arguments_shadow() exists, too.
Variable* arguments() const { return arguments_; }
// The '.arguments' shadow variable if we need to allocate it; NULL otherwise.
// If arguments_shadow() exist, arguments() exists, too.
Variable* arguments_shadow() const { return arguments_shadow_; }
// Declarations list.
ZoneList<Declaration*>* declarations() { return &decls_; }
@ -350,6 +355,8 @@ class Scope: public ZoneObject {
Variable* function_;
// Convenience variable; function scopes only.
Variable* arguments_;
// Convenience variable; function scopes only.
Variable* arguments_shadow_;
// Illegal redeclaration.
Expression* illegal_redecl_;

View File

@ -1040,26 +1040,6 @@ MaybeObject* StubCache::ComputeCallNormal(int argc,
}
MaybeObject* StubCache::ComputeCallArguments(int argc,
InLoopFlag in_loop,
Code::Kind kind) {
ASSERT(kind == Code::KEYED_CALL_IC);
Code::Flags flags = Code::ComputeFlags(kind,
in_loop,
MEGAMORPHIC,
Code::kNoExtraICState,
NORMAL,
argc);
Object* probe;
{ MaybeObject* maybe_probe = ProbeCache(isolate_, flags);
if (!maybe_probe->ToObject(&probe)) return maybe_probe;
}
if (!probe->IsUndefined()) return probe;
StubCompiler compiler;
return FillCache(isolate_, compiler.CompileCallArguments(flags));
}
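This stub serves keyed calls whose receiver elements are a non-strict arguments object. A sketch of the call pattern it compiles for:

function f(cb) {
  // A keyed call through the arguments object hits KEYED_CALL_IC with
  // non-strict-arguments elements, the case handled by the stub above.
  return arguments[0]("hi");
}
f(function (msg) { return msg + "!"; });  // "hi!"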
MaybeObject* StubCache::ComputeCallMegamorphic(
int argc,
InLoopFlag in_loop,
@ -1515,26 +1495,6 @@ MaybeObject* StubCompiler::CompileCallMegamorphic(Code::Flags flags) {
}
MaybeObject* StubCompiler::CompileCallArguments(Code::Flags flags) {
HandleScope scope(isolate());
int argc = Code::ExtractArgumentsCountFromFlags(flags);
KeyedCallIC::GenerateNonStrictArguments(masm(), argc);
Code::Kind kind = Code::ExtractKindFromFlags(flags);
Object* result;
{ MaybeObject* maybe_result =
GetCodeWithFlags(flags, "CompileCallArguments");
if (!maybe_result->ToObject(&result)) return maybe_result;
}
Code* code = Code::cast(result);
USE(code);
PROFILE(isolate(),
CodeCreateEvent(CALL_LOGGER_TAG(kind, CALL_MEGAMORPHIC_TAG),
code, code->arguments_count()));
GDBJIT(AddCode(GDBJITInterface::CALL_MEGAMORPHIC, Code::cast(code)));
return result;
}
MaybeObject* StubCompiler::CompileCallMiss(Code::Flags flags) {
HandleScope scope(isolate());
int argc = Code::ExtractArgumentsCountFromFlags(flags);

View File

@ -261,10 +261,6 @@ class StubCache {
Code::Kind kind,
Code::ExtraICState state);
MUST_USE_RESULT MaybeObject* ComputeCallArguments(int argc,
InLoopFlag in_loop,
Code::Kind kind);
MUST_USE_RESULT MaybeObject* ComputeCallMegamorphic(int argc,
InLoopFlag in_loop,
Code::Kind kind,
@ -429,7 +425,6 @@ class StubCompiler BASE_EMBEDDED {
MUST_USE_RESULT MaybeObject* CompileCallPreMonomorphic(Code::Flags flags);
MUST_USE_RESULT MaybeObject* CompileCallNormal(Code::Flags flags);
MUST_USE_RESULT MaybeObject* CompileCallMegamorphic(Code::Flags flags);
MUST_USE_RESULT MaybeObject* CompileCallArguments(Code::Flags flags);
MUST_USE_RESULT MaybeObject* CompileCallMiss(Code::Flags flags);
#ifdef ENABLE_DEBUGGER_SUPPORT
MUST_USE_RESULT MaybeObject* CompileCallDebugBreak(Code::Flags flags);

View File

@ -74,26 +74,28 @@ Handle<Object> TypeFeedbackOracle::GetInfo(unsigned ast_id) {
}
bool TypeFeedbackOracle::LoadIsMonomorphic(Property* expr) {
bool TypeFeedbackOracle::LoadIsMonomorphicNormal(Property* expr) {
Handle<Object> map_or_code(GetInfo(expr->id()));
if (map_or_code->IsMap()) return true;
if (map_or_code->IsCode()) {
Handle<Code> code = Handle<Code>::cast(map_or_code);
return code->is_keyed_load_stub() &&
code->ic_state() == MONOMORPHIC &&
Code::ExtractTypeFromFlags(code->flags()) == NORMAL &&
code->FindFirstMap() != NULL;
}
return false;
}
bool TypeFeedbackOracle::StoreIsMonomorphic(Expression* expr) {
bool TypeFeedbackOracle::StoreIsMonomorphicNormal(Expression* expr) {
Handle<Object> map_or_code(GetInfo(expr->id()));
if (map_or_code->IsMap()) return true;
if (map_or_code->IsCode()) {
Handle<Code> code = Handle<Code>::cast(map_or_code);
return code->is_keyed_store_stub() &&
code->ic_state() == MONOMORPHIC;
code->ic_state() == MONOMORPHIC &&
Code::ExtractTypeFromFlags(code->flags()) == NORMAL;
}
return false;
}
@ -106,7 +108,7 @@ bool TypeFeedbackOracle::CallIsMonomorphic(Call* expr) {
Handle<Map> TypeFeedbackOracle::LoadMonomorphicReceiverType(Property* expr) {
ASSERT(LoadIsMonomorphic(expr));
ASSERT(LoadIsMonomorphicNormal(expr));
Handle<Object> map_or_code(GetInfo(expr->id()));
if (map_or_code->IsCode()) {
Handle<Code> code = Handle<Code>::cast(map_or_code);
@ -119,7 +121,7 @@ Handle<Map> TypeFeedbackOracle::LoadMonomorphicReceiverType(Property* expr) {
Handle<Map> TypeFeedbackOracle::StoreMonomorphicReceiverType(Expression* expr) {
ASSERT(StoreIsMonomorphic(expr));
ASSERT(StoreIsMonomorphicNormal(expr));
Handle<Object> map_or_code(GetInfo(expr->id()));
if (map_or_code->IsCode()) {
Handle<Code> code = Handle<Code>::cast(map_or_code);

View File

@ -215,8 +215,8 @@ class TypeFeedbackOracle BASE_EMBEDDED {
public:
TypeFeedbackOracle(Handle<Code> code, Handle<Context> global_context);
bool LoadIsMonomorphic(Property* expr);
bool StoreIsMonomorphic(Expression* expr);
bool LoadIsMonomorphicNormal(Property* expr);
bool StoreIsMonomorphicNormal(Expression* expr);
bool CallIsMonomorphic(Call* expr);
Handle<Map> LoadMonomorphicReceiverType(Property* expr);

View File

@ -1,4 +1,4 @@
// Copyright 2006-2008 the V8 project authors. All rights reserved.
// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@ -35,13 +35,10 @@
#if defined(GOOGLE3)
// Google3 special flag handling.
#if defined(DEBUG) && defined(NDEBUG)
// If both are defined in Google3, then we are building an optimized v8 with
// assertions enabled.
// V8 only uses DEBUG and whenever it is set we are building a debug
// version of V8. We do not use NDEBUG and simply undef it here for
// consistency.
#undef NDEBUG
#elif !defined(DEBUG) && !defined(NDEBUG)
// If neither is defined in Google3, then we are building a debug v8. Mark it
// as such.
#define DEBUG
#endif
#endif // defined(GOOGLE3)

View File

@ -629,7 +629,7 @@ function DefineOwnProperty(obj, p, desc, should_throw) {
// Step 3
if (IS_UNDEFINED(current) && !extensible) {
if (should_throw) {
throw MakeTypeError("define_disallowed", ["defineProperty"]);
throw MakeTypeError("define_disallowed", [p]);
} else {
return;
}
@ -659,7 +659,7 @@ function DefineOwnProperty(obj, p, desc, should_throw) {
(desc.hasEnumerable() &&
desc.isEnumerable() != current.isEnumerable())) {
if (should_throw) {
throw MakeTypeError("redefine_disallowed", ["defineProperty"]);
throw MakeTypeError("redefine_disallowed", [p]);
} else {
return;
}
@ -669,7 +669,7 @@ function DefineOwnProperty(obj, p, desc, should_throw) {
// Step 9a
if (IsDataDescriptor(current) != IsDataDescriptor(desc)) {
if (should_throw) {
throw MakeTypeError("redefine_disallowed", ["defineProperty"]);
throw MakeTypeError("redefine_disallowed", [p]);
} else {
return;
}
@ -678,7 +678,7 @@ function DefineOwnProperty(obj, p, desc, should_throw) {
if (IsDataDescriptor(current) && IsDataDescriptor(desc)) {
if (!current.isWritable() && desc.isWritable()) {
if (should_throw) {
throw MakeTypeError("redefine_disallowed", ["defineProperty"]);
throw MakeTypeError("redefine_disallowed", [p]);
} else {
return;
}
@ -686,7 +686,7 @@ function DefineOwnProperty(obj, p, desc, should_throw) {
if (!current.isWritable() && desc.hasValue() &&
!SameValue(desc.getValue(), current.getValue())) {
if (should_throw) {
throw MakeTypeError("redefine_disallowed", ["defineProperty"]);
throw MakeTypeError("redefine_disallowed", [p]);
} else {
return;
}
@ -696,14 +696,14 @@ function DefineOwnProperty(obj, p, desc, should_throw) {
if (IsAccessorDescriptor(desc) && IsAccessorDescriptor(current)) {
if (desc.hasSetter() && !SameValue(desc.getSet(), current.getSet())) {
if (should_throw) {
throw MakeTypeError("redefine_disallowed", ["defineProperty"]);
throw MakeTypeError("redefine_disallowed", [p]);
} else {
return;
}
}
if (desc.hasGetter() && !SameValue(desc.getGet(),current.getGet())) {
if (should_throw) {
throw MakeTypeError("redefine_disallowed", ["defineProperty"]);
throw MakeTypeError("redefine_disallowed", [p]);
} else {
return;
}
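The net effect of these message changes is that the TypeError now names the offending property instead of the generic "defineProperty". A sketch (the exact message text is engine-specific):

var o = {};
Object.defineProperty(o, "x", { value: 1 });   // non-configurable by default
try {
  Object.defineProperty(o, "x", { value: 2 }); // redefinition is rejected
} catch (err) {
  // err.message now mentions the property name "x"
}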
@ -1016,7 +1016,7 @@ function ObjectIsFrozen(obj) {
// ES5 section 15.2.3.13
function ObjectIsExtensible(obj) {
if (!IS_SPEC_OBJECT(obj)) {
throw MakeTypeError("obj_ctor_property_non_object", ["preventExtension"]);
throw MakeTypeError("obj_ctor_property_non_object", ["isExtensible"]);
}
return %IsExtensible(obj);
}

View File

@ -57,26 +57,32 @@ Property* Variable::AsProperty() const {
}
Slot* Variable::AsSlot() const { return rewrite_; }
Slot* Variable::AsSlot() const {
return rewrite_ == NULL ? NULL : rewrite_->AsSlot();
}
bool Variable::IsStackAllocated() const {
return rewrite_ != NULL && rewrite_->IsStackAllocated();
Slot* slot = AsSlot();
return slot != NULL && slot->IsStackAllocated();
}
bool Variable::IsParameter() const {
return rewrite_ != NULL && rewrite_->type() == Slot::PARAMETER;
Slot* s = AsSlot();
return s != NULL && s->type() == Slot::PARAMETER;
}
bool Variable::IsStackLocal() const {
return rewrite_ != NULL && rewrite_->type() == Slot::LOCAL;
Slot* s = AsSlot();
return s != NULL && s->type() == Slot::LOCAL;
}
bool Variable::IsContextSlot() const {
return rewrite_ != NULL && rewrite_->type() == Slot::CONTEXT;
Slot* s = AsSlot();
return s != NULL && s->type() == Slot::CONTEXT;
}

View File

@ -81,7 +81,7 @@ class Variable: public ZoneObject {
// Printing support
static const char* Mode2String(Mode mode);
// Type testing & conversion. Global variables are not slots.
// Type testing & conversion
Property* AsProperty() const;
Slot* AsSlot() const;
@ -138,8 +138,8 @@ class Variable: public ZoneObject {
local_if_not_shadowed_ = local;
}
Slot* rewrite() const { return rewrite_; }
void set_rewrite(Slot* slot) { rewrite_ = slot; }
Expression* rewrite() const { return rewrite_; }
void set_rewrite(Expression* expr) { rewrite_ = expr; }
private:
Scope* scope_;
@ -150,7 +150,8 @@ class Variable: public ZoneObject {
Variable* local_if_not_shadowed_;
// Code generation.
Slot* rewrite_;
// rewrite_ is usually a Slot or a Property, but may be any expression.
Expression* rewrite_;
// Valid as a LHS? (const and this are not valid LHS, for example)
bool is_valid_LHS_;

View File

@ -1945,288 +1945,49 @@ void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
}
void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) {
// Stack layout:
// rsp[0] : return address
// rsp[8] : number of parameters (tagged)
// rsp[16] : receiver displacement
// rsp[24] : function
// Registers used over the whole function:
// rbx: the mapped parameter count (untagged)
// rax: the allocated object (tagged).
Factory* factory = masm->isolate()->factory();
__ SmiToInteger64(rbx, Operand(rsp, 1 * kPointerSize));
// rbx = parameter count (untagged)
// Check if the calling frame is an arguments adaptor frame.
Label runtime;
Label adaptor_frame, try_allocate;
__ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
__ movq(rcx, Operand(rdx, StandardFrameConstants::kContextOffset));
__ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
__ j(equal, &adaptor_frame);
// No adaptor, parameter count = argument count.
__ movq(rcx, rbx);
__ jmp(&try_allocate, Label::kNear);
// We have an adaptor frame. Patch the parameters pointer.
__ bind(&adaptor_frame);
__ SmiToInteger64(rcx,
Operand(rdx,
ArgumentsAdaptorFrameConstants::kLengthOffset));
__ lea(rdx, Operand(rdx, rcx, times_pointer_size,
StandardFrameConstants::kCallerSPOffset));
__ movq(Operand(rsp, 2 * kPointerSize), rdx);
// rbx = parameter count (untagged)
// rcx = argument count (untagged)
// Compute the mapped parameter count = min(rbx, rcx) in rbx.
__ cmpq(rbx, rcx);
__ j(less_equal, &try_allocate, Label::kNear);
__ movq(rbx, rcx);
__ bind(&try_allocate);
// Compute the sizes of backing store, parameter map, and arguments object.
// 1. Parameter map, has 2 extra words containing context and backing store.
const int kParameterMapHeaderSize =
FixedArray::kHeaderSize + 2 * kPointerSize;
Label no_parameter_map;
__ testq(rbx, rbx);
__ j(zero, &no_parameter_map, Label::kNear);
__ lea(r8, Operand(rbx, times_pointer_size, kParameterMapHeaderSize));
__ bind(&no_parameter_map);
// 2. Backing store.
__ lea(r8, Operand(r8, rcx, times_pointer_size, FixedArray::kHeaderSize));
// 3. Arguments object.
__ addq(r8, Immediate(Heap::kArgumentsObjectSize));
// Do the allocation of all three objects in one go.
__ AllocateInNewSpace(r8, rax, rdx, rdi, &runtime, TAG_OBJECT);
// rax = address of new object(s) (tagged)
// rcx = argument count (untagged)
// Get the arguments boilerplate from the current (global) context into rdi.
Label has_mapped_parameters, copy;
__ movq(rdi, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
__ movq(rdi, FieldOperand(rdi, GlobalObject::kGlobalContextOffset));
__ testq(rbx, rbx);
__ j(not_zero, &has_mapped_parameters, Label::kNear);
const int kIndex = Context::ARGUMENTS_BOILERPLATE_INDEX;
__ movq(rdi, Operand(rdi, Context::SlotOffset(kIndex)));
__ jmp(&copy, Label::kNear);
const int kAliasedIndex = Context::ALIASED_ARGUMENTS_BOILERPLATE_INDEX;
__ bind(&has_mapped_parameters);
__ movq(rdi, Operand(rdi, Context::SlotOffset(kAliasedIndex)));
__ bind(&copy);
// rax = address of new object (tagged)
// rbx = mapped parameter count (untagged)
// rcx = argument count (untagged)
// rdi = address of boilerplate object (tagged)
// Copy the JS object part.
for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) {
__ movq(rdx, FieldOperand(rdi, i));
__ movq(FieldOperand(rax, i), rdx);
}
// Setup the callee in-object property.
STATIC_ASSERT(Heap::kArgumentsCalleeIndex == 1);
__ movq(rdx, Operand(rsp, 3 * kPointerSize));
__ movq(FieldOperand(rax, JSObject::kHeaderSize +
Heap::kArgumentsCalleeIndex * kPointerSize),
rdx);
// Use the length (smi tagged) and set that as an in-object property too.
// Note: rcx is tagged from here on.
STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
__ Integer32ToSmi(rcx, rcx);
__ movq(FieldOperand(rax, JSObject::kHeaderSize +
Heap::kArgumentsLengthIndex * kPointerSize),
rcx);
// Setup the elements pointer in the allocated arguments object.
// If we allocated a parameter map, rdi will point there, otherwise to the
// backing store.
__ lea(rdi, Operand(rax, Heap::kArgumentsObjectSize));
__ movq(FieldOperand(rax, JSObject::kElementsOffset), rdi);
// rax = address of new object (tagged)
// rbx = mapped parameter count (untagged)
// rcx = argument count (tagged)
// rdi = address of parameter map or backing store (tagged)
// Initialize parameter map. If there are no mapped arguments, we're done.
Label skip_parameter_map;
__ testq(rbx, rbx);
__ j(zero, &skip_parameter_map);
__ LoadRoot(kScratchRegister, Heap::kNonStrictArgumentsElementsMapRootIndex);
// rbx contains the untagged argument count. Add 2 and tag to write.
__ movq(FieldOperand(rdi, FixedArray::kMapOffset), kScratchRegister);
__ Integer64PlusConstantToSmi(r9, rbx, 2);
__ movq(FieldOperand(rdi, FixedArray::kLengthOffset), r9);
__ movq(FieldOperand(rdi, FixedArray::kHeaderSize + 0 * kPointerSize), rsi);
__ lea(r9, Operand(rdi, rbx, times_pointer_size, kParameterMapHeaderSize));
__ movq(FieldOperand(rdi, FixedArray::kHeaderSize + 1 * kPointerSize), r9);
// Copy the parameter slots and the holes in the arguments.
// We need to fill in mapped_parameter_count slots. They index the context,
// where parameters are stored in reverse order, at
// MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1
// The mapped parameters thus need to get indices
// MIN_CONTEXT_SLOTS+parameter_count-1 ..
// MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count
// We loop from right to left.
Label parameters_loop, parameters_test;
// Load tagged parameter count into r9.
__ movq(r9, Operand(rsp, 1 * kPointerSize));
__ Move(r8, Smi::FromInt(Context::MIN_CONTEXT_SLOTS));
__ addq(r8, Operand(rsp, 3 * kPointerSize));
__ subq(r8, r9);
__ Move(r11, factory->the_hole_value());
__ movq(rdx, rdi);
__ SmiToInteger64(kScratchRegister, r9);
__ lea(rdi, Operand(rdi, kScratchRegister,
times_pointer_size,
kParameterMapHeaderSize));
// r9 = loop variable (tagged)
// r8 = mapping index (tagged)
// r11 = the hole value
// rdx = address of parameter map (tagged)
// rdi = address of backing store (tagged)
__ jmp(&parameters_test, Label::kNear);
__ bind(&parameters_loop);
__ SmiSubConstant(r9, r9, Smi::FromInt(1));
__ SmiToInteger64(kScratchRegister, r9);
__ movq(FieldOperand(rdx, kScratchRegister,
times_pointer_size,
kParameterMapHeaderSize),
r8);
__ movq(FieldOperand(rdi, kScratchRegister,
times_pointer_size,
FixedArray::kHeaderSize),
r11);
__ SmiAddConstant(r8, r8, Smi::FromInt(1));
__ bind(&parameters_test);
__ SmiTest(r9);
__ j(not_zero, &parameters_loop, Label::kNear);
__ bind(&skip_parameter_map);
// rcx = argument count (tagged)
// rdi = address of backing store (tagged)
// Copy arguments header and remaining slots (if there are any).
__ Move(FieldOperand(rdi, FixedArray::kMapOffset),
factory->fixed_array_map());
__ movq(FieldOperand(rdi, FixedArray::kLengthOffset), rcx);
Label arguments_loop, arguments_test;
__ movq(r8, rbx);
__ movq(rdx, Operand(rsp, 2 * kPointerSize));
// Untag rcx and r8 for the loop below.
__ SmiToInteger64(rcx, rcx);
__ SmiToInteger64(r8, r8);
__ lea(kScratchRegister, Operand(r8, times_pointer_size, 0));
__ subq(rdx, kScratchRegister);
__ jmp(&arguments_test, Label::kNear);
__ bind(&arguments_loop);
__ subq(rdx, Immediate(kPointerSize));
__ movq(r9, Operand(rdx, 0));
__ movq(FieldOperand(rdi, r8,
times_pointer_size,
FixedArray::kHeaderSize),
r9);
__ addq(r8, Immediate(1));
__ bind(&arguments_test);
__ cmpq(r8, rcx);
__ j(less, &arguments_loop, Label::kNear);
// Return and remove the on-stack parameters.
__ ret(3 * kPointerSize);
// Do the runtime call to allocate the arguments object.
// rcx = argument count (untagged)
__ bind(&runtime);
__ Integer32ToSmi(rcx, rcx);
__ movq(Operand(rsp, 1 * kPointerSize), rcx); // Patch argument count.
__ TailCallRuntime(Runtime::kNewStrictArgumentsFast, 3, 1);
}
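The mapped parameter count computed above is min(parameter count, argument count), so indices beyond the actual argument count live only in the backing store and never alias a formal. A sloppy-mode sketch:

function f(a, b, c) {
  arguments[2] = 7;    // index 2 was not passed, so it is unmapped:
  return c;            // 'c' stays undefined
}
f(1, 2);               // parameter count 3, argument count 2, mapped count 2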
void ArgumentsAccessStub::GenerateNewNonStrictSlow(MacroAssembler* masm) {
// rsp[0] : return address
// rsp[8] : number of parameters
// rsp[16] : receiver displacement
// rsp[24] : function
// Check if the calling frame is an arguments adaptor frame.
Label runtime;
__ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
__ movq(rcx, Operand(rdx, StandardFrameConstants::kContextOffset));
__ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
__ j(not_equal, &runtime);
// Patch the arguments.length and the parameters pointer.
__ movq(rcx, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset));
__ movq(Operand(rsp, 1 * kPointerSize), rcx);
__ SmiToInteger64(rcx, rcx);
__ lea(rdx, Operand(rdx, rcx, times_pointer_size,
StandardFrameConstants::kCallerSPOffset));
__ movq(Operand(rsp, 2 * kPointerSize), rdx);
__ bind(&runtime);
__ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);
}
void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
void ArgumentsAccessStub::GenerateNewObject(MacroAssembler* masm) {
// rsp[0] : return address
// rsp[8] : number of parameters
// rsp[16] : receiver displacement
// rsp[24] : function
// The displacement is used for skipping the return address and the
// frame pointer on the stack. It is the offset of the last
// parameter (if any) relative to the frame pointer.
static const int kDisplacement = 2 * kPointerSize;
// Check if the calling frame is an arguments adaptor frame.
Label adaptor_frame, try_allocate, runtime;
__ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
__ movq(rcx, Operand(rdx, StandardFrameConstants::kContextOffset));
__ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
__ Cmp(Operand(rdx, StandardFrameConstants::kContextOffset),
Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
__ j(equal, &adaptor_frame);
// Get the length from the frame.
__ movq(rcx, Operand(rsp, 1 * kPointerSize));
__ SmiToInteger64(rcx, rcx);
__ SmiToInteger32(rcx, Operand(rsp, 1 * kPointerSize));
__ jmp(&try_allocate);
// Patch the arguments.length and the parameters pointer.
__ bind(&adaptor_frame);
__ movq(rcx, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset));
__ movq(Operand(rsp, 1 * kPointerSize), rcx);
__ SmiToInteger64(rcx, rcx);
__ lea(rdx, Operand(rdx, rcx, times_pointer_size,
StandardFrameConstants::kCallerSPOffset));
__ SmiToInteger32(rcx,
Operand(rdx,
ArgumentsAdaptorFrameConstants::kLengthOffset));
// Space on stack must already hold a smi.
__ Integer32ToSmiField(Operand(rsp, 1 * kPointerSize), rcx);
// Do not clobber the length index for the indexing operation since
// it is used to compute the size for allocation later.
__ lea(rdx, Operand(rdx, rcx, times_pointer_size, kDisplacement));
__ movq(Operand(rsp, 2 * kPointerSize), rdx);
// Try the new space allocation. Start out with computing the size of
// the arguments object and the elements array.
Label add_arguments_object;
__ bind(&try_allocate);
__ testq(rcx, rcx);
__ j(zero, &add_arguments_object, Label::kNear);
__ lea(rcx, Operand(rcx, times_pointer_size, FixedArray::kHeaderSize));
__ testl(rcx, rcx);
__ j(zero, &add_arguments_object);
__ leal(rcx, Operand(rcx, times_pointer_size, FixedArray::kHeaderSize));
__ bind(&add_arguments_object);
__ addq(rcx, Immediate(Heap::kArgumentsObjectSizeStrict));
__ addl(rcx, Immediate(GetArgumentsObjectSize()));
// Do the allocation of both objects in one go.
__ AllocateInNewSpace(rcx, rax, rdx, rbx, &runtime, TAG_OBJECT);
@ -2234,51 +1995,59 @@ void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
// Get the arguments boilerplate from the current (global) context.
__ movq(rdi, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
__ movq(rdi, FieldOperand(rdi, GlobalObject::kGlobalContextOffset));
const int offset =
Context::SlotOffset(Context::STRICT_MODE_ARGUMENTS_BOILERPLATE_INDEX);
__ movq(rdi, Operand(rdi, offset));
__ movq(rdi, Operand(rdi,
Context::SlotOffset(GetArgumentsBoilerplateIndex())));
// Copy the JS object part.
for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) {
__ movq(rbx, FieldOperand(rdi, i));
__ movq(FieldOperand(rax, i), rbx);
STATIC_ASSERT(JSObject::kHeaderSize == 3 * kPointerSize);
__ movq(kScratchRegister, FieldOperand(rdi, 0 * kPointerSize));
__ movq(rdx, FieldOperand(rdi, 1 * kPointerSize));
__ movq(rbx, FieldOperand(rdi, 2 * kPointerSize));
__ movq(FieldOperand(rax, 0 * kPointerSize), kScratchRegister);
__ movq(FieldOperand(rax, 1 * kPointerSize), rdx);
__ movq(FieldOperand(rax, 2 * kPointerSize), rbx);
if (type_ == NEW_NON_STRICT) {
// Setup the callee in-object property.
ASSERT(Heap::kArgumentsCalleeIndex == 1);
__ movq(kScratchRegister, Operand(rsp, 3 * kPointerSize));
__ movq(FieldOperand(rax, JSObject::kHeaderSize +
Heap::kArgumentsCalleeIndex * kPointerSize),
kScratchRegister);
}
// Get the length (smi tagged) and set that as an in-object property too.
STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
ASSERT(Heap::kArgumentsLengthIndex == 0);
__ movq(rcx, Operand(rsp, 1 * kPointerSize));
__ movq(FieldOperand(rax, JSObject::kHeaderSize +
Heap::kArgumentsLengthIndex * kPointerSize),
Heap::kArgumentsLengthIndex * kPointerSize),
rcx);
// If there are no actual arguments, we're done.
Label done;
__ testq(rcx, rcx);
__ SmiTest(rcx);
__ j(zero, &done);
// Get the parameters pointer from the stack.
// Get the parameters pointer from the stack and untag the length.
__ movq(rdx, Operand(rsp, 2 * kPointerSize));
// Setup the elements pointer in the allocated arguments object and
// initialize the header in the elements fixed array.
__ lea(rdi, Operand(rax, Heap::kArgumentsObjectSizeStrict));
__ lea(rdi, Operand(rax, GetArgumentsObjectSize()));
__ movq(FieldOperand(rax, JSObject::kElementsOffset), rdi);
__ LoadRoot(kScratchRegister, Heap::kFixedArrayMapRootIndex);
__ movq(FieldOperand(rdi, FixedArray::kMapOffset), kScratchRegister);
__ movq(FieldOperand(rdi, FixedArray::kLengthOffset), rcx);
// Untag the length for the loop below.
__ SmiToInteger64(rcx, rcx);
__ SmiToInteger32(rcx, rcx); // Untag length for the loop below.
// Copy the fixed array slots.
Label loop;
__ bind(&loop);
__ movq(rbx, Operand(rdx, -1 * kPointerSize)); // Skip receiver.
__ movq(FieldOperand(rdi, FixedArray::kHeaderSize), rbx);
__ movq(kScratchRegister, Operand(rdx, -1 * kPointerSize)); // Skip receiver.
__ movq(FieldOperand(rdi, FixedArray::kHeaderSize), kScratchRegister);
__ addq(rdi, Immediate(kPointerSize));
__ subq(rdx, Immediate(kPointerSize));
__ decq(rcx);
__ decl(rcx);
__ j(not_zero, &loop);
// Return and remove the on-stack parameters.
@ -2287,7 +2056,7 @@ void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
// Do the runtime call to allocate the arguments object.
__ bind(&runtime);
__ TailCallRuntime(Runtime::kNewStrictArgumentsFast, 3, 1);
__ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);
}
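The strict-mode arguments object built here has no parameter map and no callee property, which is also why its boilerplate object is smaller. A sketch of the observable difference:

function f(a) {
  "use strict";
  arguments[0] = 42;   // no aliasing in strict mode...
  return a;            // ...so this still returns 1
}
f(1);
// Reading arguments.callee in strict mode throws a TypeError instead of
// returning the function.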

View File

@ -226,9 +226,15 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
// stack frame was an arguments adapter frame.
ArgumentsAccessStub stub(
is_strict_mode() ? ArgumentsAccessStub::NEW_STRICT
: ArgumentsAccessStub::NEW_NON_STRICT_SLOW);
: ArgumentsAccessStub::NEW_NON_STRICT);
__ CallStub(&stub);
Variable* arguments_shadow = scope()->arguments_shadow();
if (arguments_shadow != NULL) {
// Store new arguments object in both "arguments" and ".arguments" slots.
__ movq(rcx, rax);
Move(arguments_shadow->AsSlot(), rcx, rbx, rdx);
}
Move(arguments->AsSlot(), rax, rbx, rdx);
}
@ -371,7 +377,7 @@ void FullCodeGenerator::StackValueContext::Plug(Slot* slot) const {
void FullCodeGenerator::TestContext::Plug(Slot* slot) const {
codegen()->Move(result_register(), slot);
codegen()->PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
codegen()->DoTest(true_label_, false_label_, fall_through_);
codegen()->DoTest(this);
}
@ -404,7 +410,7 @@ void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
if (true_label_ != fall_through_) __ jmp(true_label_);
} else {
__ LoadRoot(result_register(), index);
codegen()->DoTest(true_label_, false_label_, fall_through_);
codegen()->DoTest(this);
}
}
@ -449,7 +455,7 @@ void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
} else {
// For simplicity we always test the accumulator register.
__ Move(result_register(), lit);
codegen()->DoTest(true_label_, false_label_, fall_through_);
codegen()->DoTest(this);
}
}
@ -485,7 +491,7 @@ void FullCodeGenerator::TestContext::DropAndPlug(int count,
__ Drop(count);
__ Move(result_register(), reg);
codegen()->PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
codegen()->DoTest(true_label_, false_label_, fall_through_);
codegen()->DoTest(this);
}
@ -560,7 +566,8 @@ void FullCodeGenerator::TestContext::Plug(bool flag) const {
}
void FullCodeGenerator::DoTest(Label* if_true,
void FullCodeGenerator::DoTest(Expression* condition,
Label* if_true,
Label* if_false,
Label* fall_through) {
ToBooleanStub stub;
@ -1214,12 +1221,13 @@ void FullCodeGenerator::EmitDynamicLoadFromSlotFastCase(
void FullCodeGenerator::EmitVariableLoad(Variable* var) {
// Three cases: non-this global variables, lookup slots, and all other
// types of slots.
// Four cases: non-this global variables, lookup slots, all other
// types of slots, and parameters that rewrite to explicit property
// accesses on the arguments object.
Slot* slot = var->AsSlot();
ASSERT((var->is_global() && !var->is_this()) == (slot == NULL));
Property* property = var->AsProperty();
if (slot == NULL) {
if (var->is_global() && !var->is_this()) {
Comment cmnt(masm_, "Global variable");
// Use inline caching. Variable name is passed in rcx and the global
// object on the stack.
@ -1245,7 +1253,7 @@ void FullCodeGenerator::EmitVariableLoad(Variable* var) {
context()->Plug(rax);
} else {
} else if (slot != NULL) {
Comment cmnt(masm_, (slot->type() == Slot::CONTEXT)
? "Context slot"
: "Stack slot");
@ -1263,6 +1271,34 @@ void FullCodeGenerator::EmitVariableLoad(Variable* var) {
} else {
context()->Plug(slot);
}
} else {
Comment cmnt(masm_, "Rewritten parameter");
ASSERT_NOT_NULL(property);
// Rewritten parameter accesses are of the form "slot[literal]".
// Assert that the object is in a slot.
Variable* object_var = property->obj()->AsVariableProxy()->AsVariable();
ASSERT_NOT_NULL(object_var);
Slot* object_slot = object_var->AsSlot();
ASSERT_NOT_NULL(object_slot);
// Load the object.
MemOperand object_loc = EmitSlotSearch(object_slot, rax);
__ movq(rdx, object_loc);
// Assert that the key is a smi.
Literal* key_literal = property->key()->AsLiteral();
ASSERT_NOT_NULL(key_literal);
ASSERT(key_literal->handle()->IsSmi());
// Load the key.
__ Move(rax, key_literal->handle());
// Do a keyed property load.
Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
EmitCallIC(ic, RelocInfo::CODE_TARGET, GetPropertyId(property));
context()->Plug(rax);
}
}
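As a sketch of the fourth case handled above (example ours): a sloppy-mode parameter that is forced through the arguments object is loaded as a keyed property of the hidden .arguments variable.

// Illustration only, assuming the parameter rewrite described in this diff.
function f(x) {
  with ({}) {}   // forces x to be indirected through the arguments object
  return x;      // compiled as a keyed load of .arguments[0]
}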
@ -1495,7 +1531,7 @@ void FullCodeGenerator::VisitAssignment(Assignment* expr) {
}
// Left-hand side can only be a property, a global or a (parameter or local)
// slot.
// slot. Variables with rewrite to .arguments are treated as KEYED_PROPERTY.
enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
LhsKind assign_type = VARIABLE;
Property* property = expr->target()->AsProperty();
@ -1521,13 +1557,29 @@ void FullCodeGenerator::VisitAssignment(Assignment* expr) {
break;
case KEYED_PROPERTY: {
if (expr->is_compound()) {
VisitForStackValue(property->obj());
VisitForAccumulatorValue(property->key());
if (property->is_arguments_access()) {
VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
MemOperand slot_operand =
EmitSlotSearch(obj_proxy->var()->AsSlot(), rcx);
__ push(slot_operand);
__ Move(rax, property->key()->AsLiteral()->handle());
} else {
VisitForStackValue(property->obj());
VisitForAccumulatorValue(property->key());
}
__ movq(rdx, Operand(rsp, 0));
__ push(rax);
} else {
VisitForStackValue(property->obj());
VisitForStackValue(property->key());
if (property->is_arguments_access()) {
VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
MemOperand slot_operand =
EmitSlotSearch(obj_proxy->var()->AsSlot(), rcx);
__ push(slot_operand);
__ Push(property->key()->AsLiteral()->handle());
} else {
VisitForStackValue(property->obj());
VisitForStackValue(property->key());
}
}
break;
}
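The is_arguments_access path above handles assignments whose target is such a rewritten parameter; in JavaScript terms (example ours):

// Illustration only.
function f(x) {
  with ({}) { x = 1; }  // LHS rewrites to .arguments[0], a KEYED_PROPERTY store
  return arguments[0];  // 1 in sloppy mode, since the parameter is aliased
}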
@ -1694,7 +1746,7 @@ void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
}
// Left-hand side can only be a property, a global or a (parameter or local)
// slot.
// slot. Variables with rewrite to .arguments are treated as KEYED_PROPERTY.
enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
LhsKind assign_type = VARIABLE;
Property* prop = expr->AsProperty();
@ -1725,10 +1777,20 @@ void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
}
case KEYED_PROPERTY: {
__ push(rax); // Preserve value.
VisitForStackValue(prop->obj());
VisitForAccumulatorValue(prop->key());
__ movq(rcx, rax);
__ pop(rdx);
if (prop->is_synthetic()) {
ASSERT(prop->obj()->AsVariableProxy() != NULL);
ASSERT(prop->key()->AsLiteral() != NULL);
{ AccumulatorValueContext for_object(this);
EmitVariableLoad(prop->obj()->AsVariableProxy()->var());
}
__ movq(rdx, rax);
__ Move(rcx, prop->key()->AsLiteral()->handle());
} else {
VisitForStackValue(prop->obj());
VisitForAccumulatorValue(prop->key());
__ movq(rcx, rax);
__ pop(rdx);
}
__ pop(rax); // Restore value.
Handle<Code> ic = is_strict_mode()
? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
@ -1744,6 +1806,8 @@ void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
void FullCodeGenerator::EmitVariableAssignment(Variable* var,
Token::Value op) {
// Left-hand sides that rewrite to explicit property accesses do not reach
// here.
ASSERT(var != NULL);
ASSERT(var->is_global() || var->AsSlot() != NULL);
@ -3698,7 +3762,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
}
// Expression can only be a property, a global or a (parameter or local)
// slot.
// slot. Variables with rewrite to .arguments are treated as KEYED_PROPERTY.
enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
LhsKind assign_type = VARIABLE;
Property* prop = expr->expression()->AsProperty();
@ -3724,8 +3788,16 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
__ push(rax); // Copy of receiver, needed for later store.
EmitNamedPropertyLoad(prop);
} else {
VisitForStackValue(prop->obj());
VisitForAccumulatorValue(prop->key());
if (prop->is_arguments_access()) {
VariableProxy* obj_proxy = prop->obj()->AsVariableProxy();
MemOperand slot_operand =
EmitSlotSearch(obj_proxy->var()->AsSlot(), rcx);
__ push(slot_operand);
__ Move(rax, prop->key()->AsLiteral()->handle());
} else {
VisitForStackValue(prop->obj());
VisitForAccumulatorValue(prop->key());
}
__ movq(rdx, Operand(rsp, 0)); // Leave receiver on stack
__ push(rax); // Copy of key, needed for later store.
EmitKeyedPropertyLoad(prop);
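The same rewrite applies to count operations; a sloppy-mode sketch (example ours):

// Illustration only.
function f(x) {
  with ({}) { x++; }    // keyed load of .arguments[0], increment, keyed store
  return arguments[0];  // f(1) returns 2 through the aliased arguments object
}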

View File

@ -1198,158 +1198,6 @@ void KeyedCallIC::GenerateMiss(MacroAssembler* masm, int argc) {
}
static Operand GenerateMappedArgumentsLookup(MacroAssembler* masm,
Register object,
Register key,
Register scratch1,
Register scratch2,
Register scratch3,
Label* unmapped_case,
Label* slow_case) {
Heap* heap = masm->isolate()->heap();
// Check that the receiver isn't a smi.
__ JumpIfSmi(object, slow_case);
// Check that the key is a positive smi.
Condition check = masm->CheckNonNegativeSmi(key);
__ j(NegateCondition(check), slow_case);
// Load the elements into scratch1 and check its map. If not, jump
// to the unmapped lookup with the parameter map in scratch1.
Handle<Map> arguments_map(heap->non_strict_arguments_elements_map());
__ movq(scratch1, FieldOperand(object, JSObject::kElementsOffset));
__ CheckMap(scratch1, arguments_map, slow_case, DONT_DO_SMI_CHECK);
// Check if element is in the range of mapped arguments.
__ movq(scratch2, FieldOperand(scratch1, FixedArray::kLengthOffset));
__ SmiSubConstant(scratch2, scratch2, Smi::FromInt(2));
__ cmpq(key, scratch2);
__ j(greater_equal, unmapped_case);
// Load element index and check whether it is the hole.
const int kHeaderSize = FixedArray::kHeaderSize + 2 * kPointerSize;
__ SmiToInteger64(scratch3, key);
__ movq(scratch2, FieldOperand(scratch1,
scratch3,
times_pointer_size,
kHeaderSize));
__ CompareRoot(scratch2, Heap::kTheHoleValueRootIndex);
__ j(equal, unmapped_case);
// Load value from context and return it. We can reuse scratch1 because
// we do not jump to the unmapped lookup (which requires the parameter
// map in scratch1).
__ movq(scratch1, FieldOperand(scratch1, FixedArray::kHeaderSize));
__ SmiToInteger64(scratch3, scratch2);
return FieldOperand(scratch1,
scratch3,
times_pointer_size,
Context::kHeaderSize);
}
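The mapped lookup removed here implements sloppy-mode parameter aliasing: indices below min(formal count, actual count) resolve through the parameter map to context slots. In JavaScript terms (example ours):

// Illustration only.
function f(a, b) {
  arguments[0] = "via map";  // mapped index: the write goes through to a
  return a;                  // f(1, 2) returns "via map"
}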
static Operand GenerateUnmappedArgumentsLookup(MacroAssembler* masm,
Register key,
Register parameter_map,
Register scratch,
Label* slow_case) {
// Element is in arguments backing store, which is referenced by the
// second element of the parameter_map. The parameter_map register
// must be loaded with the parameter map of the arguments object and is
// overwritten.
const int kBackingStoreOffset = FixedArray::kHeaderSize + kPointerSize;
Register backing_store = parameter_map;
__ movq(backing_store, FieldOperand(parameter_map, kBackingStoreOffset));
__ movq(scratch, FieldOperand(backing_store, FixedArray::kLengthOffset));
__ cmpq(key, scratch);
__ j(greater_equal, slow_case);
__ SmiToInteger64(scratch, key);
return FieldOperand(backing_store,
scratch,
times_pointer_size,
FixedArray::kHeaderSize);
}
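Indices outside the mapped range live only in the backing store and take the unmapped path; for example (ours):

// Illustration only.
function f(a) {
  return arguments[1];  // index 1 has no formal parameter: unmapped,
}                       // read directly from the backing store
// f(1, 2) returns 2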
void KeyedLoadIC::GenerateNonStrictArguments(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- rax : key
// -- rdx : receiver
// -- rsp[0] : return address
// -----------------------------------
Label slow, notin;
Operand mapped_location =
GenerateMappedArgumentsLookup(
masm, rdx, rax, rbx, rcx, rdi, &notin, &slow);
__ movq(rax, mapped_location);
__ Ret();
__ bind(&notin);
// The unmapped lookup expects that the parameter map is in rbx.
Operand unmapped_location =
GenerateUnmappedArgumentsLookup(masm, rax, rbx, rcx, &slow);
__ CompareRoot(unmapped_location, Heap::kTheHoleValueRootIndex);
__ j(equal, &slow);
__ movq(rax, unmapped_location);
__ Ret();
__ bind(&slow);
GenerateMiss(masm, false);
}
void KeyedStoreIC::GenerateNonStrictArguments(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- rax : value
// -- rcx : key
// -- rdx : receiver
// -- rsp[0] : return address
// -----------------------------------
Label slow, notin;
Operand mapped_location = GenerateMappedArgumentsLookup(
masm, rdx, rcx, rbx, rdi, r8, &notin, &slow);
__ movq(mapped_location, rax);
__ Ret();
__ bind(&notin);
// The unmapped lookup expects that the parameter map is in rbx.
Operand unmapped_location =
GenerateUnmappedArgumentsLookup(masm, rcx, rbx, rdi, &slow);
__ movq(unmapped_location, rax);
__ Ret();
__ bind(&slow);
GenerateMiss(masm, false);
}
void KeyedCallIC::GenerateNonStrictArguments(MacroAssembler* masm,
int argc) {
// ----------- S t a t e -------------
// rcx : function name
// rsp[0] : return address
// rsp[8] : argument argc
// rsp[16] : argument argc - 1
// ...
// rsp[argc * 8] : argument 1
// rsp[(argc + 1) * 8] : argument 0 = receiver
// -----------------------------------
Label slow, notin;
__ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
Operand mapped_location = GenerateMappedArgumentsLookup(
masm, rdx, rcx, rbx, rax, r8, &notin, &slow);
__ movq(rdi, mapped_location);
GenerateFunctionTailCall(masm, argc, &slow);
__ bind(&notin);
// The unmapped lookup expects that the parameter map is in rbx.
Operand unmapped_location =
GenerateUnmappedArgumentsLookup(masm, rcx, rbx, rax, &slow);
__ CompareRoot(unmapped_location, Heap::kTheHoleValueRootIndex);
__ j(equal, &slow);
__ movq(rdi, unmapped_location);
GenerateFunctionTailCall(masm, argc, &slow);
__ bind(&slow);
GenerateMiss(masm, argc);
}
void LoadIC::GenerateMegamorphic(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- rax : receiver

View File

@ -2509,7 +2509,6 @@ void LCodeGen::DoLoadKeyedSpecializedArrayElement(
case JSObject::FAST_ELEMENTS:
case JSObject::FAST_DOUBLE_ELEMENTS:
case JSObject::DICTIONARY_ELEMENTS:
case JSObject::NON_STRICT_ARGUMENTS_ELEMENTS:
UNREACHABLE();
break;
}
@ -3225,7 +3224,6 @@ void LCodeGen::DoStoreKeyedSpecializedArrayElement(
case JSObject::FAST_ELEMENTS:
case JSObject::FAST_DOUBLE_ELEMENTS:
case JSObject::DICTIONARY_ELEMENTS:
case JSObject::NON_STRICT_ARGUMENTS_ELEMENTS:
UNREACHABLE();
break;
}

View File

@ -3390,7 +3390,6 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray(
case JSObject::FAST_ELEMENTS:
case JSObject::FAST_DOUBLE_ELEMENTS:
case JSObject::DICTIONARY_ELEMENTS:
case JSObject::NON_STRICT_ARGUMENTS_ELEMENTS:
UNREACHABLE();
break;
}
@ -3457,7 +3456,6 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray(
case JSObject::FAST_ELEMENTS:
case JSObject::FAST_DOUBLE_ELEMENTS:
case JSObject::DICTIONARY_ELEMENTS:
case JSObject::NON_STRICT_ARGUMENTS_ELEMENTS:
UNREACHABLE();
break;
}

View File

@ -3039,8 +3039,7 @@ THREADED_TEST(DefinePropertyOnAPIAccessor) {
result = script_define->Run();
CHECK(try_catch.HasCaught());
String::AsciiValue exception_value(try_catch.Exception());
CHECK_EQ(*exception_value,
"TypeError: Cannot redefine property: defineProperty");
CHECK_EQ(*exception_value, "TypeError: Cannot redefine property: x");
}
THREADED_TEST(DefinePropertyOnDefineGetterSetter) {
@ -3085,8 +3084,7 @@ THREADED_TEST(DefinePropertyOnDefineGetterSetter) {
result = script_define->Run();
CHECK(try_catch.HasCaught());
String::AsciiValue exception_value(try_catch.Exception());
CHECK_EQ(*exception_value,
"TypeError: Cannot redefine property: defineProperty");
CHECK_EQ(*exception_value, "TypeError: Cannot redefine property: x");
}
@ -3204,8 +3202,7 @@ THREADED_TEST(DontDeleteAPIAccessorsCannotBeOverriden) {
"{get: function() { return 'func'; }})");
CHECK(try_catch.HasCaught());
String::AsciiValue exception_value(try_catch.Exception());
CHECK_EQ(*exception_value,
"TypeError: Cannot redefine property: defineProperty");
CHECK_EQ(*exception_value, "TypeError: Cannot redefine property: x");
}
{
v8::TryCatch try_catch;
@ -3213,8 +3210,7 @@ THREADED_TEST(DontDeleteAPIAccessorsCannotBeOverriden) {
"{get: function() { return 'func'; }})");
CHECK(try_catch.HasCaught());
String::AsciiValue exception_value(try_catch.Exception());
CHECK_EQ(*exception_value,
"TypeError: Cannot redefine property: defineProperty");
CHECK_EQ(*exception_value, "TypeError: Cannot redefine property: x");
}
}
@ -9575,7 +9571,6 @@ static void CheckSurvivingGlobalObjectsCount(int expected) {
// the first garbage collection but some of the maps have already
// been marked at that point. Therefore some of the maps are not
// collected until the second garbage collection.
HEAP->global_context_map();
HEAP->CollectAllGarbage(false);
HEAP->CollectAllGarbage(false);
int count = GetGlobalObjectsCount();
@ -14485,3 +14480,25 @@ THREADED_TEST(CallAPIFunctionOnNonObject) {
TryCatch try_catch;
CompileRun("f.call(2)");
}
// Regression test for issue 1470.
THREADED_TEST(ReadOnlyIndexedProperties) {
v8::HandleScope scope;
Local<ObjectTemplate> templ = ObjectTemplate::New();
LocalContext context;
Local<v8::Object> obj = templ->NewInstance();
context->Global()->Set(v8_str("obj"), obj);
obj->Set(v8_str("1"), v8_str("DONT_CHANGE"), v8::ReadOnly);
obj->Set(v8_str("1"), v8_str("foobar"));
CHECK_EQ(v8_str("DONT_CHANGE"), obj->Get(v8_str("1")));
obj->Set(v8_num(2), v8_str("DONT_CHANGE"), v8::ReadOnly);
obj->Set(v8_num(2), v8_str("foobar"));
CHECK_EQ(v8_str("DONT_CHANGE"), obj->Get(v8_num(2)));
// Test non-smi case.
obj->Set(v8_str("2000000000"), v8_str("DONT_CHANGE"), v8::ReadOnly);
obj->Set(v8_str("2000000000"), v8_str("foobar"));
CHECK_EQ(v8_str("DONT_CHANGE"), obj->Get(v8_str("2000000000")));
}

View File

@ -675,7 +675,7 @@ TEST(JSArray) {
CHECK(array->HasFastElements()); // Must be in fast mode.
// array[length] = name.
ok = array->SetElement(0, *name, kNonStrictMode, true)->ToObjectChecked();
ok = array->SetElement(0, *name, kNonStrictMode)->ToObjectChecked();
CHECK_EQ(Smi::FromInt(1), array->length());
CHECK_EQ(array->GetElement(0), *name);
@ -690,8 +690,7 @@ TEST(JSArray) {
CHECK(array->HasDictionaryElements()); // Must be in slow mode.
// array[length] = name.
ok = array->SetElement(
int_length, *name, kNonStrictMode, true)->ToObjectChecked();
ok = array->SetElement(int_length, *name, kNonStrictMode)->ToObjectChecked();
uint32_t new_int_length = 0;
CHECK(array->length()->ToArrayIndex(&new_int_length));
CHECK_EQ(static_cast<double>(int_length), new_int_length - 1);
@ -718,10 +717,9 @@ TEST(JSObjectCopy) {
obj->SetProperty(
*second, Smi::FromInt(2), NONE, kNonStrictMode)->ToObjectChecked();
Object* ok =
obj->SetElement(0, *first, kNonStrictMode, true)->ToObjectChecked();
Object* ok = obj->SetElement(0, *first, kNonStrictMode)->ToObjectChecked();
ok = obj->SetElement(1, *second, kNonStrictMode, true)->ToObjectChecked();
ok = obj->SetElement(1, *second, kNonStrictMode)->ToObjectChecked();
// Make the clone.
Handle<JSObject> clone = Copy(obj);
@ -739,8 +737,8 @@ TEST(JSObjectCopy) {
clone->SetProperty(
*second, Smi::FromInt(1), NONE, kNonStrictMode)->ToObjectChecked();
ok = clone->SetElement(0, *second, kNonStrictMode, true)->ToObjectChecked();
ok = clone->SetElement(1, *first, kNonStrictMode, true)->ToObjectChecked();
ok = clone->SetElement(0, *second, kNonStrictMode)->ToObjectChecked();
ok = clone->SetElement(1, *first, kNonStrictMode)->ToObjectChecked();
CHECK_EQ(obj->GetElement(1), clone->GetElement(0));
CHECK_EQ(obj->GetElement(0), clone->GetElement(1));

View File

@ -152,8 +152,8 @@ function f6(x, y) {
return [arguments.length, arguments[0], y, arguments[2]];
}
assertArrayEquals([0, void 0, void 0, void 0], f6());
assertArrayEquals([1, "x", void 0, void 0], f6(1));
assertArrayEquals([0, "x", "y", void 0], f6());
assertArrayEquals([1, "x", "y", void 0], f6(1));
assertArrayEquals([2, "x", "y", void 0], f6(9, 17));
assertArrayEquals([3, "x", "y", 7], f6(3, 5, 7));
assertArrayEquals([4, "x", "y", "c"], f6("a", "b", "c", "d"));
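The new expectations reflect the aliasing model this revert restores: an assignment to a formal parameter shows through arguments[i] even when no actual argument was passed at that index. A sketch consistent with the asserts (the assignments to x and y are our assumption; only the return statement is visible in the hunk):

// Illustration only.
function f6(x, y) {
  x = "x"; y = "y";  // hypothetical body implied by the expected values
  return [arguments.length, arguments[0], y, arguments[2]];
}
// Under the restored model, f6() yields [0, "x", "y", void 0].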

View File

@ -0,0 +1,38 @@
// Copyright 2008 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// When a property of the arguments array is deleted, it
// must be "disconnected" from the corresponding parameter.
// Re-introducing the property does not connect to the parameter.
function f(x) {
delete arguments[0];
arguments[0] = 100;
return x;
}
assertEquals(10, f(10));

View File

@ -33,73 +33,33 @@ listenerComplete = false;
exception = false;
function h() {
var a = 1;
var b = 2;
debugger; // Breakpoint.
}
function checkFrame0(frame) {
// Frame 0 (h) has normal variables a and b.
var count = frame.localCount();
assertEquals(2, count);
for (var i = 0; i < count; ++i) {
var name = frame.localName(i);
var value = frame.localValue(i).value();
if (name == 'a') {
assertEquals(1, value);
} else {
assertEquals('b', name);
assertEquals(2, value);
}
function checkFrame0(name, value) {
assertTrue(name == 'a' || name == 'b');
if (name == 'a') {
assertEquals(1, value);
}
if (name == 'b') {
assertEquals(2, value);
}
}
function g() {
var a = 3;
eval("var b = 4;");
h();
}
function checkFrame1(frame) {
// Frame 1 (g) has normal variable a (and arguments).
var count = frame.localCount();
assertEquals(2, count);
for (var i = 0; i < count; ++i) {
var name = frame.localName(i);
var value = frame.localValue(i).value();
if (name == 'a') {
assertEquals(3, value);
} else {
assertEquals('arguments', name);
}
function checkFrame1(name, value) {
assertTrue(name == '.arguments' || name == 'a');
if (name == 'a') {
assertEquals(3, value);
}
}
function f() {
var a = 5;
var b = 0;
with ({b:6}) {
g();
function checkFrame2(name, value) {
assertTrue(name == '.arguments' || name == 'a' ||
name == 'arguments' || name == 'b');
if (name == 'a') {
assertEquals(5, value);
}
}
function checkFrame2(frame) {
// Frame 2 (f) has normal variables a and b (and arguments).
var count = frame.localCount();
assertEquals(3, count);
for (var i = 0; i < count; ++i) {
var name = frame.localName(i);
var value = frame.localValue(i).value();
if (name == 'a') {
assertEquals(5, value);
} else if (name == 'b') {
assertEquals(0, value);
} else {
assertEquals('arguments', name);
}
if (name == 'b') {
assertEquals(0, value);
}
}
@ -108,9 +68,23 @@ function listener(event, exec_state, event_data, data) {
try {
if (event == Debug.DebugEvent.Break)
{
checkFrame0(exec_state.frame(0));
checkFrame1(exec_state.frame(1));
checkFrame2(exec_state.frame(2));
// Frame 0 has normal variables a and b.
var frame0 = exec_state.frame(0);
checkFrame0(frame0.localName(0), frame0.localValue(0).value());
checkFrame0(frame0.localName(1), frame0.localValue(1).value());
// Frame 1 has normal variable a (and the .arguments variable).
var frame1 = exec_state.frame(1);
checkFrame1(frame1.localName(0), frame1.localValue(0).value());
checkFrame1(frame1.localName(1), frame1.localValue(1).value());
// Frame 2 has normal variables a and b (and both the .arguments and
// arguments variable).
var frame2 = exec_state.frame(2);
checkFrame2(frame2.localName(0), frame2.localValue(0).value());
checkFrame2(frame2.localName(1), frame2.localValue(1).value());
checkFrame2(frame2.localName(2), frame2.localValue(2).value());
checkFrame2(frame2.localName(3), frame2.localValue(3).value());
// Evaluating a and b on frames 0, 1 and 2 produces 1, 2, 3, 4, 5 and 6.
assertEquals(1, exec_state.frame(0).evaluate('a').value());
@ -131,6 +105,26 @@ function listener(event, exec_state, event_data, data) {
// Add the debug event listener.
Debug.setListener(listener);
function h() {
var a = 1;
var b = 2;
debugger; // Breakpoint.
};
function g() {
var a = 3;
eval("var b = 4;");
h();
};
function f() {
var a = 5;
var b = 0;
with ({b:6}) {
g();
}
};
f();
// Make sure that the debug event listener was invoked.

View File

@ -143,7 +143,6 @@ var knownProblems = {
// These functions should not be callable as runtime functions.
"NewFunctionContext": true,
"NewArgumentsFast": true,
"NewStrictArgumentsFast": true,
"PushWithContext": true,
"PushCatchContext": true,
"LazyCompile": true,

View File

@ -57,18 +57,21 @@ assertEquals("2:false", test2(), "test2");
assertEquals(0, x, "test2"); // Global x is undisturbed.
// Delete on a parameter.
// Delete on an argument. This hits the same code paths as test5 because
// 'with' forces all parameters to be indirected through the arguments
// object.
function test3(value) {
var status;
with ({}) { status = delete value; }
return value + ":" + status;
}
assertEquals("3:false", test3(3), "test3");
assertEquals("undefined:true", test3(3), "test3");
assertEquals(0, x, "test3"); // Global x is undisturbed.
// Delete on a parameter found in an outer context.
// Delete on an argument from an outer context. This hits the same code
// path as test2.
function test4(value) {
function f() {
with ({}) { return delete value; }
@ -81,14 +84,15 @@ assertEquals("4:false", test4(4), "test4");
assertEquals(0, x, "test4"); // Global x is undisturbed.
// Delete on a parameter, arguments object should be unaffected.
// Delete on an argument found in the arguments object. Such properties are
// normally DONT_DELETE in JavaScript but deletion is allowed by V8.
function test5(value) {
var status;
with ({}) { status = delete value; }
return arguments[0] + ":" + status;
}
assertEquals("5:false", test5(5), "test5");
assertEquals("undefined:true", test5(5), "test5");
assertEquals(0, x, "test5"); // Global x is undisturbed.
function test6(value) {
@ -99,7 +103,7 @@ function test6(value) {
return arguments[0] + ":" + status;
}
assertEquals("6:false", test6(6), "test6");
assertEquals("undefined:true", test6(6), "test6");
assertEquals(0, x, "test6"); // Global x is undisturbed.
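For contrast (not part of this test file): strict mode never reaches these paths, because deleting a plain identifier is rejected at parse time.

// Hypothetical strict-mode counterpart, illustration only.
function test7(value) {
  "use strict";
  // delete value;  // early SyntaxError in strict mode if uncommented
  return value;
}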

View File

@ -599,6 +599,11 @@ js1_5/Regress/regress-416737-02: FAIL_OK
js1_5/Array/regress-350256-02: FAIL
# This fails because 'delete arguments[i]' does not disconnect the
# argument from the arguments array. See issue #900066.
ecma_3/Function/regress-137181: FAIL
# 'export' and 'import' are not keywords in V8.
ecma_2/Exceptions/lexical-010: FAIL
ecma_2/Exceptions/lexical-022: FAIL

View File

@ -30,6 +30,14 @@ def FAIL_OK = FAIL, OKAY
##################### DELIBERATE INCOMPATIBILITIES #####################
# 900066: Deleting elements in .arguments should disconnect the
# element from the actual arguments. Implementing this is nontrivial
# and we have no indication that anything on the web depends on this
# feature.
S13_A13_T1: FAIL_OK
S13_A13_T2: FAIL_OK
S13_A13_T3: FAIL_OK
# This tests precision of trigonometric functions. We're slightly off
# from the implementation in libc (~ 1e-17) but it's not clear if we
# or they are closer to the right answer, or if it even matters.