Make closures optimizable by Crankshaft compiler.

Currently, only closures that do nothing more than read from the context are supported.

Review URL: http://codereview.chromium.org/5753005

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@6340 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
This commit is contained in:
antonm@chromium.org 2011-01-17 08:11:03 +00:00
parent 0819ac76f6
commit fae90d4f32
33 changed files with 469 additions and 108 deletions

View File

@ -244,6 +244,11 @@ void LUnaryMathOperation::PrintDataTo(StringStream* stream) const {
}
void LLoadContextSlot::PrintDataTo(StringStream* stream) {
stream->Add("(%d, %d)", context_chain_length(), slot_index());
}
void LCallKeyed::PrintDataTo(StringStream* stream) const {
stream->Add("[r2] #%d / ", arity());
}
@ -1601,6 +1606,11 @@ LInstruction* LChunkBuilder::DoStoreGlobal(HStoreGlobal* instr) {
}
LInstruction* LChunkBuilder::DoLoadContextSlot(HLoadContextSlot* instr) {
return DefineAsRegister(new LLoadContextSlot);
}
LInstruction* LChunkBuilder::DoLoadNamedField(HLoadNamedField* instr) {
return DefineAsRegister(
new LLoadNamedField(UseRegisterAtStart(instr->object())));

View File

@ -86,7 +86,8 @@ class LCodeGen;
// LGlobalObject
// LGlobalReceiver
// LLabel
// LLayzBailout
// LLazyBailout
// LLoadContextSlot
// LLoadGlobal
// LMaterializedLiteral
// LArrayLiteral
@ -221,6 +222,7 @@ class LCodeGen;
V(ClassOfTestAndBranch) \
V(Label) \
V(LazyBailout) \
V(LoadContextSlot) \
V(LoadElements) \
V(LoadGlobal) \
V(LoadKeyedFastElement) \
@ -1273,6 +1275,20 @@ class LStoreGlobal: public LUnaryOperation {
};
class LLoadContextSlot: public LInstruction {
public:
DECLARE_CONCRETE_INSTRUCTION(LoadContextSlot, "load-context-slot")
DECLARE_HYDROGEN_ACCESSOR(LoadContextSlot)
int context_chain_length() const {
return hydrogen()->context_chain_length();
}
int slot_index() const { return hydrogen()->slot_index(); }
virtual void PrintDataTo(StringStream* stream);
};
class LPushArgument: public LUnaryOperation {
public:
explicit LPushArgument(LOperand* argument) : LUnaryOperation(argument) {}

View File

@ -1988,6 +1988,14 @@ void LCodeGen::DoStoreGlobal(LStoreGlobal* instr) {
}
void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) {
// TODO(antonm): load a context with a separate instruction.
Register result = ToRegister(instr->result());
__ LoadContext(result, instr->context_chain_length());
__ ldr(result, ContextOperand(result, instr->slot_index()));
}
void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) {
Register object = ToRegister(instr->input());
Register result = ToRegister(instr->result());
@ -2865,15 +2873,15 @@ void LCodeGen::DoCheckMap(LCheckMap* instr) {
}
void LCodeGen::LoadPrototype(Register result,
Handle<JSObject> prototype) {
if (Heap::InNewSpace(*prototype)) {
void LCodeGen::LoadHeapObject(Register result,
Handle<HeapObject> object) {
if (Heap::InNewSpace(*object)) {
Handle<JSGlobalPropertyCell> cell =
Factory::NewJSGlobalPropertyCell(prototype);
Factory::NewJSGlobalPropertyCell(object);
__ mov(result, Operand(cell));
__ ldr(result, FieldMemOperand(result, JSGlobalPropertyCell::kValueOffset));
} else {
__ mov(result, Operand(prototype));
__ mov(result, Operand(object));
}
}
@ -2886,7 +2894,7 @@ void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {
Handle<JSObject> current_prototype = instr->prototype();
// Load prototype object.
LoadPrototype(temp1, current_prototype);
LoadHeapObject(temp1, current_prototype);
// Check prototype maps up to the holder.
while (!current_prototype.is_identical_to(holder)) {
@ -2896,7 +2904,7 @@ void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {
current_prototype =
Handle<JSObject>(JSObject::cast(current_prototype->GetPrototype()));
// Load next prototype object.
LoadPrototype(temp1, current_prototype);
LoadHeapObject(temp1, current_prototype);
}
// Check the holder map.

View File

@ -176,7 +176,7 @@ class LCodeGen BASE_EMBEDDED {
int arity,
LInstruction* instr);
void LoadPrototype(Register result, Handle<JSObject> prototype);
void LoadHeapObject(Register result, Handle<HeapObject> object);
void RegisterLazyDeoptimization(LInstruction* instr);
void RegisterEnvironmentForDeoptimization(LEnvironment* environment);

View File

@ -166,12 +166,6 @@ bool FunctionLiteral::AllowsLazyCompilation() {
}
bool FunctionLiteral::AllowOptimize() {
// We can't deal with heap-allocated locals.
return scope()->num_heap_slots() == 0;
}
ObjectLiteral::Property::Property(Literal* key, Expression* value) {
emit_store_ = true;
key_ = key;

View File

@ -1717,7 +1717,6 @@ class FunctionLiteral: public Expression {
int num_parameters() { return num_parameters_; }
bool AllowsLazyCompilation();
bool AllowOptimize();
Handle<String> debug_name() const {
if (name_->length() > 0) return name_;

View File

@ -92,6 +92,25 @@ CompilationInfo::CompilationInfo(Handle<JSFunction> closure)
}
void CompilationInfo::DisableOptimization() {
if (FLAG_optimize_closures) {
// If we allow closure optimizations and this is an optimizable closure,
// mark it correspondingly.
bool is_closure = closure_.is_null() && !scope_->HasTrivialOuterContext();
if (is_closure) {
bool is_optimizable_closure =
!scope_->outer_scope_calls_eval() && !scope_->inside_with();
if (is_optimizable_closure) {
SetMode(BASE);
return;
}
}
}
SetMode(NONOPT);
}
// Determine whether to use the full compiler for all code. If the flag
// --always-full-compiler is specified this is the case. For the virtual frame
// based compiler the full compiler is also used if a debugger is connected, as

View File

@ -114,7 +114,7 @@ class CompilationInfo BASE_EMBEDDED {
SetMode(OPTIMIZE);
osr_ast_id_ = osr_ast_id;
}
void DisableOptimization() { SetMode(NONOPT); }
void DisableOptimization();
// Deoptimization support.
bool HasDeoptimizationSupport() const { return supports_deoptimization_; }
@ -125,9 +125,7 @@ class CompilationInfo BASE_EMBEDDED {
// Determine whether or not we can adaptively optimize.
bool AllowOptimize() {
return V8::UseCrankshaft() &&
!closure_.is_null() &&
function_->AllowOptimize();
return V8::UseCrankshaft() && !closure_.is_null();
}
private:

View File

@ -141,6 +141,7 @@ DEFINE_bool(use_osr, false, "use on-stack replacement")
#endif
DEFINE_bool(trace_osr, false, "trace on-stack replacement")
DEFINE_int(stress_runs, 0, "number of stress runs")
DEFINE_bool(optimize_closures, true, "optimize closures")
// assembler-ia32.cc / assembler-arm.cc / assembler-x64.cc
DEFINE_bool(debug_code, false,

View File

@ -1190,6 +1190,11 @@ void HStoreGlobal::PrintDataTo(StringStream* stream) const {
}
void HLoadContextSlot::PrintDataTo(StringStream* stream) const {
stream->Add("(%d, %d)", context_chain_length(), slot_index());
}
// Implementation of type inference and type conversions. Calculates
// the inferred type of this instruction based on the input operands.

View File

@ -107,6 +107,7 @@ class LChunkBuilder;
// HGlobalObject
// HGlobalReceiver
// HLeaveInlined
// HLoadContextSlot
// HLoadGlobal
// HMaterializedLiteral
// HArrayLiteral
@ -220,6 +221,7 @@ class LChunkBuilder;
V(JSArrayLength) \
V(ClassOfTest) \
V(LeaveInlined) \
V(LoadContextSlot) \
V(LoadElements) \
V(LoadGlobal) \
V(LoadKeyedFastElement) \
@ -2599,6 +2601,39 @@ class HStoreGlobal: public HUnaryOperation {
};
class HLoadContextSlot: public HInstruction {
public:
HLoadContextSlot(int context_chain_length , int slot_index)
: context_chain_length_(context_chain_length), slot_index_(slot_index) {
set_representation(Representation::Tagged());
SetFlag(kUseGVN);
SetFlag(kDependsOnCalls);
}
int context_chain_length() const { return context_chain_length_; }
int slot_index() const { return slot_index_; }
virtual void PrintDataTo(StringStream* stream) const;
virtual intptr_t Hashcode() const {
return context_chain_length() * 29 + slot_index();
}
DECLARE_CONCRETE_INSTRUCTION(LoadContextSlot, "load_context_slot")
protected:
virtual bool DataEquals(HValue* other) const {
HLoadContextSlot* b = HLoadContextSlot::cast(other);
return (context_chain_length() == b->context_chain_length())
&& (slot_index() == b->slot_index());
}
private:
int context_chain_length_;
int slot_index_;
};
class HLoadNamedField: public HUnaryOperation {
public:
HLoadNamedField(HValue* object, bool is_in_object, int offset)

View File

@ -2940,6 +2940,21 @@ void HGraphBuilder::VisitVariableProxy(VariableProxy* expr) {
BAILOUT("unsupported context for arguments object");
}
ast_context()->ReturnValue(environment()->Lookup(variable));
} else if (variable->IsContextSlot()) {
if (variable->mode() == Variable::CONST) {
BAILOUT("reference to const context slot");
}
Slot* slot = variable->AsSlot();
CompilationInfo* info = graph()->info();
int context_chain_length = info->function()->scope()->
ContextChainLength(slot->var()->scope());
ASSERT(context_chain_length >= 0);
// TODO(antonm): if slot's value is not modified by closures, instead
// of reading it out of context, we could just embed the value as
// a constant.
HLoadContextSlot* instr =
new HLoadContextSlot(context_chain_length, slot->index());
ast_context()->ReturnInstruction(instr, expr->id());
} else if (variable->is_global()) {
LookupResult lookup;
LookupGlobalPropertyCell(variable, &lookup, false);
@ -2956,7 +2971,7 @@ void HGraphBuilder::VisitVariableProxy(VariableProxy* expr) {
HLoadGlobal* instr = new HLoadGlobal(cell, check_hole);
ast_context()->ReturnInstruction(instr, expr->id());
} else {
BAILOUT("reference to non-stack-allocated/non-global variable");
BAILOUT("reference to a variable which requires dynamic lookup");
}
}
@ -3482,7 +3497,7 @@ void HGraphBuilder::VisitAssignment(Assignment* expr) {
Top(),
expr->position(),
expr->AssignmentId());
} else {
} else if (var->IsStackAllocated()) {
// We allow reference to the arguments object only in assignments
// to local variables to make sure that the arguments object does
// not escape and is not modified.
@ -3495,6 +3510,8 @@ void HGraphBuilder::VisitAssignment(Assignment* expr) {
VISIT_FOR_VALUE(expr->value());
}
Bind(proxy->var(), Top());
} else {
BAILOUT("Assigning to no non-stack-allocated/non-global variable");
}
// Return the value.
ast_context()->ReturnValue(Pop());

View File

@ -2115,6 +2115,14 @@ void LCodeGen::DoStoreGlobal(LStoreGlobal* instr) {
}
void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) {
// TODO(antonm): load a context with a separate instruction.
Register result = ToRegister(instr->result());
__ LoadContext(result, instr->context_chain_length());
__ mov(result, ContextOperand(result, instr->slot_index()));
}
void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) {
Register object = ToRegister(instr->input());
Register result = ToRegister(instr->result());
@ -3306,13 +3314,13 @@ void LCodeGen::DoCheckMap(LCheckMap* instr) {
}
void LCodeGen::LoadPrototype(Register result, Handle<JSObject> prototype) {
if (Heap::InNewSpace(*prototype)) {
void LCodeGen::LoadHeapObject(Register result, Handle<HeapObject> object) {
if (Heap::InNewSpace(*object)) {
Handle<JSGlobalPropertyCell> cell =
Factory::NewJSGlobalPropertyCell(prototype);
Factory::NewJSGlobalPropertyCell(object);
__ mov(result, Operand::Cell(cell));
} else {
__ mov(result, prototype);
__ mov(result, object);
}
}
@ -3324,7 +3332,7 @@ void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {
Handle<JSObject> current_prototype = instr->prototype();
// Load prototype object.
LoadPrototype(reg, current_prototype);
LoadHeapObject(reg, current_prototype);
// Check prototype maps up to the holder.
while (!current_prototype.is_identical_to(holder)) {
@ -3334,7 +3342,7 @@ void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {
current_prototype =
Handle<JSObject>(JSObject::cast(current_prototype->GetPrototype()));
// Load next prototype object.
LoadPrototype(reg, current_prototype);
LoadHeapObject(reg, current_prototype);
}
// Check the holder map.

View File

@ -175,7 +175,7 @@ class LCodeGen BASE_EMBEDDED {
int arity,
LInstruction* instr);
void LoadPrototype(Register result, Handle<JSObject> prototype);
void LoadHeapObject(Register result, Handle<HeapObject> object);
void RegisterLazyDeoptimization(LInstruction* instr);
void RegisterEnvironmentForDeoptimization(LEnvironment* environment);

View File

@ -257,6 +257,11 @@ void LUnaryMathOperation::PrintDataTo(StringStream* stream) {
}
void LLoadContextSlot::PrintDataTo(StringStream* stream) {
stream->Add("(%d, %d)", context_chain_length(), slot_index());
}
void LCallKeyed::PrintDataTo(StringStream* stream) {
stream->Add("[ecx] #%d / ", arity());
}
@ -1633,6 +1638,11 @@ LInstruction* LChunkBuilder::DoStoreGlobal(HStoreGlobal* instr) {
}
LInstruction* LChunkBuilder::DoLoadContextSlot(HLoadContextSlot* instr) {
return DefineAsRegister(new LLoadContextSlot);
}
LInstruction* LChunkBuilder::DoLoadNamedField(HLoadNamedField* instr) {
ASSERT(instr->representation().IsTagged());
LOperand* obj = UseRegisterAtStart(instr->object());

View File

@ -90,6 +90,7 @@ class LCodeGen;
// LGlobalReceiver
// LGoto
// LLazyBailout
// LLoadContextSlot
// LLoadGlobal
// LMaterializedLiteral
// LArrayLiteral
@ -225,6 +226,7 @@ class LCodeGen;
V(ClassOfTestAndBranch) \
V(Label) \
V(LazyBailout) \
V(LoadContextSlot) \
V(LoadElements) \
V(LoadGlobal) \
V(LoadKeyedFastElement) \
@ -1349,6 +1351,20 @@ class LStoreGlobal: public LUnaryOperation<0> {
};
class LLoadContextSlot: public LTemplateInstruction<1, 0, 0> {
public:
DECLARE_CONCRETE_INSTRUCTION(LoadContextSlot, "load-context-slot")
DECLARE_HYDROGEN_ACCESSOR(LoadContextSlot)
int context_chain_length() const {
return hydrogen()->context_chain_length();
}
int slot_index() const { return hydrogen()->slot_index(); }
virtual void PrintDataTo(StringStream* stream);
};
class LPushArgument: public LUnaryOperation<0> {
public:
explicit LPushArgument(LOperand* argument) : LUnaryOperation<0>(argument) {}

View File

@ -2989,13 +2989,6 @@ Code* SharedFunctionInfo::unchecked_code() {
void SharedFunctionInfo::set_code(Code* value, WriteBarrierMode mode) {
// If optimization has been disabled for the shared function info,
// reflect that in the code object so it will not be counted as
// optimizable code.
ASSERT(value->kind() != Code::FUNCTION ||
!value->optimizable() ||
this->code() == Builtins::builtin(Builtins::Illegal) ||
this->allows_lazy_compilation());
WRITE_FIELD(this, kCodeOffset, value);
CONDITIONAL_WRITE_BARRIER(this, kCodeOffset, mode);
}

View File

@ -5399,7 +5399,8 @@ void JSFunction::JSFunctionIterateBody(int object_size, ObjectVisitor* v) {
void JSFunction::MarkForLazyRecompilation() {
ASSERT(is_compiled() && !IsOptimized());
ASSERT(shared()->allows_lazy_compilation());
ASSERT(shared()->allows_lazy_compilation() ||
code()->optimizable());
ReplaceCode(Builtins::builtin(Builtins::LazyRecompile));
}

View File

@ -978,7 +978,7 @@ void Processor::VisitThisFunction(ThisFunction* node) {
}
// Assumes code has been parsed and scopes hve been analyzed. Mutates the
// Assumes code has been parsed and scopes have been analyzed. Mutates the
// AST, so the AST should not continue to be used in the case of failure.
bool Rewriter::Rewrite(CompilationInfo* info) {
FunctionLiteral* function = info->function();

View File

@ -165,8 +165,10 @@ static void AttemptOnStackReplacement(JSFunction* function) {
}
SharedFunctionInfo* shared = function->shared();
// If the code is not optimizable, don't try OSR.
if (!shared->code()->optimizable()) return;
// If the code is not optimizable or references context slots, don't try OSR.
if (!shared->code()->optimizable() || !shared->allows_lazy_compilation()) {
return;
}
// We are not prepared to do OSR for a function that already has an
// allocated arguments object. The optimized code would bypass it for

View File

@ -1749,6 +1749,7 @@ static MaybeObject* Runtime_SetCode(Arguments args) {
// Array, and Object, and some web code
// doesn't like seeing source code for constructors.
target->shared()->set_script(Heap::undefined_value());
target->shared()->code()->set_optimizable(false);
// Clear the optimization hints related to the compiled code as these are no
// longer valid when the code is overwritten.
target->shared()->ClearThisPropertyAssignmentsInfo();
@ -6735,12 +6736,24 @@ static MaybeObject* Runtime_LazyRecompile(Arguments args) {
// code from the full compiler.
if (!function->shared()->code()->optimizable() ||
Debug::has_break_points()) {
if (FLAG_trace_opt) {
PrintF("[failed to optimize ");
function->PrintName();
PrintF(": is code optimizable: %s, is debugger enabled: %s]\n",
function->shared()->code()->optimizable() ? "T" : "F",
Debug::has_break_points() ? "T" : "F");
}
function->ReplaceCode(function->shared()->code());
return function->code();
}
if (CompileOptimized(function, AstNode::kNoNumber)) {
return function->code();
}
if (FLAG_trace_opt) {
PrintF("[failed to optimize ");
function->PrintName();
PrintF(": optimized compilation failed]\n");
}
function->ReplaceCode(function->shared()->code());
return Failure::Exception();
}

View File

@ -112,68 +112,74 @@ Variable* VariableMap::Lookup(Handle<String> name) {
// Dummy constructor
Scope::Scope(Type type)
: outer_scope_(NULL),
inner_scopes_(0),
type_(type),
scope_name_(Factory::empty_symbol()),
: inner_scopes_(0),
variables_(false),
temps_(0),
params_(0),
dynamics_(NULL),
unresolved_(0),
decls_(0),
receiver_(NULL),
function_(NULL),
arguments_(NULL),
arguments_shadow_(NULL),
illegal_redecl_(NULL),
scope_inside_with_(false),
scope_contains_with_(false),
scope_calls_eval_(false),
outer_scope_calls_eval_(false),
inner_scope_calls_eval_(false),
outer_scope_is_eval_scope_(false),
force_eager_compilation_(false),
num_stack_slots_(0),
num_heap_slots_(0) {
decls_(0) {
SetDefaults(type, NULL, NULL);
ASSERT(!resolved());
}
Scope::Scope(Scope* outer_scope, Type type)
: outer_scope_(outer_scope),
inner_scopes_(4),
type_(type),
scope_name_(Factory::empty_symbol()),
: inner_scopes_(4),
variables_(),
temps_(4),
params_(4),
dynamics_(NULL),
unresolved_(16),
decls_(4),
receiver_(NULL),
function_(NULL),
arguments_(NULL),
arguments_shadow_(NULL),
illegal_redecl_(NULL),
scope_inside_with_(false),
scope_contains_with_(false),
scope_calls_eval_(false),
outer_scope_calls_eval_(false),
inner_scope_calls_eval_(false),
outer_scope_is_eval_scope_(false),
force_eager_compilation_(false),
num_stack_slots_(0),
num_heap_slots_(0) {
decls_(4) {
SetDefaults(type, outer_scope, NULL);
// At some point we might want to provide outer scopes to
// eval scopes (by walking the stack and reading the scope info).
// In that case, the ASSERT below needs to be adjusted.
ASSERT((type == GLOBAL_SCOPE || type == EVAL_SCOPE) == (outer_scope == NULL));
ASSERT(!HasIllegalRedeclaration());
ASSERT(!resolved());
}
Scope::Scope(Scope* inner_scope, SerializedScopeInfo* scope_info)
: inner_scopes_(4),
variables_(),
temps_(4),
params_(4),
unresolved_(16),
decls_(4) {
ASSERT(scope_info != NULL);
SetDefaults(FUNCTION_SCOPE, inner_scope->outer_scope(), scope_info);
ASSERT(resolved());
InsertAfterScope(inner_scope);
if (scope_info->HasHeapAllocatedLocals()) {
num_heap_slots_ = scope_info_->NumberOfContextSlots();
}
}
bool Scope::Analyze(CompilationInfo* info) {
ASSERT(info->function() != NULL);
Scope* top = info->function()->scope();
// If we have a serialized scope info, reuse it.
if (!info->closure().is_null()) {
SerializedScopeInfo* scope_info = info->closure()->shared()->scope_info();
if (scope_info != SerializedScopeInfo::Empty()) {
Scope* scope = top;
JSFunction* current = *info->closure();
do {
current = current->context()->closure();
SerializedScopeInfo* scope_info = current->shared()->scope_info();
if (scope_info != SerializedScopeInfo::Empty()) {
scope = new Scope(scope, scope_info);
} else {
ASSERT(current->context()->IsGlobalContext());
}
} while (!current->context()->IsGlobalContext());
}
}
while (top->outer_scope() != NULL) top = top->outer_scope();
top->AllocateVariables(info->calling_context());
@ -191,6 +197,8 @@ bool Scope::Analyze(CompilationInfo* info) {
void Scope::Initialize(bool inside_with) {
ASSERT(!resolved());
// Add this scope as a new inner scope of the outer scope.
if (outer_scope_ != NULL) {
outer_scope_->inner_scopes_.Add(this);
@ -210,7 +218,7 @@ void Scope::Initialize(bool inside_with) {
Variable* var =
variables_.Declare(this, Factory::this_symbol(), Variable::VAR,
false, Variable::THIS);
var->rewrite_ = new Slot(var, Slot::PARAMETER, -1);
var->set_rewrite(new Slot(var, Slot::PARAMETER, -1));
receiver_ = var;
if (is_function_scope()) {
@ -224,7 +232,28 @@ void Scope::Initialize(bool inside_with) {
Variable* Scope::LocalLookup(Handle<String> name) {
return variables_.Lookup(name);
Variable* result = variables_.Lookup(name);
if (result != NULL || !resolved()) {
return result;
}
// If the scope is resolved, we can find a variable in serialized scope info.
// We should never look up 'arguments' in this scope
// as it is implicitly present in any scope.
ASSERT(*name != *Factory::arguments_symbol());
// Check context slot lookup.
Variable::Mode mode;
int index = scope_info_->ContextSlotIndex(*name, &mode);
if (index < 0) {
return NULL;
}
// Check that there is no local slot with the given name.
ASSERT(scope_info_->StackSlotIndex(*name) < 0);
Variable* var = variables_.Declare(this, name, mode, true, Variable::NORMAL);
var->set_rewrite(new Slot(var, Slot::CONTEXT, index));
return var;
}
@ -250,6 +279,7 @@ Variable* Scope::DeclareLocal(Handle<String> name, Variable::Mode mode) {
// DYNAMIC variables are introduces during variable allocation,
// INTERNAL variables are allocated explicitly, and TEMPORARY
// variables are allocated via NewTemporary().
ASSERT(!resolved());
ASSERT(mode == Variable::VAR || mode == Variable::CONST);
return variables_.Declare(this, name, mode, true, Variable::NORMAL);
}
@ -273,6 +303,7 @@ VariableProxy* Scope::NewUnresolved(Handle<String> name, bool inside_with) {
// Note that we must not share the unresolved variables with
// the same name because they may be removed selectively via
// RemoveUnresolved().
ASSERT(!resolved());
VariableProxy* proxy = new VariableProxy(name, false, inside_with);
unresolved_.Add(proxy);
return proxy;
@ -292,6 +323,7 @@ void Scope::RemoveUnresolved(VariableProxy* var) {
Variable* Scope::NewTemporary(Handle<String> name) {
ASSERT(!resolved());
Variable* var =
new Variable(this, name, Variable::TEMPORARY, true, Variable::NORMAL);
temps_.Add(var);
@ -550,7 +582,7 @@ Variable* Scope::NonLocal(Handle<String> name, Variable::Mode mode) {
// Declare a new non-local.
var = map->Declare(NULL, name, mode, true, Variable::NORMAL);
// Allocate it by giving it a dynamic lookup.
var->rewrite_ = new Slot(var, Slot::LOOKUP, -1);
var->set_rewrite(new Slot(var, Slot::LOOKUP, -1));
}
return var;
}
@ -612,8 +644,9 @@ Variable* Scope::LookupRecursive(Handle<String> name,
ASSERT(var != NULL);
// If this is a lookup from an inner scope, mark the variable.
if (inner_lookup)
var->is_accessed_from_inner_scope_ = true;
if (inner_lookup) {
var->MarkAsAccessedFromInnerScope();
}
// If the variable we have found is just a guess, invalidate the
// result. If the found variable is local, record that fact so we
@ -753,7 +786,7 @@ bool Scope::MustAllocate(Variable* var) {
// via an eval() call. This is only possible if the variable has a
// visible name.
if ((var->is_this() || var->name()->length() > 0) &&
(var->is_accessed_from_inner_scope_ ||
(var->is_accessed_from_inner_scope() ||
scope_calls_eval_ || inner_scope_calls_eval_ ||
scope_contains_with_)) {
var->set_is_used(true);
@ -771,7 +804,7 @@ bool Scope::MustAllocateInContext(Variable* var) {
// context.
return
var->mode() != Variable::TEMPORARY &&
(var->is_accessed_from_inner_scope_ ||
(var->is_accessed_from_inner_scope() ||
scope_calls_eval_ || inner_scope_calls_eval_ ||
scope_contains_with_ || var->is_global());
}
@ -787,12 +820,12 @@ bool Scope::HasArgumentsParameter() {
void Scope::AllocateStackSlot(Variable* var) {
var->rewrite_ = new Slot(var, Slot::LOCAL, num_stack_slots_++);
var->set_rewrite(new Slot(var, Slot::LOCAL, num_stack_slots_++));
}
void Scope::AllocateHeapSlot(Variable* var) {
var->rewrite_ = new Slot(var, Slot::CONTEXT, num_heap_slots_++);
var->set_rewrite(new Slot(var, Slot::CONTEXT, num_heap_slots_++));
}
@ -857,7 +890,7 @@ void Scope::AllocateParameterLocals() {
// It is ok to set this only now, because arguments is a local
// variable that is allocated after the parameters have been
// allocated.
arguments_shadow_->is_accessed_from_inner_scope_ = true;
arguments_shadow_->MarkAsAccessedFromInnerScope();
}
Property* rewrite =
new Property(new VariableProxy(arguments_shadow_),
@ -865,7 +898,7 @@ void Scope::AllocateParameterLocals() {
RelocInfo::kNoPosition,
Property::SYNTHETIC);
rewrite->set_is_arguments_access(true);
var->rewrite_ = rewrite;
var->set_rewrite(rewrite);
}
}
@ -880,23 +913,23 @@ void Scope::AllocateParameterLocals() {
ASSERT(var->scope() == this);
if (MustAllocate(var)) {
if (MustAllocateInContext(var)) {
ASSERT(var->rewrite_ == NULL ||
ASSERT(var->rewrite() == NULL ||
(var->AsSlot() != NULL &&
var->AsSlot()->type() == Slot::CONTEXT));
if (var->rewrite_ == NULL) {
if (var->rewrite() == NULL) {
// Only set the heap allocation if the parameter has not
// been allocated yet.
AllocateHeapSlot(var);
}
} else {
ASSERT(var->rewrite_ == NULL ||
ASSERT(var->rewrite() == NULL ||
(var->AsSlot() != NULL &&
var->AsSlot()->type() == Slot::PARAMETER));
// Set the parameter index always, even if the parameter
// was seen before! (We need to access the actual parameter
// supplied for the last occurrence of a multiply declared
// parameter.)
var->rewrite_ = new Slot(var, Slot::PARAMETER, i);
var->set_rewrite(new Slot(var, Slot::PARAMETER, i));
}
}
}
@ -906,10 +939,10 @@ void Scope::AllocateParameterLocals() {
void Scope::AllocateNonParameterLocal(Variable* var) {
ASSERT(var->scope() == this);
ASSERT(var->rewrite_ == NULL ||
ASSERT(var->rewrite() == NULL ||
(!var->IsVariable(Factory::result_symbol())) ||
(var->AsSlot() == NULL || var->AsSlot()->type() != Slot::LOCAL));
if (var->rewrite_ == NULL && MustAllocate(var)) {
if (var->rewrite() == NULL && MustAllocate(var)) {
if (MustAllocateInContext(var)) {
AllocateHeapSlot(var);
} else {
@ -943,15 +976,18 @@ void Scope::AllocateNonParameterLocals() {
void Scope::AllocateVariablesRecursively() {
// The number of slots required for variables.
num_stack_slots_ = 0;
num_heap_slots_ = Context::MIN_CONTEXT_SLOTS;
// Allocate variables for inner scopes.
for (int i = 0; i < inner_scopes_.length(); i++) {
inner_scopes_[i]->AllocateVariablesRecursively();
}
// If scope is already resolved, we still need to allocate
// variables in inner scopes which might not had been resolved yet.
if (resolved()) return;
// The number of slots required for variables.
num_stack_slots_ = 0;
num_heap_slots_ = Context::MIN_CONTEXT_SLOTS;
// Allocate variables for this scope.
// Parameters must be allocated first, if any.
if (is_function_scope()) AllocateParameterLocals();

View File

@ -302,6 +302,14 @@ class Scope: public ZoneObject {
explicit Scope(Type type);
void InsertAfterScope(Scope* scope) {
inner_scopes_.Add(scope);
outer_scope_ = scope->outer_scope_;
outer_scope_->inner_scopes_.RemoveElement(scope);
outer_scope_->inner_scopes_.Add(this);
scope->outer_scope_ = this;
}
// Scope tree.
Scope* outer_scope_; // the immediately enclosing outer scope, or NULL
ZoneList<Scope*> inner_scopes_; // the immediately enclosed inner scopes
@ -355,6 +363,10 @@ class Scope: public ZoneObject {
int num_stack_slots_;
int num_heap_slots_;
// Serialized scopes support.
SerializedScopeInfo* scope_info_;
bool resolved() { return scope_info_ != NULL; }
// Create a non-local variable with a given name.
// These variables are looked up dynamically at runtime.
Variable* NonLocal(Handle<String> name, Variable::Mode mode);
@ -386,6 +398,33 @@ class Scope: public ZoneObject {
void AllocateNonParameterLocal(Variable* var);
void AllocateNonParameterLocals();
void AllocateVariablesRecursively();
private:
Scope(Scope* inner_scope, SerializedScopeInfo* scope_info);
void SetDefaults(Type type,
Scope* outer_scope,
SerializedScopeInfo* scope_info) {
outer_scope_ = outer_scope;
type_ = type;
scope_name_ = Factory::empty_symbol();
dynamics_ = NULL;
receiver_ = NULL;
function_ = NULL;
arguments_ = NULL;
arguments_shadow_ = NULL;
illegal_redecl_ = NULL;
scope_inside_with_ = false;
scope_contains_with_ = false;
scope_calls_eval_ = false;
outer_scope_calls_eval_ = false;
inner_scope_calls_eval_ = false;
outer_scope_is_eval_scope_ = false;
force_eager_compilation_ = false;
num_stack_slots_ = 0;
num_heap_slots_ = 0;
scope_info_ = scope_info;
}
};

View File

@ -98,6 +98,12 @@ bool Variable::IsStackLocal() const {
}
bool Variable::IsContextSlot() const {
Slot* s = AsSlot();
return s != NULL && s->type() == Slot::CONTEXT;
}
Variable::Variable(Scope* scope,
Handle<String> name,
Mode mode,

View File

@ -138,6 +138,9 @@ class Variable: public ZoneObject {
bool is_accessed_from_inner_scope() const {
return is_accessed_from_inner_scope_;
}
void MarkAsAccessedFromInnerScope() {
is_accessed_from_inner_scope_ = true;
}
bool is_used() { return is_used_; }
void set_is_used(bool flag) { is_used_ = flag; }
@ -148,6 +151,7 @@ class Variable: public ZoneObject {
bool IsStackAllocated() const;
bool IsParameter() const; // Includes 'this'.
bool IsStackLocal() const;
bool IsContextSlot() const;
bool is_dynamic() const {
return (mode_ == DYNAMIC ||
@ -175,6 +179,7 @@ class Variable: public ZoneObject {
}
Expression* rewrite() const { return rewrite_; }
void set_rewrite(Expression* expr) { rewrite_ = expr; }
StaticType* type() { return &type_; }
@ -197,8 +202,6 @@ class Variable: public ZoneObject {
// Code generation.
// rewrite_ is usually a Slot or a Property, but may be any expression.
Expression* rewrite_;
friend class Scope; // Has explicit access to rewrite_.
};

View File

@ -1102,6 +1102,11 @@ void LCodeGen::DoStoreGlobal(LStoreGlobal* instr) {
}
void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) {
Abort("Unimplemented: %s", "DoLoadContextSlot");
}
void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) {
Abort("Unimplemented: %s", "DoLoadNamedField");
}
@ -1376,8 +1381,8 @@ void LCodeGen::DoCheckMap(LCheckMap* instr) {
}
void LCodeGen::LoadPrototype(Register result, Handle<JSObject> prototype) {
Abort("Unimplemented: %s", "LoadPrototype");
void LCodeGen::LoadHeapObject(Register result, Handle<HeapObject> object) {
Abort("Unimplemented: %s", "LoadHeapObject");
}

View File

@ -175,7 +175,7 @@ class LCodeGen BASE_EMBEDDED {
int arity,
LInstruction* instr);
void LoadPrototype(Register result, Handle<JSObject> prototype);
void LoadHeapObject(Register result, Handle<HeapObject> object);
void RegisterLazyDeoptimization(LInstruction* instr);
void RegisterEnvironmentForDeoptimization(LEnvironment* environment);

View File

@ -257,8 +257,13 @@ void LUnaryMathOperation::PrintDataTo(StringStream* stream) {
}
void LLoadContextSlot::PrintDataTo(StringStream* stream) {
stream->Add("(%d, %d)", context_chain_length(), slot_index());
}
void LCallKeyed::PrintDataTo(StringStream* stream) {
stream->Add("[ecx] #%d / ", arity());
stream->Add("[rcx] #%d / ", arity());
}
@ -1231,6 +1236,12 @@ LInstruction* LChunkBuilder::DoStoreGlobal(HStoreGlobal* instr) {
}
LInstruction* LChunkBuilder::DoLoadContextSlot(HLoadContextSlot* instr) {
Abort("Unimplemented: %s", "DoLoadContextSlot");
return NULL;
}
LInstruction* LChunkBuilder::DoLoadNamedField(HLoadNamedField* instr) {
Abort("Unimplemented: %s", "DoLoadNamedField");
return NULL;

View File

@ -90,6 +90,7 @@ class LCodeGen;
// LGlobalReceiver
// LGoto
// LLazyBailout
// LLoadContextSlot
// LLoadGlobal
// LMaterializedLiteral
// LArrayLiteral
@ -225,6 +226,7 @@ class LCodeGen;
V(ClassOfTestAndBranch) \
V(Label) \
V(LazyBailout) \
V(LoadContextSlot) \
V(LoadElements) \
V(LoadGlobal) \
V(LoadKeyedFastElement) \
@ -1349,6 +1351,20 @@ class LStoreGlobal: public LUnaryOperation<0> {
};
// Lithium instruction that reads a slot from a context some number of
// levels up the current context chain.  Template parameters <1, 0, 0>:
// one result register, no input operands, no temps.  All data comes from
// the corresponding hydrogen instruction via the accessor macro.
class LLoadContextSlot: public LTemplateInstruction<1, 0, 0> {
public:
DECLARE_CONCRETE_INSTRUCTION(LoadContextSlot, "load-context-slot")
DECLARE_HYDROGEN_ACCESSOR(LoadContextSlot)
// Number of context links to follow before indexing into the context.
int context_chain_length() const {
return hydrogen()->context_chain_length();
}
// Index of the slot within the target context.
int slot_index() const { return hydrogen()->slot_index(); }
virtual void PrintDataTo(StringStream* stream);
};
class LPushArgument: public LUnaryOperation<0> {
public:
explicit LPushArgument(LOperand* argument) : LUnaryOperation<0>(argument) {}

View File

@ -29,6 +29,10 @@ prefix cctest
test-api/Bug*: FAIL
# The problem is that a code object can get a different optimizable flag
# in crankshaft after creation.
test-log/EquivalenceOfLoggingAndTraversal: SKIP
##############################################################################
# BUG(281): This test fails on some Linuxes.

45
test/mjsunit/closures.js Normal file
View File

@ -0,0 +1,45 @@
// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Invokes f many times (enough for Crankshaft to optimize it) and checks
// that every invocation still yields the expected value.
function runner(f, expected) {
  var kIterations = 1000000;
  var count = 0;
  while (count < kIterations) {
    assertEquals(expected, f.call(this));
    count++;
  }
}
// Builds a closure that reads `n` from its enclosing context and verifies
// it keeps computing n * 2 under optimization.  runner passes no extra
// arguments, so arguments.length is 0 inside MyFunction.
function test(n) {
  function MyFunction() {
    var result = n * 2 + arguments.length;
    return result;
  }
  var expected = n * 2;
  runner(MyFunction, expected);
}
// Exercise the optimized closure with several distinct context values.
var inputs = [1, 42, 239];
for (var j = 0; j < inputs.length; j++) {
  test(inputs[j]);
}

View File

@ -0,0 +1,51 @@
// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Verifies that closures in presence of eval work fine.
// Evaluates `expr`, walks the resulting object with a closure, and
// returns filter's result.  `walk` closes over `filter`, so this stresses
// closures created in a function that also calls eval.
function withEval(expr, filter) {
// NOTE(review): both for-in loops reuse the same variable `i` — this
// looks like a deliberate scope-handling stress shape for the regression
// test, not a typo; confirm before "fixing".
function walk(v) {
for (var i in v) {
for (var i in v) {}
}
return filter(v);
}
var o = eval(expr);
return walk(o);
}
function makeTagInfoJSON(n) {
var a = new Array(n);
for (var i = 0; i < n; i++) a.push('{}');
return a;
}
// Build a large array-literal expression once, then repeatedly eval and
// walk it with a fresh identity-closure each iteration so optimized
// closures are exercised against eval-created objects.
var expr = '([' + makeTagInfoJSON(128).join(', ') + '])'
var iteration = 0;
while (iteration < 300) {
  withEval(expr, function(a) { return a; });
  iteration++;
}

View File

@ -49,7 +49,7 @@ function foo() {
return j; // Make sure that future optimizations don't eliminate j.
} catch(e) {
ok = true;
assertTrue(re.test(e));
assertTrue(re.test(e), 'e: ' + e);
}
assertTrue(ok);
}