Support fast case for-in in Crankshaft.
Only JSObject enumerables with an enum cache (fast-case properties, no interceptors, no enumerable properties on the prototype) are supported.

HLoadKeyedGeneric instructions whose keys are produced by for-in enumeration are recognized and rewritten into direct property loads by index. For this, the enum cache was extended to store property indices in a separate array (see handles.cc).

New hydrogen instructions:

- HForInPrepareMap: checks the for-in fast-case preconditions and returns the map that contains the enum cache;
- HForInCacheArray: extracts the enum-cache array from the map;
- HCheckMapValue: map check with an HValue map instead of an immediate;
- HLoadFieldByIndex: loads a fast property by its index; positive indices denote in-object properties, negative indices denote out-of-object properties.

Changed hydrogen instructions:

- HLoadKeyedFastElement: added hole-check suppression for loads from internal FixedArrays that are known to have no holes.

R=fschneider@chromium.org
BUG=
TEST=

Review URL: https://chromiumcodereview.appspot.com/9425045

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@10794 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
commit 6703dddac4 (parent 0f348c9208)
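For context before the diff, a minimal JavaScript sketch (not part of the commit; the function and object are hypothetical) of the pattern this fast path targets: a for-in over an object with only fast, own, enumerable properties, where the keyed load inside the body uses the key produced by the enumeration and can therefore be rewritten into a property load by index.

// Hypothetical example: `o` has only fast, own, enumerable properties
// (no interceptors, nothing enumerable on the prototype), so the
// enum-cache fast case applies.
function sumOwnProperties(o) {
  var total = 0;
  for (var key in o) {
    // Keyed load whose key comes from the for-in enumeration; with this
    // change Crankshaft can turn it into a field load by index.
    total += o[key];
  }
  return total;
}
sumOwnProperties({ a: 1, b: 2, c: 3 });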
@@ -934,6 +934,8 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
__ cmp(r0, null_value);
__ b(eq, &exit);

PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);

// Convert the object to a JS object.
Label convert, done_convert;
__ JumpIfSmi(r0, &convert);
@@ -955,48 +957,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
// the JSObject::IsSimpleEnum cache validity checks. If we cannot
// guarantee cache validity, call the runtime system to check cache
// validity or get the property names in a fixed array.
Label next;
// Preload a couple of values used in the loop.
Register empty_fixed_array_value = r6;
__ LoadRoot(empty_fixed_array_value, Heap::kEmptyFixedArrayRootIndex);
Register empty_descriptor_array_value = r7;
__ LoadRoot(empty_descriptor_array_value,
Heap::kEmptyDescriptorArrayRootIndex);
__ mov(r1, r0);
__ bind(&next);

// Check that there are no elements. Register r1 contains the
// current JS object we've reached through the prototype chain.
__ ldr(r2, FieldMemOperand(r1, JSObject::kElementsOffset));
__ cmp(r2, empty_fixed_array_value);
__ b(ne, &call_runtime);

// Check that instance descriptors are not empty so that we can
// check for an enum cache. Leave the map in r2 for the subsequent
// prototype load.
__ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
__ ldr(r3, FieldMemOperand(r2, Map::kInstanceDescriptorsOrBitField3Offset));
__ JumpIfSmi(r3, &call_runtime);

// Check that there is an enum cache in the non-empty instance
// descriptors (r3). This is the case if the next enumeration
// index field does not contain a smi.
__ ldr(r3, FieldMemOperand(r3, DescriptorArray::kEnumerationIndexOffset));
__ JumpIfSmi(r3, &call_runtime);

// For all objects but the receiver, check that the cache is empty.
Label check_prototype;
__ cmp(r1, r0);
__ b(eq, &check_prototype);
__ ldr(r3, FieldMemOperand(r3, DescriptorArray::kEnumCacheBridgeCacheOffset));
__ cmp(r3, empty_fixed_array_value);
__ b(ne, &call_runtime);

// Load the prototype from the map and loop if non-null.
__ bind(&check_prototype);
__ ldr(r1, FieldMemOperand(r2, Map::kPrototypeOffset));
__ cmp(r1, null_value);
__ b(ne, &next);
__ CheckEnumCache(null_value, &call_runtime);

// The enum cache is valid. Load the map of the object being
// iterated over and use the cache for the iteration.
@@ -1049,6 +1010,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
__ Push(r1, r0); // Fixed array length (as smi) and initial index.

// Generate code for doing the condition check.
PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
__ bind(&loop);
// Load the current count to r0, load the length to r1.
__ Ldrd(r0, r1, MemOperand(sp, 0 * kPointerSize));
@@ -1092,7 +1054,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
__ mov(result_register(), r3);
// Perform the assignment as if via '='.
{ EffectContext context(this);
EmitAssignment(stmt->each(), stmt->AssignmentId());
EmitAssignment(stmt->each());
}

// Generate code for the body of the loop.
@@ -1113,6 +1075,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
__ Drop(5);

// Exit and decrement the loop depth.
PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
__ bind(&exit);
decrement_loop_depth();
}
@@ -1876,7 +1839,7 @@ void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
}


void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
void FullCodeGenerator::EmitAssignment(Expression* expr) {
// Invalid left-hand sides are rewritten to have a 'throw
// ReferenceError' on the left-hand side.
if (!expr->IsValidLeftHandSide()) {
@@ -1928,7 +1891,6 @@ void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
break;
}
}
PrepareForBailoutForId(bailout_ast_id, TOS_REG);
context()->Plug(r0);
}

@@ -2268,4 +2268,32 @@ LInstruction* LChunkBuilder::DoIn(HIn* instr) {
}


LInstruction* LChunkBuilder::DoForInPrepareMap(HForInPrepareMap* instr) {
LOperand* object = UseFixed(instr->enumerable(), r0);
LForInPrepareMap* result = new LForInPrepareMap(object);
return MarkAsCall(DefineFixed(result, r0), instr, CAN_DEOPTIMIZE_EAGERLY);
}


LInstruction* LChunkBuilder::DoForInCacheArray(HForInCacheArray* instr) {
LOperand* map = UseRegister(instr->map());
return AssignEnvironment(DefineAsRegister(
new LForInCacheArray(map)));
}


LInstruction* LChunkBuilder::DoCheckMapValue(HCheckMapValue* instr) {
LOperand* value = UseRegisterAtStart(instr->value());
LOperand* map = UseRegisterAtStart(instr->map());
return AssignEnvironment(new LCheckMapValue(value, map));
}


LInstruction* LChunkBuilder::DoLoadFieldByIndex(HLoadFieldByIndex* instr) {
LOperand* object = UseRegister(instr->object());
LOperand* index = UseRegister(instr->index());
return DefineAsRegister(new LLoadFieldByIndex(object, index));
}


} } // namespace v8::internal

@@ -172,7 +172,12 @@ class LCodeGen;
V(TypeofIsAndBranch) \
V(UnaryMathOperation) \
V(UnknownOSRValue) \
V(ValueOf)
V(ValueOf) \
V(ForInPrepareMap) \
V(ForInCacheArray) \
V(CheckMapValue) \
V(LoadFieldByIndex)


#define DECLARE_CONCRETE_INSTRUCTION(type, mnemonic) \
@@ -2064,6 +2069,62 @@ class LIn: public LTemplateInstruction<1, 2, 0> {
};


class LForInPrepareMap: public LTemplateInstruction<1, 1, 0> {
public:
explicit LForInPrepareMap(LOperand* object) {
inputs_[0] = object;
}

LOperand* object() { return inputs_[0]; }

DECLARE_CONCRETE_INSTRUCTION(ForInPrepareMap, "for-in-prepare-map")
};


class LForInCacheArray: public LTemplateInstruction<1, 1, 0> {
public:
explicit LForInCacheArray(LOperand* map) {
inputs_[0] = map;
}

LOperand* map() { return inputs_[0]; }

DECLARE_CONCRETE_INSTRUCTION(ForInCacheArray, "for-in-cache-array")

int idx() {
return HForInCacheArray::cast(this->hydrogen_value())->idx();
}
};


class LCheckMapValue: public LTemplateInstruction<0, 2, 0> {
public:
LCheckMapValue(LOperand* value, LOperand* map) {
inputs_[0] = value;
inputs_[1] = map;
}

LOperand* value() { return inputs_[0]; }
LOperand* map() { return inputs_[1]; }

DECLARE_CONCRETE_INSTRUCTION(CheckMapValue, "check-map-value")
};


class LLoadFieldByIndex: public LTemplateInstruction<1, 2, 0> {
public:
LLoadFieldByIndex(LOperand* object, LOperand* index) {
inputs_[0] = object;
inputs_[1] = index;
}

LOperand* object() { return inputs_[0]; }
LOperand* index() { return inputs_[1]; }

DECLARE_CONCRETE_INSTRUCTION(LoadFieldByIndex, "load-field-by-index")
};


class LChunkBuilder;
class LChunk: public ZoneObject {
public:

@@ -4850,6 +4850,88 @@ void LCodeGen::DoOsrEntry(LOsrEntry* instr) {
}


void LCodeGen::DoForInPrepareMap(LForInPrepareMap* instr) {
__ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
__ cmp(r0, ip);
DeoptimizeIf(eq, instr->environment());

Register null_value = r5;
__ LoadRoot(null_value, Heap::kNullValueRootIndex);
__ cmp(r0, null_value);
DeoptimizeIf(eq, instr->environment());

__ tst(r0, Operand(kSmiTagMask));
DeoptimizeIf(eq, instr->environment());

STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
__ CompareObjectType(r0, r1, r1, LAST_JS_PROXY_TYPE);
DeoptimizeIf(le, instr->environment());

Label use_cache, call_runtime;
__ CheckEnumCache(null_value, &call_runtime);

__ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
__ b(&use_cache);

// Get the set of properties to enumerate.
__ bind(&call_runtime);
__ push(r0);
CallRuntime(Runtime::kGetPropertyNamesFast, 1, instr);

__ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
__ LoadRoot(ip, Heap::kMetaMapRootIndex);
__ cmp(r1, ip);
DeoptimizeIf(ne, instr->environment());
__ bind(&use_cache);
}


void LCodeGen::DoForInCacheArray(LForInCacheArray* instr) {
Register map = ToRegister(instr->map());
Register result = ToRegister(instr->result());
__ LoadInstanceDescriptors(map, result);
__ ldr(result,
FieldMemOperand(result, DescriptorArray::kEnumerationIndexOffset));
__ ldr(result,
FieldMemOperand(result, FixedArray::SizeFor(instr->idx())));
__ cmp(result, Operand(0));
DeoptimizeIf(eq, instr->environment());
}


void LCodeGen::DoCheckMapValue(LCheckMapValue* instr) {
Register object = ToRegister(instr->value());
Register map = ToRegister(instr->map());
__ ldr(scratch0(), FieldMemOperand(object, HeapObject::kMapOffset));
__ cmp(map, scratch0());
DeoptimizeIf(ne, instr->environment());
}


void LCodeGen::DoLoadFieldByIndex(LLoadFieldByIndex* instr) {
Register object = ToRegister(instr->object());
Register index = ToRegister(instr->index());
Register result = ToRegister(instr->result());
Register scratch = scratch0();

Label out_of_object, done;
__ cmp(index, Operand(0));
__ b(lt, &out_of_object);

STATIC_ASSERT(kPointerSizeLog2 > kSmiTagSize);
__ add(scratch, object, Operand(index, LSL, kPointerSizeLog2 - kSmiTagSize));
__ ldr(result, FieldMemOperand(scratch, JSObject::kHeaderSize));

__ b(&done);

__ bind(&out_of_object);
__ ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset));
// Index is equal to negated out of object property index plus 1.
__ sub(scratch, result, Operand(index, LSL, kPointerSizeLog2 - kSmiTagSize));
__ ldr(result, FieldMemOperand(scratch,
FixedArray::kHeaderSize - kPointerSize));
__ bind(&done);
}


#undef __

@@ -3664,6 +3664,52 @@ void MacroAssembler::LoadInstanceDescriptors(Register map,
}


void MacroAssembler::CheckEnumCache(Register null_value, Label* call_runtime) {
Label next;
// Preload a couple of values used in the loop.
Register empty_fixed_array_value = r6;
LoadRoot(empty_fixed_array_value, Heap::kEmptyFixedArrayRootIndex);
Register empty_descriptor_array_value = r7;
LoadRoot(empty_descriptor_array_value,
Heap::kEmptyDescriptorArrayRootIndex);
mov(r1, r0);
bind(&next);

// Check that there are no elements. Register r1 contains the
// current JS object we've reached through the prototype chain.
ldr(r2, FieldMemOperand(r1, JSObject::kElementsOffset));
cmp(r2, empty_fixed_array_value);
b(ne, call_runtime);

// Check that instance descriptors are not empty so that we can
// check for an enum cache. Leave the map in r2 for the subsequent
// prototype load.
ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
ldr(r3, FieldMemOperand(r2, Map::kInstanceDescriptorsOrBitField3Offset));
JumpIfSmi(r3, call_runtime);

// Check that there is an enum cache in the non-empty instance
// descriptors (r3). This is the case if the next enumeration
// index field does not contain a smi.
ldr(r3, FieldMemOperand(r3, DescriptorArray::kEnumerationIndexOffset));
JumpIfSmi(r3, call_runtime);

// For all objects but the receiver, check that the cache is empty.
Label check_prototype;
cmp(r1, r0);
b(eq, &check_prototype);
ldr(r3, FieldMemOperand(r3, DescriptorArray::kEnumCacheBridgeCacheOffset));
cmp(r3, empty_fixed_array_value);
b(ne, call_runtime);

// Load the prototype from the map and loop if non-null.
bind(&check_prototype);
ldr(r1, FieldMemOperand(r2, Map::kPrototypeOffset));
cmp(r1, null_value);
b(ne, &next);
}


bool AreAliased(Register r1, Register r2, Register r3, Register r4) {
if (r1.is(r2)) return true;
if (r1.is(r3)) return true;

@@ -1259,6 +1259,10 @@ class MacroAssembler: public Assembler {
void EnterFrame(StackFrame::Type type);
void LeaveFrame(StackFrame::Type type);

// Expects object in r0 and returns map with validated enum cache
// in r0. Assumes that any other register can be used as a scratch.
void CheckEnumCache(Register null_value, Label* call_runtime);

private:
void CallCFunctionHelper(Register function,
int num_reg_arguments,

src/ast.h
@@ -786,10 +786,10 @@ class ForInStatement: public IterationStatement {
Expression* each() const { return each_; }
Expression* enumerable() const { return enumerable_; }

// Bailout support.
int AssignmentId() const { return assignment_id_; }
virtual int ContinueId() const { return EntryId(); }
virtual int StackCheckId() const { return EntryId(); }
virtual int StackCheckId() const { return body_id_; }
int BodyId() const { return body_id_; }
int PrepareId() const { return prepare_id_; }

protected:
template<class> friend class AstNodeFactory;
@@ -798,13 +798,15 @@ class ForInStatement: public IterationStatement {
: IterationStatement(isolate, labels),
each_(NULL),
enumerable_(NULL),
assignment_id_(GetNextId(isolate)) {
body_id_(GetNextId(isolate)),
prepare_id_(GetNextId(isolate)) {
}

private:
Expression* each_;
Expression* enumerable_;
int assignment_id_;
int body_id_;
int prepare_id_;
};

@@ -495,7 +495,7 @@ class FullCodeGenerator: public AstVisitor {

// Assign to the given expression as if via '='. The right-hand-side value
// is expected in the accumulator.
void EmitAssignment(Expression* expr, int bailout_ast_id);
void EmitAssignment(Expression* expr);

// Complete a variable assignment. The right-hand-side value is expected
// in the accumulator.

@@ -711,26 +711,57 @@ Handle<FixedArray> GetEnumPropertyKeys(Handle<JSObject> object,
isolate);
}
isolate->counters()->enum_cache_misses()->Increment();
Handle<Map> map(object->map());
int num_enum = object->NumberOfLocalProperties(DONT_ENUM);

Handle<FixedArray> storage = isolate->factory()->NewFixedArray(num_enum);
Handle<FixedArray> sort_array = isolate->factory()->NewFixedArray(num_enum);

Handle<FixedArray> indices;
Handle<FixedArray> sort_array2;

if (cache_result) {
indices = isolate->factory()->NewFixedArray(num_enum);
sort_array2 = isolate->factory()->NewFixedArray(num_enum);
}

Handle<DescriptorArray> descs =
Handle<DescriptorArray>(object->map()->instance_descriptors(), isolate);

for (int i = 0; i < descs->number_of_descriptors(); i++) {
if (descs->IsProperty(i) && !descs->IsDontEnum(i)) {
(*storage)->set(index, descs->GetKey(i));
storage->set(index, descs->GetKey(i));
PropertyDetails details(descs->GetDetails(i));
(*sort_array)->set(index, Smi::FromInt(details.index()));
sort_array->set(index, Smi::FromInt(details.index()));
if (!indices.is_null()) {
if (details.type() != FIELD) {
indices = Handle<FixedArray>();
sort_array2 = Handle<FixedArray>();
} else {
int field_index = Descriptor::IndexFromValue(descs->GetValue(i));
if (field_index >= map->inobject_properties()) {
field_index = -(field_index - map->inobject_properties() + 1);
}
indices->set(index, Smi::FromInt(field_index));
sort_array2->set(index, Smi::FromInt(details.index()));
}
}
index++;
}
}
(*storage)->SortPairs(*sort_array, sort_array->length());
storage->SortPairs(*sort_array, sort_array->length());
if (!indices.is_null()) {
indices->SortPairs(*sort_array2, sort_array2->length());
}
if (cache_result) {
Handle<FixedArray> bridge_storage =
isolate->factory()->NewFixedArray(
DescriptorArray::kEnumCacheBridgeLength);
DescriptorArray* desc = object->map()->instance_descriptors();
desc->SetEnumCache(*bridge_storage, *storage);
desc->SetEnumCache(*bridge_storage,
*storage,
indices.is_null() ? Object::cast(Smi::FromInt(0))
: Object::cast(*indices));
}
ASSERT(storage->length() == index);
return storage;

@ -786,6 +786,33 @@ void HTypeofIsAndBranch::PrintDataTo(StringStream* stream) {
|
||||
}
|
||||
|
||||
|
||||
void HCheckMapValue::PrintDataTo(StringStream* stream) {
|
||||
value()->PrintNameTo(stream);
|
||||
stream->Add(" ");
|
||||
map()->PrintNameTo(stream);
|
||||
}
|
||||
|
||||
|
||||
void HForInPrepareMap::PrintDataTo(StringStream* stream) {
|
||||
enumerable()->PrintNameTo(stream);
|
||||
}
|
||||
|
||||
|
||||
void HForInCacheArray::PrintDataTo(StringStream* stream) {
|
||||
enumerable()->PrintNameTo(stream);
|
||||
stream->Add(" ");
|
||||
map()->PrintNameTo(stream);
|
||||
stream->Add("[%d]", idx_);
|
||||
}
|
||||
|
||||
|
||||
void HLoadFieldByIndex::PrintDataTo(StringStream* stream) {
|
||||
object()->PrintNameTo(stream);
|
||||
stream->Add(" ");
|
||||
index()->PrintNameTo(stream);
|
||||
}
|
||||
|
||||
|
||||
HValue* HConstant::Canonicalize() {
|
||||
return HasNoUses() && !IsBlockEntry() ? NULL : this;
|
||||
}
|
||||
@ -1519,10 +1546,15 @@ void HLoadKeyedFastElement::PrintDataTo(StringStream* stream) {
|
||||
|
||||
|
||||
bool HLoadKeyedFastElement::RequiresHoleCheck() {
|
||||
if (hole_check_mode_ == OMIT_HOLE_CHECK) {
|
||||
return false;
|
||||
}
|
||||
|
||||
for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
|
||||
HValue* use = it.value();
|
||||
if (!use->IsChange()) return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
@ -1543,6 +1575,39 @@ void HLoadKeyedGeneric::PrintDataTo(StringStream* stream) {
|
||||
}
|
||||
|
||||
|
||||
HValue* HLoadKeyedGeneric::Canonicalize() {
|
||||
// Recognize generic keyed loads that use property name generated
|
||||
// by for-in statement as a key and rewrite them into fast property load
|
||||
// by index.
|
||||
if (key()->IsLoadKeyedFastElement()) {
|
||||
HLoadKeyedFastElement* key_load = HLoadKeyedFastElement::cast(key());
|
||||
if (key_load->object()->IsForInCacheArray()) {
|
||||
HForInCacheArray* names_cache =
|
||||
HForInCacheArray::cast(key_load->object());
|
||||
|
||||
if (names_cache->enumerable() == object()) {
|
||||
HForInCacheArray* index_cache =
|
||||
names_cache->index_cache();
|
||||
HCheckMapValue* map_check =
|
||||
new(block()->zone()) HCheckMapValue(object(), names_cache->map());
|
||||
HInstruction* index = new(block()->zone()) HLoadKeyedFastElement(
|
||||
index_cache,
|
||||
key_load->key(),
|
||||
HLoadKeyedFastElement::OMIT_HOLE_CHECK);
|
||||
HLoadFieldByIndex* load = new(block()->zone()) HLoadFieldByIndex(
|
||||
object(), index);
|
||||
map_check->InsertBefore(this);
|
||||
index->InsertBefore(this);
|
||||
load->InsertBefore(this);
|
||||
return load;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
|
||||
void HLoadKeyedSpecializedArrayElement::PrintDataTo(
|
||||
StringStream* stream) {
|
||||
external_pointer()->PrintNameTo(stream);
|
||||
|
@ -180,7 +180,11 @@ class LChunkBuilder;
|
||||
V(UnaryMathOperation) \
|
||||
V(UnknownOSRValue) \
|
||||
V(UseConst) \
|
||||
V(ValueOf)
|
||||
V(ValueOf) \
|
||||
V(ForInPrepareMap) \
|
||||
V(ForInCacheArray) \
|
||||
V(CheckMapValue) \
|
||||
V(LoadFieldByIndex)
|
||||
|
||||
#define GVN_FLAG_LIST(V) \
|
||||
V(Calls) \
|
||||
@ -2011,7 +2015,8 @@ class HLoadExternalArrayPointer: public HUnaryOperation {
|
||||
|
||||
class HCheckMap: public HTemplateInstruction<2> {
|
||||
public:
|
||||
HCheckMap(HValue* value, Handle<Map> map,
|
||||
HCheckMap(HValue* value,
|
||||
Handle<Map> map,
|
||||
HValue* typecheck = NULL,
|
||||
CompareMapMode mode = REQUIRE_EXACT_MAP)
|
||||
: map_(map),
|
||||
@ -3814,7 +3819,12 @@ class HLoadFunctionPrototype: public HUnaryOperation {
|
||||
|
||||
class HLoadKeyedFastElement: public HTemplateInstruction<2> {
|
||||
public:
|
||||
HLoadKeyedFastElement(HValue* obj, HValue* key) {
|
||||
enum HoleCheckMode { PERFORM_HOLE_CHECK, OMIT_HOLE_CHECK };
|
||||
|
||||
HLoadKeyedFastElement(HValue* obj,
|
||||
HValue* key,
|
||||
HoleCheckMode hole_check_mode = PERFORM_HOLE_CHECK)
|
||||
: hole_check_mode_(hole_check_mode) {
|
||||
SetOperandAt(0, obj);
|
||||
SetOperandAt(1, key);
|
||||
set_representation(Representation::Tagged());
|
||||
@ -3839,7 +3849,14 @@ class HLoadKeyedFastElement: public HTemplateInstruction<2> {
|
||||
DECLARE_CONCRETE_INSTRUCTION(LoadKeyedFastElement)
|
||||
|
||||
protected:
|
||||
virtual bool DataEquals(HValue* other) { return true; }
|
||||
virtual bool DataEquals(HValue* other) {
|
||||
if (!other->IsLoadKeyedFastElement()) return false;
|
||||
HLoadKeyedFastElement* other_load = HLoadKeyedFastElement::cast(other);
|
||||
return hole_check_mode_ == other_load->hole_check_mode_;
|
||||
}
|
||||
|
||||
private:
|
||||
HoleCheckMode hole_check_mode_;
|
||||
};
|
||||
|
||||
|
||||
@ -3943,6 +3960,8 @@ class HLoadKeyedGeneric: public HTemplateInstruction<3> {
|
||||
return Representation::Tagged();
|
||||
}
|
||||
|
||||
virtual HValue* Canonicalize();
|
||||
|
||||
DECLARE_CONCRETE_INSTRUCTION(LoadKeyedGeneric)
|
||||
};
|
||||
|
||||
@ -4617,6 +4636,134 @@ class HIn: public HTemplateInstruction<3> {
|
||||
DECLARE_CONCRETE_INSTRUCTION(In)
|
||||
};
|
||||
|
||||
|
||||
class HCheckMapValue: public HTemplateInstruction<2> {
|
||||
public:
|
||||
HCheckMapValue(HValue* value,
|
||||
HValue* map) {
|
||||
SetOperandAt(0, value);
|
||||
SetOperandAt(1, map);
|
||||
set_representation(Representation::Tagged());
|
||||
SetFlag(kUseGVN);
|
||||
SetGVNFlag(kDependsOnMaps);
|
||||
SetGVNFlag(kDependsOnElementsKind);
|
||||
}
|
||||
|
||||
virtual Representation RequiredInputRepresentation(int index) {
|
||||
return Representation::Tagged();
|
||||
}
|
||||
|
||||
virtual void PrintDataTo(StringStream* stream);
|
||||
|
||||
virtual HType CalculateInferredType() {
|
||||
return HType::Tagged();
|
||||
}
|
||||
|
||||
HValue* value() { return OperandAt(0); }
|
||||
HValue* map() { return OperandAt(1); }
|
||||
|
||||
DECLARE_CONCRETE_INSTRUCTION(CheckMapValue)
|
||||
|
||||
protected:
|
||||
virtual bool DataEquals(HValue* other) {
|
||||
return true;
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
class HForInPrepareMap : public HTemplateInstruction<2> {
|
||||
public:
|
||||
HForInPrepareMap(HValue* context,
|
||||
HValue* object) {
|
||||
SetOperandAt(0, context);
|
||||
SetOperandAt(1, object);
|
||||
set_representation(Representation::Tagged());
|
||||
SetAllSideEffects();
|
||||
}
|
||||
|
||||
virtual Representation RequiredInputRepresentation(int index) {
|
||||
return Representation::Tagged();
|
||||
}
|
||||
|
||||
HValue* context() { return OperandAt(0); }
|
||||
HValue* enumerable() { return OperandAt(1); }
|
||||
|
||||
virtual void PrintDataTo(StringStream* stream);
|
||||
|
||||
virtual HType CalculateInferredType() {
|
||||
return HType::Tagged();
|
||||
}
|
||||
|
||||
DECLARE_CONCRETE_INSTRUCTION(ForInPrepareMap);
|
||||
};
|
||||
|
||||
|
||||
class HForInCacheArray : public HTemplateInstruction<2> {
|
||||
public:
|
||||
HForInCacheArray(HValue* enumerable,
|
||||
HValue* keys,
|
||||
int idx) : idx_(idx) {
|
||||
SetOperandAt(0, enumerable);
|
||||
SetOperandAt(1, keys);
|
||||
set_representation(Representation::Tagged());
|
||||
}
|
||||
|
||||
virtual Representation RequiredInputRepresentation(int index) {
|
||||
return Representation::Tagged();
|
||||
}
|
||||
|
||||
HValue* enumerable() { return OperandAt(0); }
|
||||
HValue* map() { return OperandAt(1); }
|
||||
int idx() { return idx_; }
|
||||
|
||||
HForInCacheArray* index_cache() {
|
||||
return index_cache_;
|
||||
}
|
||||
|
||||
void set_index_cache(HForInCacheArray* index_cache) {
|
||||
index_cache_ = index_cache;
|
||||
}
|
||||
|
||||
virtual void PrintDataTo(StringStream* stream);
|
||||
|
||||
virtual HType CalculateInferredType() {
|
||||
return HType::Tagged();
|
||||
}
|
||||
|
||||
DECLARE_CONCRETE_INSTRUCTION(ForInCacheArray);
|
||||
|
||||
private:
|
||||
int idx_;
|
||||
HForInCacheArray* index_cache_;
|
||||
};
|
||||
|
||||
|
||||
class HLoadFieldByIndex : public HTemplateInstruction<2> {
|
||||
public:
|
||||
HLoadFieldByIndex(HValue* object,
|
||||
HValue* index) {
|
||||
SetOperandAt(0, object);
|
||||
SetOperandAt(1, index);
|
||||
set_representation(Representation::Tagged());
|
||||
}
|
||||
|
||||
virtual Representation RequiredInputRepresentation(int index) {
|
||||
return Representation::Tagged();
|
||||
}
|
||||
|
||||
HValue* object() { return OperandAt(0); }
|
||||
HValue* index() { return OperandAt(1); }
|
||||
|
||||
virtual void PrintDataTo(StringStream* stream);
|
||||
|
||||
virtual HType CalculateInferredType() {
|
||||
return HType::Tagged();
|
||||
}
|
||||
|
||||
DECLARE_CONCRETE_INSTRUCTION(LoadFieldByIndex);
|
||||
};
|
||||
|
||||
|
||||
#undef DECLARE_INSTRUCTION
|
||||
#undef DECLARE_CONCRETE_INSTRUCTION
|
||||
|
||||
|
123
src/hydrogen.cc
123
src/hydrogen.cc
@ -2744,12 +2744,20 @@ void HGraphBuilder::VisitIfStatement(IfStatement* stmt) {
|
||||
|
||||
HBasicBlock* HGraphBuilder::BreakAndContinueScope::Get(
|
||||
BreakableStatement* stmt,
|
||||
BreakType type) {
|
||||
BreakType type,
|
||||
int* drop_extra) {
|
||||
*drop_extra = 0;
|
||||
BreakAndContinueScope* current = this;
|
||||
while (current != NULL && current->info()->target() != stmt) {
|
||||
*drop_extra += current->info()->drop_extra();
|
||||
current = current->next();
|
||||
}
|
||||
ASSERT(current != NULL); // Always found (unless stack is malformed).
|
||||
|
||||
if (type == BREAK) {
|
||||
*drop_extra += current->info()->drop_extra();
|
||||
}
|
||||
|
||||
HBasicBlock* block = NULL;
|
||||
switch (type) {
|
||||
case BREAK:
|
||||
@ -2777,7 +2785,11 @@ void HGraphBuilder::VisitContinueStatement(ContinueStatement* stmt) {
|
||||
ASSERT(!HasStackOverflow());
|
||||
ASSERT(current_block() != NULL);
|
||||
ASSERT(current_block()->HasPredecessor());
|
||||
HBasicBlock* continue_block = break_scope()->Get(stmt->target(), CONTINUE);
|
||||
int drop_extra = 0;
|
||||
HBasicBlock* continue_block = break_scope()->Get(stmt->target(),
|
||||
CONTINUE,
|
||||
&drop_extra);
|
||||
Drop(drop_extra);
|
||||
current_block()->Goto(continue_block);
|
||||
set_current_block(NULL);
|
||||
}
|
||||
@ -2787,7 +2799,11 @@ void HGraphBuilder::VisitBreakStatement(BreakStatement* stmt) {
|
||||
ASSERT(!HasStackOverflow());
|
||||
ASSERT(current_block() != NULL);
|
||||
ASSERT(current_block()->HasPredecessor());
|
||||
HBasicBlock* break_block = break_scope()->Get(stmt->target(), BREAK);
|
||||
int drop_extra = 0;
|
||||
HBasicBlock* break_block = break_scope()->Get(stmt->target(),
|
||||
BREAK,
|
||||
&drop_extra);
|
||||
Drop(drop_extra);
|
||||
current_block()->Goto(break_block);
|
||||
set_current_block(NULL);
|
||||
}
|
||||
@ -3148,7 +3164,6 @@ void HGraphBuilder::VisitWhileStatement(WhileStatement* stmt) {
|
||||
|
||||
BreakAndContinueInfo break_info(stmt);
|
||||
if (current_block() != NULL) {
|
||||
BreakAndContinueScope push(&break_info, this);
|
||||
CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry, &break_info));
|
||||
}
|
||||
HBasicBlock* body_exit =
|
||||
@ -3193,7 +3208,6 @@ void HGraphBuilder::VisitForStatement(ForStatement* stmt) {
|
||||
|
||||
BreakAndContinueInfo break_info(stmt);
|
||||
if (current_block() != NULL) {
|
||||
BreakAndContinueScope push(&break_info, this);
|
||||
CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry, &break_info));
|
||||
}
|
||||
HBasicBlock* body_exit =
|
||||
@ -3218,7 +3232,104 @@ void HGraphBuilder::VisitForInStatement(ForInStatement* stmt) {
|
||||
ASSERT(!HasStackOverflow());
|
||||
ASSERT(current_block() != NULL);
|
||||
ASSERT(current_block()->HasPredecessor());
|
||||
return Bailout("ForInStatement");
|
||||
|
||||
if (!stmt->each()->IsVariableProxy() ||
|
||||
!stmt->each()->AsVariableProxy()->var()->IsStackLocal()) {
|
||||
return Bailout("ForInStatement with non-local each variable");
|
||||
}
|
||||
|
||||
Variable* each_var = stmt->each()->AsVariableProxy()->var();
|
||||
|
||||
CHECK_ALIVE(VisitForValue(stmt->enumerable()));
|
||||
HValue* enumerable = Top(); // Leave enumerable at the top.
|
||||
|
||||
HValue* context = environment()->LookupContext();
|
||||
|
||||
HInstruction* map = AddInstruction(new(zone()) HForInPrepareMap(
|
||||
context, enumerable));
|
||||
AddSimulate(stmt->PrepareId());
|
||||
|
||||
HInstruction* array = AddInstruction(
|
||||
new(zone()) HForInCacheArray(
|
||||
enumerable,
|
||||
map,
|
||||
DescriptorArray::kEnumCacheBridgeCacheIndex));
|
||||
|
||||
HInstruction* array_length = AddInstruction(
|
||||
new(zone()) HFixedArrayBaseLength(array));
|
||||
|
||||
HInstruction* start_index = AddInstruction(new(zone()) HConstant(
|
||||
Handle<Object>(Smi::FromInt(0)), Representation::Integer32()));
|
||||
|
||||
Push(map);
|
||||
Push(array);
|
||||
Push(array_length);
|
||||
Push(start_index);
|
||||
|
||||
HInstruction* index_cache = AddInstruction(
|
||||
new(zone()) HForInCacheArray(
|
||||
enumerable,
|
||||
map,
|
||||
DescriptorArray::kEnumCacheBridgeIndicesCacheIndex));
|
||||
HForInCacheArray::cast(array)->set_index_cache(
|
||||
HForInCacheArray::cast(index_cache));
|
||||
|
||||
HBasicBlock* loop_entry = CreateLoopHeaderBlock();
|
||||
current_block()->Goto(loop_entry);
|
||||
set_current_block(loop_entry);
|
||||
|
||||
HValue* index = Top();
|
||||
|
||||
// Check that we still have more keys.
|
||||
HCompareIDAndBranch* compare_index =
|
||||
new(zone()) HCompareIDAndBranch(index, array_length, Token::LT);
|
||||
compare_index->SetInputRepresentation(Representation::Integer32());
|
||||
|
||||
HBasicBlock* loop_body = graph()->CreateBasicBlock();
|
||||
HBasicBlock* loop_successor = graph()->CreateBasicBlock();
|
||||
|
||||
compare_index->SetSuccessorAt(0, loop_body);
|
||||
compare_index->SetSuccessorAt(1, loop_successor);
|
||||
current_block()->Finish(compare_index);
|
||||
|
||||
set_current_block(loop_successor);
|
||||
Drop(5);
|
||||
|
||||
set_current_block(loop_body);
|
||||
|
||||
HValue* key = AddInstruction(
|
||||
new(zone()) HLoadKeyedFastElement(
|
||||
array, index, HLoadKeyedFastElement::OMIT_HOLE_CHECK));
|
||||
|
||||
// Check if the expected map still matches that of the enumerable.
|
||||
// If not just deoptimize.
|
||||
AddInstruction(new(zone()) HCheckMapValue(enumerable, map));
|
||||
|
||||
Bind(each_var, key);
|
||||
|
||||
BreakAndContinueInfo break_info(stmt, 5);
|
||||
CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry, &break_info));
|
||||
|
||||
HBasicBlock* body_exit =
|
||||
JoinContinue(stmt, current_block(), break_info.continue_block());
|
||||
|
||||
if (body_exit != NULL) {
|
||||
set_current_block(body_exit);
|
||||
|
||||
HValue* current_index = Pop();
|
||||
PushAndAdd(
|
||||
new(zone()) HAdd(context, current_index, graph()->GetConstant1()));
|
||||
|
||||
body_exit = current_block();
|
||||
}
|
||||
|
||||
HBasicBlock* loop_exit = CreateLoop(stmt,
|
||||
loop_entry,
|
||||
body_exit,
|
||||
loop_successor,
|
||||
break_info.break_block());
|
||||
|
||||
set_current_block(loop_exit);
|
||||
}
|
||||
|
||||
|
||||
|
@ -705,8 +705,12 @@ class HGraphBuilder: public AstVisitor {
|
||||
// can have a separate lifetime.
|
||||
class BreakAndContinueInfo BASE_EMBEDDED {
|
||||
public:
|
||||
explicit BreakAndContinueInfo(BreakableStatement* target)
|
||||
: target_(target), break_block_(NULL), continue_block_(NULL) {
|
||||
explicit BreakAndContinueInfo(BreakableStatement* target,
|
||||
int drop_extra = 0)
|
||||
: target_(target),
|
||||
break_block_(NULL),
|
||||
continue_block_(NULL),
|
||||
drop_extra_(drop_extra) {
|
||||
}
|
||||
|
||||
BreakableStatement* target() { return target_; }
|
||||
@ -714,11 +718,13 @@ class HGraphBuilder: public AstVisitor {
|
||||
void set_break_block(HBasicBlock* block) { break_block_ = block; }
|
||||
HBasicBlock* continue_block() { return continue_block_; }
|
||||
void set_continue_block(HBasicBlock* block) { continue_block_ = block; }
|
||||
int drop_extra() { return drop_extra_; }
|
||||
|
||||
private:
|
||||
BreakableStatement* target_;
|
||||
HBasicBlock* break_block_;
|
||||
HBasicBlock* continue_block_;
|
||||
int drop_extra_;
|
||||
};
|
||||
|
||||
// A helper class to maintain a stack of current BreakAndContinueInfo
|
||||
@ -737,7 +743,7 @@ class HGraphBuilder: public AstVisitor {
|
||||
BreakAndContinueScope* next() { return next_; }
|
||||
|
||||
// Search the break stack for a break or continue target.
|
||||
HBasicBlock* Get(BreakableStatement* stmt, BreakType type);
|
||||
HBasicBlock* Get(BreakableStatement* stmt, BreakType type, int* drop_extra);
|
||||
|
||||
private:
|
||||
BreakAndContinueInfo* info_;
|
||||
|
@ -925,6 +925,8 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
|
||||
__ cmp(eax, isolate()->factory()->null_value());
|
||||
__ j(equal, &exit);
|
||||
|
||||
PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
|
||||
|
||||
// Convert the object to a JS object.
|
||||
Label convert, done_convert;
|
||||
__ JumpIfSmi(eax, &convert, Label::kNear);
|
||||
@ -937,7 +939,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
|
||||
__ push(eax);
|
||||
|
||||
// Check for proxies.
|
||||
Label call_runtime;
|
||||
Label call_runtime, use_cache, fixed_array;
|
||||
STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
|
||||
__ CmpObjectType(eax, LAST_JS_PROXY_TYPE, ecx);
|
||||
__ j(below_equal, &call_runtime);
|
||||
@ -946,61 +948,19 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
|
||||
// the JSObject::IsSimpleEnum cache validity checks. If we cannot
|
||||
// guarantee cache validity, call the runtime system to check cache
|
||||
// validity or get the property names in a fixed array.
|
||||
Label next;
|
||||
__ mov(ecx, eax);
|
||||
__ bind(&next);
|
||||
__ CheckEnumCache(&call_runtime);
|
||||
|
||||
// Check that there are no elements. Register ecx contains the
|
||||
// current JS object we've reached through the prototype chain.
|
||||
__ cmp(FieldOperand(ecx, JSObject::kElementsOffset),
|
||||
isolate()->factory()->empty_fixed_array());
|
||||
__ j(not_equal, &call_runtime);
|
||||
|
||||
// Check that instance descriptors are not empty so that we can
|
||||
// check for an enum cache. Leave the map in ebx for the subsequent
|
||||
// prototype load.
|
||||
__ mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));
|
||||
__ mov(edx, FieldOperand(ebx, Map::kInstanceDescriptorsOrBitField3Offset));
|
||||
__ JumpIfSmi(edx, &call_runtime);
|
||||
|
||||
// Check that there is an enum cache in the non-empty instance
|
||||
// descriptors (edx). This is the case if the next enumeration
|
||||
// index field does not contain a smi.
|
||||
__ mov(edx, FieldOperand(edx, DescriptorArray::kEnumerationIndexOffset));
|
||||
__ JumpIfSmi(edx, &call_runtime);
|
||||
|
||||
// For all objects but the receiver, check that the cache is empty.
|
||||
Label check_prototype;
|
||||
__ cmp(ecx, eax);
|
||||
__ j(equal, &check_prototype, Label::kNear);
|
||||
__ mov(edx, FieldOperand(edx, DescriptorArray::kEnumCacheBridgeCacheOffset));
|
||||
__ cmp(edx, isolate()->factory()->empty_fixed_array());
|
||||
__ j(not_equal, &call_runtime);
|
||||
|
||||
// Load the prototype from the map and loop if non-null.
|
||||
__ bind(&check_prototype);
|
||||
__ mov(ecx, FieldOperand(ebx, Map::kPrototypeOffset));
|
||||
__ cmp(ecx, isolate()->factory()->null_value());
|
||||
__ j(not_equal, &next);
|
||||
|
||||
// The enum cache is valid. Load the map of the object being
|
||||
// iterated over and use the cache for the iteration.
|
||||
Label use_cache;
|
||||
__ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
|
||||
__ jmp(&use_cache, Label::kNear);
|
||||
|
||||
// Get the set of properties to enumerate.
|
||||
__ bind(&call_runtime);
|
||||
__ push(eax); // Duplicate the enumerable object on the stack.
|
||||
__ push(eax);
|
||||
__ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
|
||||
|
||||
// If we got a map from the runtime call, we can do a fast
|
||||
// modification check. Otherwise, we got a fixed array, and we have
|
||||
// to do a slow check.
|
||||
Label fixed_array;
|
||||
__ cmp(FieldOperand(eax, HeapObject::kMapOffset),
|
||||
isolate()->factory()->meta_map());
|
||||
__ j(not_equal, &fixed_array, Label::kNear);
|
||||
__ j(not_equal, &fixed_array);
|
||||
|
||||
|
||||
// We got a map in register eax. Get the enumeration cache from it.
|
||||
__ bind(&use_cache);
|
||||
@ -1033,6 +993,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
|
||||
__ push(Immediate(Smi::FromInt(0))); // Initial index.
|
||||
|
||||
// Generate code for doing the condition check.
|
||||
PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
|
||||
__ bind(&loop);
|
||||
__ mov(eax, Operand(esp, 0 * kPointerSize)); // Get the current index.
|
||||
__ cmp(eax, Operand(esp, 1 * kPointerSize)); // Compare to the array length.
|
||||
@ -1075,7 +1036,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
|
||||
__ mov(result_register(), ebx);
|
||||
// Perform the assignment as if via '='.
|
||||
{ EffectContext context(this);
|
||||
EmitAssignment(stmt->each(), stmt->AssignmentId());
|
||||
EmitAssignment(stmt->each());
|
||||
}
|
||||
|
||||
// Generate code for the body of the loop.
|
||||
@ -1094,6 +1055,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
|
||||
__ add(esp, Immediate(5 * kPointerSize));
|
||||
|
||||
// Exit and decrement the loop depth.
|
||||
PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
|
||||
__ bind(&exit);
|
||||
decrement_loop_depth();
|
||||
}
|
||||
@ -1854,7 +1816,7 @@ void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
|
||||
}
|
||||
|
||||
|
||||
void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
|
||||
void FullCodeGenerator::EmitAssignment(Expression* expr) {
|
||||
// Invalid left-hand sides are rewritten to have a 'throw
|
||||
// ReferenceError' on the left-hand side.
|
||||
if (!expr->IsValidLeftHandSide()) {
|
||||
@ -1906,7 +1868,6 @@ void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
PrepareForBailoutForId(bailout_ast_id, TOS_REG);
|
||||
context()->Plug(eax);
|
||||
}
|
||||
|
||||
|
@ -4729,6 +4729,84 @@ void LCodeGen::DoIn(LIn* instr) {
|
||||
}
|
||||
|
||||
|
||||
void LCodeGen::DoForInPrepareMap(LForInPrepareMap* instr) {
|
||||
__ cmp(eax, isolate()->factory()->undefined_value());
|
||||
DeoptimizeIf(equal, instr->environment());
|
||||
|
||||
__ cmp(eax, isolate()->factory()->null_value());
|
||||
DeoptimizeIf(equal, instr->environment());
|
||||
|
||||
__ test(eax, Immediate(kSmiTagMask));
|
||||
DeoptimizeIf(zero, instr->environment());
|
||||
|
||||
STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
|
||||
__ CmpObjectType(eax, LAST_JS_PROXY_TYPE, ecx);
|
||||
DeoptimizeIf(below_equal, instr->environment());
|
||||
|
||||
Label use_cache, call_runtime;
|
||||
__ CheckEnumCache(&call_runtime);
|
||||
|
||||
__ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
|
||||
__ jmp(&use_cache, Label::kNear);
|
||||
|
||||
// Get the set of properties to enumerate.
|
||||
__ bind(&call_runtime);
|
||||
__ push(eax);
|
||||
CallRuntime(Runtime::kGetPropertyNamesFast, 1, instr);
|
||||
|
||||
__ cmp(FieldOperand(eax, HeapObject::kMapOffset),
|
||||
isolate()->factory()->meta_map());
|
||||
DeoptimizeIf(not_equal, instr->environment());
|
||||
__ bind(&use_cache);
|
||||
}
|
||||
|
||||
|
||||
void LCodeGen::DoForInCacheArray(LForInCacheArray* instr) {
|
||||
Register map = ToRegister(instr->map());
|
||||
Register result = ToRegister(instr->result());
|
||||
__ LoadInstanceDescriptors(map, result);
|
||||
__ mov(result,
|
||||
FieldOperand(result, DescriptorArray::kEnumerationIndexOffset));
|
||||
__ mov(result,
|
||||
FieldOperand(result, FixedArray::SizeFor(instr->idx())));
|
||||
__ test(result, result);
|
||||
DeoptimizeIf(equal, instr->environment());
|
||||
}
|
||||
|
||||
|
||||
void LCodeGen::DoCheckMapValue(LCheckMapValue* instr) {
|
||||
Register object = ToRegister(instr->value());
|
||||
__ cmp(ToRegister(instr->map()),
|
||||
FieldOperand(object, HeapObject::kMapOffset));
|
||||
DeoptimizeIf(not_equal, instr->environment());
|
||||
}
|
||||
|
||||
|
||||
void LCodeGen::DoLoadFieldByIndex(LLoadFieldByIndex* instr) {
|
||||
Register object = ToRegister(instr->object());
|
||||
Register index = ToRegister(instr->index());
|
||||
|
||||
Label out_of_object, done;
|
||||
__ cmp(index, Immediate(0));
|
||||
__ j(less, &out_of_object);
|
||||
__ mov(object, FieldOperand(object,
|
||||
index,
|
||||
times_half_pointer_size,
|
||||
JSObject::kHeaderSize));
|
||||
__ jmp(&done, Label::kNear);
|
||||
|
||||
__ bind(&out_of_object);
|
||||
__ mov(object, FieldOperand(object, JSObject::kPropertiesOffset));
|
||||
__ neg(index);
|
||||
// Index is now equal to out of object property index plus 1.
|
||||
__ mov(object, FieldOperand(object,
|
||||
index,
|
||||
times_half_pointer_size,
|
||||
FixedArray::kHeaderSize - kPointerSize));
|
||||
__ bind(&done);
|
||||
}
|
||||
|
||||
|
||||
#undef __
|
||||
|
||||
} } // namespace v8::internal
|
||||
|
@ -2408,6 +2408,35 @@ LInstruction* LChunkBuilder::DoIn(HIn* instr) {
|
||||
}
|
||||
|
||||
|
||||
LInstruction* LChunkBuilder::DoForInPrepareMap(HForInPrepareMap* instr) {
|
||||
LOperand* context = UseFixed(instr->context(), esi);
|
||||
LOperand* object = UseFixed(instr->enumerable(), eax);
|
||||
LForInPrepareMap* result = new(zone()) LForInPrepareMap(context, object);
|
||||
return MarkAsCall(DefineFixed(result, eax), instr, CAN_DEOPTIMIZE_EAGERLY);
|
||||
}
|
||||
|
||||
|
||||
LInstruction* LChunkBuilder::DoForInCacheArray(HForInCacheArray* instr) {
|
||||
LOperand* map = UseRegister(instr->map());
|
||||
return AssignEnvironment(DefineAsRegister(
|
||||
new(zone()) LForInCacheArray(map)));
|
||||
}
|
||||
|
||||
|
||||
LInstruction* LChunkBuilder::DoCheckMapValue(HCheckMapValue* instr) {
|
||||
LOperand* value = UseRegisterAtStart(instr->value());
|
||||
LOperand* map = UseRegisterAtStart(instr->map());
|
||||
return AssignEnvironment(new(zone()) LCheckMapValue(value, map));
|
||||
}
|
||||
|
||||
|
||||
LInstruction* LChunkBuilder::DoLoadFieldByIndex(HLoadFieldByIndex* instr) {
|
||||
LOperand* object = UseRegister(instr->object());
|
||||
LOperand* index = UseTempRegister(instr->index());
|
||||
return DefineSameAsFirst(new(zone()) LLoadFieldByIndex(object, index));
|
||||
}
|
||||
|
||||
|
||||
} } // namespace v8::internal
|
||||
|
||||
#endif // V8_TARGET_ARCH_IA32
|
||||
|
@ -167,7 +167,11 @@ class LCodeGen;
|
||||
V(TypeofIsAndBranch) \
|
||||
V(UnaryMathOperation) \
|
||||
V(UnknownOSRValue) \
|
||||
V(ValueOf)
|
||||
V(ValueOf) \
|
||||
V(ForInPrepareMap) \
|
||||
V(ForInCacheArray) \
|
||||
V(CheckMapValue) \
|
||||
V(LoadFieldByIndex)
|
||||
|
||||
|
||||
#define DECLARE_CONCRETE_INSTRUCTION(type, mnemonic) \
|
||||
@ -2168,6 +2172,64 @@ class LIn: public LTemplateInstruction<1, 3, 0> {
|
||||
};
|
||||
|
||||
|
||||
class LForInPrepareMap: public LTemplateInstruction<1, 2, 0> {
|
||||
public:
|
||||
LForInPrepareMap(LOperand* context, LOperand* object) {
|
||||
inputs_[0] = context;
|
||||
inputs_[1] = object;
|
||||
}
|
||||
|
||||
LOperand* context() { return inputs_[0]; }
|
||||
LOperand* object() { return inputs_[1]; }
|
||||
|
||||
DECLARE_CONCRETE_INSTRUCTION(ForInPrepareMap, "for-in-prepare-map")
|
||||
};
|
||||
|
||||
|
||||
class LForInCacheArray: public LTemplateInstruction<1, 1, 0> {
|
||||
public:
|
||||
explicit LForInCacheArray(LOperand* map) {
|
||||
inputs_[0] = map;
|
||||
}
|
||||
|
||||
LOperand* map() { return inputs_[0]; }
|
||||
|
||||
DECLARE_CONCRETE_INSTRUCTION(ForInCacheArray, "for-in-cache-array")
|
||||
|
||||
int idx() {
|
||||
return HForInCacheArray::cast(this->hydrogen_value())->idx();
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
class LCheckMapValue: public LTemplateInstruction<0, 2, 0> {
|
||||
public:
|
||||
LCheckMapValue(LOperand* value, LOperand* map) {
|
||||
inputs_[0] = value;
|
||||
inputs_[1] = map;
|
||||
}
|
||||
|
||||
LOperand* value() { return inputs_[0]; }
|
||||
LOperand* map() { return inputs_[1]; }
|
||||
|
||||
DECLARE_CONCRETE_INSTRUCTION(CheckMapValue, "check-map-value")
|
||||
};
|
||||
|
||||
|
||||
class LLoadFieldByIndex: public LTemplateInstruction<1, 2, 0> {
|
||||
public:
|
||||
LLoadFieldByIndex(LOperand* object, LOperand* index) {
|
||||
inputs_[0] = object;
|
||||
inputs_[1] = index;
|
||||
}
|
||||
|
||||
LOperand* object() { return inputs_[0]; }
|
||||
LOperand* index() { return inputs_[1]; }
|
||||
|
||||
DECLARE_CONCRETE_INSTRUCTION(LoadFieldByIndex, "load-field-by-index")
|
||||
};
|
||||
|
||||
|
||||
class LChunkBuilder;
|
||||
class LChunk: public ZoneObject {
|
||||
public:
|
||||
|
@ -2776,6 +2776,46 @@ void MacroAssembler::EnsureNotWhite(
|
||||
bind(&done);
|
||||
}
|
||||
|
||||
|
||||
void MacroAssembler::CheckEnumCache(Label* call_runtime) {
|
||||
Label next;
|
||||
mov(ecx, eax);
|
||||
bind(&next);
|
||||
|
||||
// Check that there are no elements. Register ecx contains the
|
||||
// current JS object we've reached through the prototype chain.
|
||||
cmp(FieldOperand(ecx, JSObject::kElementsOffset),
|
||||
isolate()->factory()->empty_fixed_array());
|
||||
j(not_equal, call_runtime);
|
||||
|
||||
// Check that instance descriptors are not empty so that we can
|
||||
// check for an enum cache. Leave the map in ebx for the subsequent
|
||||
// prototype load.
|
||||
mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));
|
||||
mov(edx, FieldOperand(ebx, Map::kInstanceDescriptorsOrBitField3Offset));
|
||||
JumpIfSmi(edx, call_runtime);
|
||||
|
||||
// Check that there is an enum cache in the non-empty instance
|
||||
// descriptors (edx). This is the case if the next enumeration
|
||||
// index field does not contain a smi.
|
||||
mov(edx, FieldOperand(edx, DescriptorArray::kEnumerationIndexOffset));
|
||||
JumpIfSmi(edx, call_runtime);
|
||||
|
||||
// For all objects but the receiver, check that the cache is empty.
|
||||
Label check_prototype;
|
||||
cmp(ecx, eax);
|
||||
j(equal, &check_prototype, Label::kNear);
|
||||
mov(edx, FieldOperand(edx, DescriptorArray::kEnumCacheBridgeCacheOffset));
|
||||
cmp(edx, isolate()->factory()->empty_fixed_array());
|
||||
j(not_equal, call_runtime);
|
||||
|
||||
// Load the prototype from the map and loop if non-null.
|
||||
bind(&check_prototype);
|
||||
mov(ecx, FieldOperand(ebx, Map::kPrototypeOffset));
|
||||
cmp(ecx, isolate()->factory()->null_value());
|
||||
j(not_equal, &next);
|
||||
}
|
||||
|
||||
} } // namespace v8::internal
|
||||
|
||||
#endif // V8_TARGET_ARCH_IA32
|
||||
|
@ -829,6 +829,10 @@ class MacroAssembler: public Assembler {
|
||||
void EnterFrame(StackFrame::Type type);
|
||||
void LeaveFrame(StackFrame::Type type);
|
||||
|
||||
// Expects object in eax and returns map with validated enum cache
|
||||
// in eax. Assumes that any other register can be used as a scratch.
|
||||
void CheckEnumCache(Label* call_runtime);
|
||||
|
||||
private:
|
||||
bool generating_stub_;
|
||||
bool allow_stub_calls_;
|
||||
|
@ -5705,15 +5705,21 @@ MaybeObject* DescriptorArray::Allocate(int number_of_descriptors) {
|
||||
|
||||
|
||||
void DescriptorArray::SetEnumCache(FixedArray* bridge_storage,
|
||||
FixedArray* new_cache) {
|
||||
FixedArray* new_cache,
|
||||
Object* new_index_cache) {
|
||||
ASSERT(bridge_storage->length() >= kEnumCacheBridgeLength);
|
||||
ASSERT(new_index_cache->IsSmi() || new_index_cache->IsFixedArray());
|
||||
if (HasEnumCache()) {
|
||||
FixedArray::cast(get(kEnumerationIndexIndex))->
|
||||
set(kEnumCacheBridgeCacheIndex, new_cache);
|
||||
FixedArray::cast(get(kEnumerationIndexIndex))->
|
||||
set(kEnumCacheBridgeIndicesCacheIndex, new_index_cache);
|
||||
} else {
|
||||
if (IsEmpty()) return; // Do nothing for empty descriptor array.
|
||||
FixedArray::cast(bridge_storage)->
|
||||
set(kEnumCacheBridgeCacheIndex, new_cache);
|
||||
FixedArray::cast(bridge_storage)->
|
||||
set(kEnumCacheBridgeIndicesCacheIndex, new_index_cache);
|
||||
NoWriteBarrierSet(FixedArray::cast(bridge_storage),
|
||||
kEnumCacheBridgeEnumIndex,
|
||||
get(kEnumerationIndexIndex));
|
||||
|
@ -2421,7 +2421,9 @@ class DescriptorArray: public FixedArray {
|
||||
|
||||
// Initialize or change the enum cache,
|
||||
// using the supplied storage for the small "bridge".
|
||||
void SetEnumCache(FixedArray* bridge_storage, FixedArray* new_cache);
|
||||
void SetEnumCache(FixedArray* bridge_storage,
|
||||
FixedArray* new_cache,
|
||||
Object* new_index_cache);
|
||||
|
||||
// Accessors for fetching instance descriptor at descriptor number.
|
||||
inline String* GetKey(int descriptor_number);
|
||||
@ -2525,9 +2527,10 @@ class DescriptorArray: public FixedArray {
|
||||
static const int kFirstIndex = 3;
|
||||
|
||||
// The length of the "bridge" to the enum cache.
|
||||
static const int kEnumCacheBridgeLength = 2;
|
||||
static const int kEnumCacheBridgeLength = 3;
|
||||
static const int kEnumCacheBridgeEnumIndex = 0;
|
||||
static const int kEnumCacheBridgeCacheIndex = 1;
|
||||
static const int kEnumCacheBridgeIndicesCacheIndex = 2;
|
||||
|
||||
// Layout description.
|
||||
static const int kBitField3StorageOffset = FixedArray::kHeaderSize;
|
||||
|
@ -901,6 +901,8 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
|
||||
__ cmpq(rax, null_value);
|
||||
__ j(equal, &exit);
|
||||
|
||||
PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
|
||||
|
||||
// Convert the object to a JS object.
|
||||
Label convert, done_convert;
|
||||
__ JumpIfSmi(rax, &convert);
|
||||
@ -922,47 +924,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
|
||||
// the JSObject::IsSimpleEnum cache validity checks. If we cannot
|
||||
// guarantee cache validity, call the runtime system to check cache
|
||||
// validity or get the property names in a fixed array.
|
||||
Label next;
|
||||
Register empty_fixed_array_value = r8;
|
||||
__ LoadRoot(empty_fixed_array_value, Heap::kEmptyFixedArrayRootIndex);
|
||||
Register empty_descriptor_array_value = r9;
|
||||
__ LoadRoot(empty_descriptor_array_value,
|
||||
Heap::kEmptyDescriptorArrayRootIndex);
|
||||
__ movq(rcx, rax);
|
||||
__ bind(&next);
|
||||
|
||||
// Check that there are no elements. Register rcx contains the
|
||||
// current JS object we've reached through the prototype chain.
|
||||
__ cmpq(empty_fixed_array_value,
|
||||
FieldOperand(rcx, JSObject::kElementsOffset));
|
||||
__ j(not_equal, &call_runtime);
|
||||
|
||||
// Check that instance descriptors are not empty so that we can
|
||||
// check for an enum cache. Leave the map in rbx for the subsequent
|
||||
// prototype load.
|
||||
__ movq(rbx, FieldOperand(rcx, HeapObject::kMapOffset));
|
||||
__ movq(rdx, FieldOperand(rbx, Map::kInstanceDescriptorsOrBitField3Offset));
|
||||
__ JumpIfSmi(rdx, &call_runtime);
|
||||
|
||||
// Check that there is an enum cache in the non-empty instance
|
||||
// descriptors (rdx). This is the case if the next enumeration
|
||||
// index field does not contain a smi.
|
||||
__ movq(rdx, FieldOperand(rdx, DescriptorArray::kEnumerationIndexOffset));
|
||||
__ JumpIfSmi(rdx, &call_runtime);
|
||||
|
||||
// For all objects but the receiver, check that the cache is empty.
|
||||
Label check_prototype;
|
||||
__ cmpq(rcx, rax);
|
||||
__ j(equal, &check_prototype, Label::kNear);
|
||||
__ movq(rdx, FieldOperand(rdx, DescriptorArray::kEnumCacheBridgeCacheOffset));
|
||||
__ cmpq(rdx, empty_fixed_array_value);
|
||||
__ j(not_equal, &call_runtime);
|
||||
|
||||
// Load the prototype from the map and loop if non-null.
|
||||
__ bind(&check_prototype);
|
||||
__ movq(rcx, FieldOperand(rbx, Map::kPrototypeOffset));
|
||||
__ cmpq(rcx, null_value);
|
||||
__ j(not_equal, &next);
|
||||
__ CheckEnumCache(null_value, &call_runtime);
|
||||
|
||||
// The enum cache is valid. Load the map of the object being
|
||||
// iterated over and use the cache for the iteration.
|
||||
@ -1014,6 +976,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
__ Push(Smi::FromInt(0));  // Initial index.

// Generate code for doing the condition check.
PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
__ bind(&loop);
__ movq(rax, Operand(rsp, 0 * kPointerSize));  // Get the current index.
__ cmpq(rax, Operand(rsp, 1 * kPointerSize));  // Compare to the array length.
@ -1059,7 +1022,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
__ movq(result_register(), rbx);
// Perform the assignment as if via '='.
{ EffectContext context(this);
EmitAssignment(stmt->each(), stmt->AssignmentId());
EmitAssignment(stmt->each());
}

// Generate code for the body of the loop.
@ -1078,6 +1041,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
__ addq(rsp, Immediate(5 * kPointerSize));

// Exit and decrement the loop depth.
PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
__ bind(&exit);
decrement_loop_depth();
}
@ -1801,7 +1765,7 @@ void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
}


void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
void FullCodeGenerator::EmitAssignment(Expression* expr) {
// Invalid left-hand sides are rewritten to have a 'throw
// ReferenceError' on the left-hand side.
if (!expr->IsValidLeftHandSide()) {
@ -1853,7 +1817,6 @@ void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
break;
}
}
PrepareForBailoutForId(bailout_ast_id, TOS_REG);
context()->Plug(rax);
}
@ -4461,6 +4461,88 @@ void LCodeGen::DoOsrEntry(LOsrEntry* instr) {
osr_pc_offset_ = masm()->pc_offset();
}


void LCodeGen::DoForInPrepareMap(LForInPrepareMap* instr) {
__ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
DeoptimizeIf(equal, instr->environment());

Register null_value = rdi;
__ LoadRoot(null_value, Heap::kNullValueRootIndex);
__ cmpq(rax, null_value);
DeoptimizeIf(equal, instr->environment());

Condition cc = masm()->CheckSmi(rax);
DeoptimizeIf(cc, instr->environment());

STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
__ CmpObjectType(rax, LAST_JS_PROXY_TYPE, rcx);
DeoptimizeIf(below_equal, instr->environment());

Label use_cache, call_runtime;
__ CheckEnumCache(null_value, &call_runtime);

__ movq(rax, FieldOperand(rax, HeapObject::kMapOffset));
__ jmp(&use_cache, Label::kNear);

// Get the set of properties to enumerate.
__ bind(&call_runtime);
__ push(rax);
CallRuntime(Runtime::kGetPropertyNamesFast, 1, instr);

__ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
Heap::kMetaMapRootIndex);
DeoptimizeIf(not_equal, instr->environment());
__ bind(&use_cache);
}
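
A quick JavaScript-level reading of the checks above (an editorial sketch, not part of the patch; the helper below is hypothetical and only illustrates which receivers the prepare step rejects before the enum cache is consulted):

// Editorial sketch: receivers filtered out by the deopt checks in DoForInPrepareMap.
function countKeys(x) {
  var n = 0;
  for (var k in x) n++;
  return n;
}
countKeys({a: 1, b: 2});  // plain fast-properties object: candidate for the enum cache
countKeys(undefined);     // caught by the undefined check (nothing to iterate)
countKeys(null);          // caught by the null check
countKeys(42);            // caught by the smi check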

void LCodeGen::DoForInCacheArray(LForInCacheArray* instr) {
Register map = ToRegister(instr->map());
Register result = ToRegister(instr->result());
__ LoadInstanceDescriptors(map, result);
__ movq(result,
FieldOperand(result, DescriptorArray::kEnumerationIndexOffset));
__ movq(result,
FieldOperand(result, FixedArray::SizeFor(instr->idx())));
Condition cc = masm()->CheckSmi(result);
DeoptimizeIf(NegateCondition(cc), instr->environment());
}


void LCodeGen::DoCheckMapValue(LCheckMapValue* instr) {
Register object = ToRegister(instr->value());
__ cmpq(ToRegister(instr->map()),
FieldOperand(object, HeapObject::kMapOffset));
DeoptimizeIf(not_equal, instr->environment());
}


void LCodeGen::DoLoadFieldByIndex(LLoadFieldByIndex* instr) {
Register object = ToRegister(instr->object());
Register index = ToRegister(instr->index());

Label out_of_object, done;
__ SmiToInteger32(index, index);
__ cmpl(index, Immediate(0));
__ j(less, &out_of_object);
__ movq(object, FieldOperand(object,
index,
times_pointer_size,
JSObject::kHeaderSize));
__ jmp(&done, Label::kNear);

__ bind(&out_of_object);
__ movq(object, FieldOperand(object, JSObject::kPropertiesOffset));
__ negl(index);
// Index is now equal to out of object property index plus 1.
__ movq(object, FieldOperand(object,
index,
times_pointer_size,
FixedArray::kHeaderSize - kPointerSize));
__ bind(&done);
}
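
The addressing above encodes the in-object/out-of-object split in the sign of the index. A minimal JavaScript model of that convention (an editorial sketch, not part of the patch; "inobject" and "properties" are illustrative stand-ins for the object's in-object fields and its out-of-object properties array):

// Editorial sketch: positive indices address in-object fields; negative
// indices are encoded as -(slot + 1) into the properties array, so the slot
// is recovered as -index - 1 (matching the negl and "- kPointerSize" above).
function loadFieldByIndex(inobject, properties, index) {
  if (index >= 0) {
    return inobject[index];       // in-object property
  }
  return properties[-index - 1];  // out-of-object property
}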

#undef __

} }  // namespace v8::internal
@ -2269,6 +2269,34 @@ LInstruction* LChunkBuilder::DoIn(HIn* instr) {
}


LInstruction* LChunkBuilder::DoForInPrepareMap(HForInPrepareMap* instr) {
LOperand* object = UseFixed(instr->enumerable(), rax);
LForInPrepareMap* result = new LForInPrepareMap(object);
return MarkAsCall(DefineFixed(result, rax), instr, CAN_DEOPTIMIZE_EAGERLY);
}


LInstruction* LChunkBuilder::DoForInCacheArray(HForInCacheArray* instr) {
LOperand* map = UseRegister(instr->map());
return AssignEnvironment(DefineAsRegister(
new LForInCacheArray(map)));
}


LInstruction* LChunkBuilder::DoCheckMapValue(HCheckMapValue* instr) {
LOperand* value = UseRegisterAtStart(instr->value());
LOperand* map = UseRegisterAtStart(instr->map());
return AssignEnvironment(new LCheckMapValue(value, map));
}


LInstruction* LChunkBuilder::DoLoadFieldByIndex(HLoadFieldByIndex* instr) {
LOperand* object = UseRegister(instr->object());
LOperand* index = UseTempRegister(instr->index());
return DefineSameAsFirst(new LLoadFieldByIndex(object, index));
}


} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_X64
@ -172,7 +172,11 @@ class LCodeGen;
V(TypeofIsAndBranch) \
V(UnaryMathOperation) \
V(UnknownOSRValue) \
V(ValueOf)
V(ValueOf) \
V(ForInPrepareMap) \
V(ForInCacheArray) \
V(CheckMapValue) \
V(LoadFieldByIndex)


#define DECLARE_CONCRETE_INSTRUCTION(type, mnemonic) \
@ -2040,6 +2044,62 @@ class LStackCheck: public LTemplateInstruction<0, 0, 0> {
};


class LForInPrepareMap: public LTemplateInstruction<1, 1, 0> {
public:
explicit LForInPrepareMap(LOperand* object) {
inputs_[0] = object;
}

LOperand* object() { return inputs_[0]; }

DECLARE_CONCRETE_INSTRUCTION(ForInPrepareMap, "for-in-prepare-map")
};


class LForInCacheArray: public LTemplateInstruction<1, 1, 0> {
public:
explicit LForInCacheArray(LOperand* map) {
inputs_[0] = map;
}

LOperand* map() { return inputs_[0]; }

DECLARE_CONCRETE_INSTRUCTION(ForInCacheArray, "for-in-cache-array")

int idx() {
return HForInCacheArray::cast(this->hydrogen_value())->idx();
}
};


class LCheckMapValue: public LTemplateInstruction<0, 2, 0> {
public:
LCheckMapValue(LOperand* value, LOperand* map) {
inputs_[0] = value;
inputs_[1] = map;
}

LOperand* value() { return inputs_[0]; }
LOperand* map() { return inputs_[1]; }

DECLARE_CONCRETE_INSTRUCTION(CheckMapValue, "check-map-value")
};


class LLoadFieldByIndex: public LTemplateInstruction<1, 2, 0> {
public:
LLoadFieldByIndex(LOperand* object, LOperand* index) {
inputs_[0] = object;
inputs_[1] = index;
}

LOperand* object() { return inputs_[0]; }
LOperand* index() { return inputs_[1]; }

DECLARE_CONCRETE_INSTRUCTION(LoadFieldByIndex, "load-field-by-index")
};


class LChunkBuilder;
class LChunk: public ZoneObject {
public:
@ -4372,6 +4372,52 @@ void MacroAssembler::EnsureNotWhite(
bind(&done);
}


void MacroAssembler::CheckEnumCache(Register null_value, Label* call_runtime) {
Label next;
Register empty_fixed_array_value = r8;
LoadRoot(empty_fixed_array_value, Heap::kEmptyFixedArrayRootIndex);
Register empty_descriptor_array_value = r9;
LoadRoot(empty_descriptor_array_value,
Heap::kEmptyDescriptorArrayRootIndex);
movq(rcx, rax);
bind(&next);

// Check that there are no elements. Register rcx contains the
// current JS object we've reached through the prototype chain.
cmpq(empty_fixed_array_value,
FieldOperand(rcx, JSObject::kElementsOffset));
j(not_equal, call_runtime);

// Check that instance descriptors are not empty so that we can
// check for an enum cache. Leave the map in rbx for the subsequent
// prototype load.
movq(rbx, FieldOperand(rcx, HeapObject::kMapOffset));
movq(rdx, FieldOperand(rbx, Map::kInstanceDescriptorsOrBitField3Offset));
JumpIfSmi(rdx, call_runtime);

// Check that there is an enum cache in the non-empty instance
// descriptors (rdx). This is the case if the next enumeration
// index field does not contain a smi.
movq(rdx, FieldOperand(rdx, DescriptorArray::kEnumerationIndexOffset));
JumpIfSmi(rdx, call_runtime);

// For all objects but the receiver, check that the cache is empty.
Label check_prototype;
cmpq(rcx, rax);
j(equal, &check_prototype, Label::kNear);
movq(rdx, FieldOperand(rdx, DescriptorArray::kEnumCacheBridgeCacheOffset));
cmpq(rdx, empty_fixed_array_value);
j(not_equal, call_runtime);

// Load the prototype from the map and loop if non-null.
bind(&check_prototype);
movq(rcx, FieldOperand(rbx, Map::kPrototypeOffset));
cmpq(rcx, null_value);
j(not_equal, &next);
}


} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_X64
@ -1299,6 +1299,11 @@ class MacroAssembler: public Assembler {
void EnterFrame(StackFrame::Type type);
void LeaveFrame(StackFrame::Type type);

// Expects object in rax and returns map with validated enum cache
// in rax. Assumes that any other register can be used as a scratch.
void CheckEnumCache(Register null_value,
Label* call_runtime);

private:
// Order general registers are pushed by Pushad.
// rax, rcx, rdx, rbx, rsi, rdi, r8, r9, r11, r14, r15.
test/mjsunit/compiler/optimized-for-in.js (new file, 244 lines)
@ -0,0 +1,244 @@
// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

// Flags: --allow-natives-syntax

// Test for-in support in Crankshaft. For simplicity this test assumes a certain
// fixed iteration order for properties and will have to be adjusted if V8
// stops following insertion order.
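
// Editorial illustration (not part of the checked-in test): the
// insertion-order assumption the comment above refers to, in its simplest form.
var probe = {};
probe.b = "first";
probe.a = "second";
var order = [];
for (var key in probe) order.push(key);
assertEquals("ba", order.join(''));  // insertion order, not alphabetical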

function a(t) {
var result = [];
for (var i in t) {
result.push(i + t[i]);
}
return result.join('');
}

// Check that we correctly deoptimize on map check.
function b(t) {
var result = [];
for (var i in t) {
result.push(i + t[i]);
delete t[i];
}
return result.join('');
}

// Check that we correctly deoptimize during preparation step.
function c(t) {
var result = [];
for (var i in t) {
result.push(i + t[i]);
}
return result.join('');
}

// Check that we deoptimize to the place after side effect in the right state.
function d(t) {
var result = [];
var o;
for (var i in (o = t())) {
result.push(i + o[i]);
}
return result.join('');
}

// Check that we correctly deoptimize on map check inserted for fused load.
function e(t) {
var result = [];
for (var i in t) {
delete t[i];
t[i] = i;
result.push(i + t[i]);
}
return result.join('');
}

// Nested for-in loops.
function f(t) {
var result = [];
for (var i in t) {
for (var j in t) {
result.push(i + j + t[i] + t[j]);
}
}
return result.join('');
}

// Deoptimization from the inner for-in loop.
function g(t) {
var result = [];
for (var i in t) {
for (var j in t) {
result.push(i + j + t[i] + t[j]);
var v = t[i];
delete t[i];
t[i] = v;
}
}
return result.join('');
}

// Break from the inner for-in loop.
function h(t, deopt) {
var result = [];
for (var i in t) {
for (var j in t) {
result.push(i + j + t[i] + t[j]);
break;
}
}
deopt.deopt;
return result.join('');
}

// Continue in the inner loop.
function j(t, deopt) {
var result = [];
for (var i in t) {
for (var j in t) {
result.push(i + j + t[i] + t[j]);
continue;
}
}
deopt.deopt;
return result.join('');
}

// Continue of the outer loop.
function k(t, deopt) {
var result = [];
outer: for (var i in t) {
for (var j in t) {
result.push(i + j + t[i] + t[j]);
continue outer;
}
}
deopt.deopt;
return result.join('');
}

// Break of the outer loop.
function l(t, deopt) {
var result = [];
outer: for (var i in t) {
for (var j in t) {
result.push(i + j + t[i] + t[j]);
break outer;
}
}
deopt.deopt;
return result.join('');
}

// Test deoptimization from inlined frame (currently it is not inlined).
function m0(t, deopt) {
for (var i in t) {
for (var j in t) {
deopt.deopt;
return i + j + t[i] + t[j];
}
}
}

function m(t, deopt) {
return m0(t, deopt);
}

function tryFunction(s, mkT, f) {
var d = {deopt: false};
assertEquals(s, f(mkT(), d));
assertEquals(s, f(mkT(), d));
assertEquals(s, f(mkT(), d));
%OptimizeFunctionOnNextCall(f);
assertEquals(s, f(mkT(), d));
assertEquals(s, f(mkT(), {}));
}

var s = "a1b2c3d4";
function mkTable() { return { a: "1", b: "2", c: "3", d: "4" }; }


tryFunction(s, mkTable, a);
tryFunction(s, mkTable, b);
tryFunction("0a1b2c3d", function () { return "abcd"; }, c);
tryFunction("0a1b2c3d", function () {
var cnt = false;
return function () {
cnt = true;
return "abcd";
}
}, d);
tryFunction("aabbccdd", mkTable, e);

function mkSmallTable() { return { a: "1", b: "2" }; }

tryFunction("aa11ab12ba21bb22", mkSmallTable, f);
tryFunction("aa11ab12bb22ba21", mkSmallTable, g);
tryFunction("aa11ba21", mkSmallTable, h);
tryFunction("aa11ab12ba21bb22", mkSmallTable, j);
tryFunction("aa11ba21", mkSmallTable, h);
tryFunction("aa11ba21", mkSmallTable, k);
tryFunction("aa11", mkSmallTable, l);
tryFunction("aa11", mkSmallTable, m);

// Test handling of null.
tryFunction("", function () {
return function () { return null; }
}, function (t) {
for (var i in t()) { return i; }
return "";
});

// Test smis.
tryFunction("", function () {
return function () { return 11; }
}, function (t) {
for (var i in t()) { return i; }
return "";
});

// Test LoadFieldByIndex for out of object properties.
function O() { this.a = 1; }
for (var i = 0; i < 10; i++) new O();
tryFunction("a1b2c3d4e5f6", function () {
var o = new O();
o.b = 2;
o.c = 3;
o.d = 4;
o.e = 5;
o.f = 6;
return o;
}, function (t) {
var r = [];
for (var i in t) r.push(i + t[i]);
return r.join('');
});
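
// Editorial sketch (not part of the checked-in test): an object with an
// enumerable property on its prototype chain is expected to stay on the
// generic for-in path, but the observable result should be unchanged.
function P() {}
P.prototype.c = "3";
tryFunction("a1b2c3", function () {
  var o = new P();
  o.a = "1";
  o.b = "2";
  return o;
}, function (t) {
  var r = [];
  for (var i in t) r.push(i + t[i]);
  return r.join('');
});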