Migrate instances of deprecated maps in HCheckMaps.

Currently only direct map checks are supported. Beyond that, only polymorphic cases with a generic fallback behave properly; regular polymorphic cases still need to be adapted. A sketch of the resulting check-maps control flow follows the change summary below.

R=danno@chromium.org

Review URL: https://chromiumcodereview.appspot.com/21536003

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@16057 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
verwaest@chromium.org 2013-08-05 16:42:39 +00:00
parent 14239ab9fb
commit 9d9930ce99
16 changed files with 216 additions and 48 deletions
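
For orientation, here is a minimal, self-contained C++ sketch of the control flow the new Lithium code emits for HCheckMaps when one of the expected maps is a migration target. It is not part of the CL: the Map/HeapObject structs and the MigrateInstance/CheckMaps helpers are hypothetical stand-ins for V8's real types, and returning false stands in for DeoptimizeIf.

#include <cstdio>
#include <vector>

// Hypothetical stand-ins: only the fields needed to illustrate the flow.
struct Map {
  bool is_deprecated;
  bool is_migration_target;
};

struct HeapObject {
  const Map* map;
};

// Stand-in for Runtime::kMigrateInstance: upgrade an instance whose map is
// deprecated. Returns false when there is nothing to migrate. (The sketch
// simply migrates to the last expected map; the real runtime computes the
// up-to-date map from the deprecated one.)
bool MigrateInstance(HeapObject* object, const Map* up_to_date_map) {
  if (!object->map->is_deprecated) return false;
  object->map = up_to_date_map;
  return true;
}

// Models the emitted code: compare the object's map against every expected
// map; if none matches and one of the expected maps is a migration target,
// take the deferred path, migrate the instance, and redo the checks once.
// Returning false corresponds to DeoptimizeIf(...).
bool CheckMaps(HeapObject* object, const std::vector<const Map*>& map_set) {
  bool has_migration_target = false;
  for (const Map* map : map_set) {
    has_migration_target |= map->is_migration_target;
  }
  bool migrated = false;
  while (true) {  // corresponds to the "check_maps" label
    for (const Map* map : map_set) {
      if (object->map == map) return true;  // success
    }
    if (!has_migration_target || migrated) return false;  // deoptimize
    // Deferred path (DoDeferredInstanceMigration); a failed migration is
    // also a deopt, signalled in the real code by the runtime returning a Smi.
    if (!MigrateInstance(object, map_set.back())) return false;
    migrated = true;
  }
}

int main() {
  Map old_map = {true, false};   // deprecated map the object still carries
  Map new_map = {false, true};   // up-to-date map, marked as migration target
  HeapObject object = {&old_map};
  std::printf("%s\n", CheckMaps(&object, {&new_map}) ? "ok" : "deopt");
}

In the emitted code the deferred path calls Runtime::kMigrateInstance, deoptimizes if the runtime returns a Smi (nothing was migrated), and otherwise jumps back to the check_maps label to redo the comparisons.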


@@ -2013,10 +2013,16 @@ LInstruction* LChunkBuilder::DoCheckFunction(HCheckFunction* instr) {
 LInstruction* LChunkBuilder::DoCheckMaps(HCheckMaps* instr) {
   LOperand* value = NULL;
-  if (!instr->CanOmitMapChecks()) value = UseRegisterAtStart(instr->value());
-  LInstruction* result = new(zone()) LCheckMaps(value);
-  if (instr->CanOmitMapChecks()) return result;
-  return AssignEnvironment(result);
+  if (!instr->CanOmitMapChecks()) {
+    value = UseRegisterAtStart(instr->value());
+    if (instr->has_migration_target()) info()->MarkAsDeferredCalling();
+  }
+  LCheckMaps* result = new(zone()) LCheckMaps(value);
+  if (!instr->CanOmitMapChecks()) {
+    AssignEnvironment(result);
+    if (instr->has_migration_target()) return AssignPointerMap(result);
+  }
+  return result;
 }


@@ -5214,33 +5214,67 @@ void LCodeGen::DoCheckFunction(LCheckFunction* instr) {
 }
 
 
-void LCodeGen::DoCheckMapCommon(Register map_reg,
-                                Handle<Map> map,
-                                LEnvironment* env) {
-  Label success;
-  __ CompareMap(map_reg, map, &success);
-  DeoptimizeIf(ne, env);
-  __ bind(&success);
+void LCodeGen::DoDeferredInstanceMigration(LCheckMaps* instr, Register object) {
+  {
+    PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
+    __ push(object);
+    CallRuntimeFromDeferred(Runtime::kMigrateInstance, 1, instr);
+    __ StoreToSafepointRegisterSlot(scratch0(), r0);
+  }
+  __ tst(scratch0(), Operand(kSmiTagMask));
+  DeoptimizeIf(eq, instr->environment());
 }
 
 
 void LCodeGen::DoCheckMaps(LCheckMaps* instr) {
+  class DeferredCheckMaps: public LDeferredCode {
+   public:
+    DeferredCheckMaps(LCodeGen* codegen, LCheckMaps* instr, Register object)
+        : LDeferredCode(codegen), instr_(instr), object_(object) {
+      SetExit(check_maps());
+    }
+    virtual void Generate() {
+      codegen()->DoDeferredInstanceMigration(instr_, object_);
+    }
+    Label* check_maps() { return &check_maps_; }
+    virtual LInstruction* instr() { return instr_; }
+   private:
+    LCheckMaps* instr_;
+    Label check_maps_;
+    Register object_;
+  };
+
   if (instr->hydrogen()->CanOmitMapChecks()) return;
   Register map_reg = scratch0();
+
   LOperand* input = instr->value();
   ASSERT(input->IsRegister());
   Register reg = ToRegister(input);
 
-  Label success;
   SmallMapList* map_set = instr->hydrogen()->map_set();
   __ ldr(map_reg, FieldMemOperand(reg, HeapObject::kMapOffset));
+
+  DeferredCheckMaps* deferred = NULL;
+  if (instr->hydrogen()->has_migration_target()) {
+    deferred = new(zone()) DeferredCheckMaps(this, instr, reg);
+    __ bind(deferred->check_maps());
+  }
+
+  Label success;
   for (int i = 0; i < map_set->length() - 1; i++) {
     Handle<Map> map = map_set->at(i);
     __ CompareMap(map_reg, map, &success);
     __ b(eq, &success);
   }
+
   Handle<Map> map = map_set->last();
-  DoCheckMapCommon(map_reg, map, instr->environment());
+  __ CompareMap(map_reg, map, &success);
+  if (instr->hydrogen()->has_migration_target()) {
+    __ b(ne, deferred->entry());
+  } else {
+    DeoptimizeIf(ne, instr->environment());
+  }
+
   __ bind(&success);
 }


@@ -154,8 +154,7 @@ class LCodeGen BASE_EMBEDDED {
   void DoDeferredAllocate(LAllocate* instr);
   void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
                                        Label* map_check);
-
-  void DoCheckMapCommon(Register map_reg, Handle<Map> map, LEnvironment* env);
+  void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);
 
   // Parallel move support.
   void DoParallelMove(LParallelMove* move);


@@ -2938,6 +2938,7 @@ HCheckMaps* HCheckMaps::New(Zone* zone,
                             HValue* typecheck) {
   HCheckMaps* check_map = new(zone) HCheckMaps(value, zone, typecheck);
   check_map->map_set_.Add(map, zone);
+  check_map->has_migration_target_ = map->is_migration_target();
   if (map->CanOmitMapChecks() &&
       value->IsConstant() &&
       HConstant::cast(value)->InstanceOf(map)) {


@@ -2564,6 +2564,7 @@ class HCheckMaps: public HTemplateInstruction<2> {
     HCheckMaps* check_map = new(zone) HCheckMaps(value, zone, typecheck);
     for (int i = 0; i < maps->length(); i++) {
       check_map->map_set_.Add(maps->at(i), zone);
+      check_map->has_migration_target_ |= maps->at(i)->is_migration_target();
     }
     check_map->map_set_.Sort();
     return check_map;
@@ -2582,6 +2583,10 @@ class HCheckMaps: public HTemplateInstruction<2> {
   HValue* value() { return OperandAt(0); }
   SmallMapList* map_set() { return &map_set_; }
 
+  bool has_migration_target() {
+    return has_migration_target_;
+  }
+
   virtual void FinalizeUniqueValueId();
 
   DECLARE_CONCRETE_INSTRUCTION(CheckMaps)
@@ -2606,7 +2611,7 @@ class HCheckMaps: public HTemplateInstruction<2> {
   // Clients should use one of the static New* methods above.
   HCheckMaps(HValue* value, Zone *zone, HValue* typecheck)
       : HTemplateInstruction<2>(value->type()),
-        omit_(false), map_unique_ids_(0, zone) {
+        omit_(false), has_migration_target_(false), map_unique_ids_(0, zone) {
     SetOperandAt(0, value);
     // Use the object value for the dependency if NULL is passed.
     // TODO(titzer): do GVN flags already express this dependency?
@@ -2628,6 +2633,7 @@ class HCheckMaps: public HTemplateInstruction<2> {
   }
 
   bool omit_;
+  bool has_migration_target_;
   SmallMapList map_set_;
   ZoneList<UniqueValueId> map_unique_ids_;
 };


@@ -882,7 +882,7 @@ void LCodeGen::LoadContextFromDeferred(LOperand* context) {
   } else if (context->IsConstantOperand()) {
     HConstant* constant =
         chunk_->LookupConstant(LConstantOperand::cast(context));
-    __ LoadHeapObject(esi, Handle<Context>::cast(constant->handle()));
+    __ LoadObject(esi, Handle<Object>::cast(constant->handle()));
   } else {
     UNREACHABLE();
   }
@@ -5793,31 +5793,68 @@ void LCodeGen::DoCheckFunction(LCheckFunction* instr) {
 }
 
 
-void LCodeGen::DoCheckMapCommon(Register reg,
-                                Handle<Map> map,
-                                LInstruction* instr) {
-  Label success;
-  __ CompareMap(reg, map, &success);
-  DeoptimizeIf(not_equal, instr->environment());
-  __ bind(&success);
+void LCodeGen::DoDeferredInstanceMigration(LCheckMaps* instr, Register object) {
+  {
+    PushSafepointRegistersScope scope(this);
+    __ push(object);
+    __ xor_(esi, esi);
+    __ CallRuntimeSaveDoubles(Runtime::kMigrateInstance);
+    RecordSafepointWithRegisters(
+        instr->pointer_map(), 1, Safepoint::kNoLazyDeopt);
+    __ test(eax, Immediate(kSmiTagMask));
+  }
+  DeoptimizeIf(zero, instr->environment());
 }
 
 
 void LCodeGen::DoCheckMaps(LCheckMaps* instr) {
+  class DeferredCheckMaps: public LDeferredCode {
+   public:
+    DeferredCheckMaps(LCodeGen* codegen, LCheckMaps* instr, Register object)
+        : LDeferredCode(codegen), instr_(instr), object_(object) {
+      SetExit(check_maps());
+    }
+    virtual void Generate() {
+      codegen()->DoDeferredInstanceMigration(instr_, object_);
+    }
+    Label* check_maps() { return &check_maps_; }
+    virtual LInstruction* instr() { return instr_; }
+   private:
+    LCheckMaps* instr_;
+    Label check_maps_;
+    Register object_;
+  };
+
   if (instr->hydrogen()->CanOmitMapChecks()) return;
+
   LOperand* input = instr->value();
   ASSERT(input->IsRegister());
   Register reg = ToRegister(input);
 
-  Label success;
   SmallMapList* map_set = instr->hydrogen()->map_set();
+
+  DeferredCheckMaps* deferred = NULL;
+  if (instr->hydrogen()->has_migration_target()) {
+    deferred = new(zone()) DeferredCheckMaps(this, instr, reg);
+    __ bind(deferred->check_maps());
+  }
+
+  Label success;
   for (int i = 0; i < map_set->length() - 1; i++) {
     Handle<Map> map = map_set->at(i);
     __ CompareMap(reg, map, &success);
     __ j(equal, &success);
   }
+
   Handle<Map> map = map_set->last();
-  DoCheckMapCommon(reg, map, instr);
+  __ CompareMap(reg, map, &success);
+  if (instr->hydrogen()->has_migration_target()) {
+    __ j(not_equal, deferred->entry());
+  } else {
+    DeoptimizeIf(not_equal, instr->environment());
+  }
+
   __ bind(&success);
 }


@@ -163,8 +163,7 @@ class LCodeGen BASE_EMBEDDED {
   void DoDeferredAllocate(LAllocate* instr);
   void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
                                        Label* map_check);
-
-  void DoCheckMapCommon(Register reg, Handle<Map> map, LInstruction* instr);
+  void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);
 
   // Parallel move support.
   void DoParallelMove(LParallelMove* move);


@@ -2051,10 +2051,16 @@ LInstruction* LChunkBuilder::DoCheckFunction(HCheckFunction* instr) {
 LInstruction* LChunkBuilder::DoCheckMaps(HCheckMaps* instr) {
   LOperand* value = NULL;
-  if (!instr->CanOmitMapChecks()) value = UseRegisterAtStart(instr->value());
+  if (!instr->CanOmitMapChecks()) {
+    value = UseRegisterAtStart(instr->value());
+    if (instr->has_migration_target()) info()->MarkAsDeferredCalling();
+  }
   LCheckMaps* result = new(zone()) LCheckMaps(value);
-  if (instr->CanOmitMapChecks()) return result;
-  return AssignEnvironment(result);
+  if (!instr->CanOmitMapChecks()) {
+    AssignEnvironment(result);
+    if (instr->has_migration_target()) return AssignPointerMap(result);
+  }
+  return result;
 }


@@ -3617,6 +3617,17 @@ bool Map::is_deprecated() {
 }
 
 
+void Map::set_migration_target(bool value) {
+  set_bit_field3(IsMigrationTarget::update(bit_field3(), value));
+}
+
+
+bool Map::is_migration_target() {
+  if (!FLAG_track_fields) return false;
+  return IsMigrationTarget::decode(bit_field3());
+}
+
+
 void Map::freeze() {
   set_bit_field3(IsFrozen::update(bit_field3(), true));
 }
@@ -4215,7 +4226,20 @@ void Map::InitializeDescriptors(DescriptorArray* descriptors) {
 
 ACCESSORS(Map, instance_descriptors, DescriptorArray, kDescriptorsOffset)
-SMI_ACCESSORS(Map, bit_field3, kBitField3Offset)
+
+void Map::set_bit_field3(uint32_t bits) {
+  // Ensure the upper 2 bits have the same value by sign extending it. This is
+  // necessary to be able to use the 31st bit.
+  int value = bits << 1;
+  WRITE_FIELD(this, kBitField3Offset, Smi::FromInt(value >> 1));
+}
+
+
+uint32_t Map::bit_field3() {
+  Object* value = READ_FIELD(this, kBitField3Offset);
+  return Smi::cast(value)->value();
+}
+
 
 void Map::ClearTransitions(Heap* heap, WriteBarrierMode mode) {
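
As an aside on the accessors above: bit_field3 is stored as a Smi, which on 32-bit targets holds only a 31-bit payload, so the setter makes bits 30 and 31 agree; that is what lets the new IsMigrationTarget flag occupy bit 30 (the "31st bit" in the comment). A tiny worked illustration, assuming two's-complement arithmetic shifts as the V8 code itself does:

#include <cstdint>
#include <cstdio>

int main() {
  uint32_t bits = 1u << 30;  // e.g. only IsMigrationTarget (bit 30) is set
  // Mirror set_bit_field3: shift bit 31 out, then arithmetic-shift back so
  // bit 31 becomes a copy of bit 30 and the value fits a 31-bit Smi payload.
  int value = static_cast<int>(bits << 1) >> 1;
  std::printf("%08x -> %08x\n", bits, static_cast<uint32_t>(value));
  // Prints "40000000 -> c0000000"; BitField decoding of bits 0..30 is
  // unaffected, so bit_field3() readers still see the same flags.
}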


@@ -2719,6 +2719,7 @@ MaybeObject* Map::GeneralizeRepresentation(int modify_index,
       Handle<Map>(new_map);
       return maybe_map;
     }
+    new_map->set_migration_target(true);
   }
 
   new_map->set_owns_descriptors(true);
@@ -6517,6 +6518,7 @@ MaybeObject* Map::CopyNormalized(PropertyNormalizationMode mode,
   result->set_is_shared(sharing == SHARED_NORMALIZED_MAP);
   result->set_dictionary_map(true);
+  result->set_migration_target(false);
 
 #ifdef VERIFY_HEAP
   if (FLAG_verify_heap && result->is_shared()) {


@@ -5456,8 +5456,8 @@ class Map: public HeapObject {
   inline void set_bit_field2(byte value);
 
   // Bit field 3.
-  inline int bit_field3();
-  inline void set_bit_field3(int value);
+  inline uint32_t bit_field3();
+  inline void set_bit_field3(uint32_t bits);
 
   class EnumLengthBits: public BitField<int, 0, 11> {};
   class NumberOfOwnDescriptorsBits: public BitField<int, 11, 11> {};
@@ -5469,6 +5469,7 @@ class Map: public HeapObject {
   class Deprecated: public BitField<bool, 27, 1> {};
   class IsFrozen: public BitField<bool, 28, 1> {};
   class IsUnstable: public BitField<bool, 29, 1> {};
+  class IsMigrationTarget: public BitField<bool, 30, 1> {};
 
   // Tells whether the object in the prototype property will be used
   // for instances created from this function. If the prototype
@@ -5775,6 +5776,8 @@ class Map: public HeapObject {
   inline bool is_frozen();
   inline void mark_unstable();
   inline bool is_stable();
+  inline void set_migration_target(bool value);
+  inline bool is_migration_target();
   inline void deprecate();
   inline bool is_deprecated();
   inline bool CanBeDeprecated();


@@ -13685,6 +13685,18 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_FlattenString) {
 }
 
 
+RUNTIME_FUNCTION(MaybeObject*, Runtime_MigrateInstance) {
+  HandleScope scope(isolate);
+  ASSERT(args.length() == 1);
+  CONVERT_ARG_HANDLE_CHECKED(Object, object, 0);
+  if (!object->IsJSObject()) return Smi::FromInt(0);
+  Handle<JSObject> js_object = Handle<JSObject>::cast(object);
+  if (!js_object->map()->is_deprecated()) return Smi::FromInt(0);
+  JSObject::MigrateInstance(js_object);
+  return *object;
+}
+
+
 RUNTIME_FUNCTION(MaybeObject*, Runtime_GetFromCache) {
   SealHandleScope shs(isolate);
   // This is only called from codegen, so checks might be more lax.


@@ -109,6 +109,7 @@ namespace internal {
   F(DebugCallbackSupportsStepping, 1, 1) \
   F(DebugPrepareStepInIfStepping, 1, 1) \
   F(FlattenString, 1, 1) \
+  F(MigrateInstance, 1, 1) \
   \
   /* Array join support */ \
   F(PushIfAbsent, 2, 1) \


@@ -4974,31 +4974,64 @@ void LCodeGen::DoCheckFunction(LCheckFunction* instr) {
 }
 
 
-void LCodeGen::DoCheckMapCommon(Register reg,
-                                Handle<Map> map,
-                                LInstruction* instr) {
-  Label success;
-  __ CompareMap(reg, map, &success);
-  DeoptimizeIf(not_equal, instr->environment());
-  __ bind(&success);
+void LCodeGen::DoDeferredInstanceMigration(LCheckMaps* instr, Register object) {
+  {
+    PushSafepointRegistersScope scope(this);
+    __ push(object);
+    CallRuntimeFromDeferred(Runtime::kMigrateInstance, 1, instr);
+    __ testq(rax, Immediate(kSmiTagMask));
+  }
+  DeoptimizeIf(zero, instr->environment());
 }
 
 
 void LCodeGen::DoCheckMaps(LCheckMaps* instr) {
+  class DeferredCheckMaps: public LDeferredCode {
+   public:
+    DeferredCheckMaps(LCodeGen* codegen, LCheckMaps* instr, Register object)
+        : LDeferredCode(codegen), instr_(instr), object_(object) {
+      SetExit(check_maps());
+    }
+    virtual void Generate() {
+      codegen()->DoDeferredInstanceMigration(instr_, object_);
+    }
+    Label* check_maps() { return &check_maps_; }
+    virtual LInstruction* instr() { return instr_; }
+   private:
+    LCheckMaps* instr_;
+    Label check_maps_;
+    Register object_;
+  };
+
   if (instr->hydrogen()->CanOmitMapChecks()) return;
+
   LOperand* input = instr->value();
   ASSERT(input->IsRegister());
   Register reg = ToRegister(input);
 
-  Label success;
   SmallMapList* map_set = instr->hydrogen()->map_set();
+
+  DeferredCheckMaps* deferred = NULL;
+  if (instr->hydrogen()->has_migration_target()) {
+    deferred = new(zone()) DeferredCheckMaps(this, instr, reg);
+    __ bind(deferred->check_maps());
+  }
+
+  Label success;
   for (int i = 0; i < map_set->length() - 1; i++) {
     Handle<Map> map = map_set->at(i);
     __ CompareMap(reg, map, &success);
     __ j(equal, &success);
   }
+
   Handle<Map> map = map_set->last();
-  DoCheckMapCommon(reg, map, instr);
+  __ CompareMap(reg, map, &success);
+  if (instr->hydrogen()->has_migration_target()) {
+    __ j(not_equal, deferred->entry());
+  } else {
+    DeoptimizeIf(not_equal, instr->environment());
+  }
+
   __ bind(&success);
 }


@@ -132,8 +132,7 @@ class LCodeGen BASE_EMBEDDED {
   void DoDeferredAllocate(LAllocate* instr);
   void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
                                        Label* map_check);
-
-  void DoCheckMapCommon(Register reg, Handle<Map> map, LInstruction* instr);
+  void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);
 
   // Parallel move support.
   void DoParallelMove(LParallelMove* move);


@@ -1917,10 +1917,16 @@ LInstruction* LChunkBuilder::DoCheckFunction(HCheckFunction* instr) {
 LInstruction* LChunkBuilder::DoCheckMaps(HCheckMaps* instr) {
   LOperand* value = NULL;
-  if (!instr->CanOmitMapChecks()) value = UseRegisterAtStart(instr->value());
+  if (!instr->CanOmitMapChecks()) {
+    value = UseRegisterAtStart(instr->value());
+    if (instr->has_migration_target()) info()->MarkAsDeferredCalling();
+  }
   LCheckMaps* result = new(zone()) LCheckMaps(value);
-  if (instr->CanOmitMapChecks()) return result;
-  return AssignEnvironment(result);
+  if (!instr->CanOmitMapChecks()) {
+    AssignEnvironment(result);
+    if (instr->has_migration_target()) return AssignPointerMap(result);
+  }
+  return result;
 }