Reland [Object.observe] Don't force normalization of elements for observed objects

Original Issue: https://codereview.chromium.org/29353003/

TBR=danno
BUG=v8:2946

Review URL: https://codereview.chromium.org/66933003

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@17607 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
This commit is contained in:
rafaelw@chromium.org 2013-11-08 19:12:43 +00:00
parent 6fdec542bb
commit ca8f947664
9 changed files with 30 additions and 49 deletions

View File

@@ -1432,10 +1432,10 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
__ JumpIfSmi(receiver, &slow); __ JumpIfSmi(receiver, &slow);
// Get the map of the object. // Get the map of the object.
__ ldr(receiver_map, FieldMemOperand(receiver, HeapObject::kMapOffset)); __ ldr(receiver_map, FieldMemOperand(receiver, HeapObject::kMapOffset));
// Check that the receiver does not require access checks. We need // Check that the receiver does not require access checks and is not observed.
// to do this because this generic stub does not perform map checks. // The generic stub does not perform map checks or handle observed objects.
__ ldrb(ip, FieldMemOperand(receiver_map, Map::kBitFieldOffset)); __ ldrb(ip, FieldMemOperand(receiver_map, Map::kBitFieldOffset));
__ tst(ip, Operand(1 << Map::kIsAccessCheckNeeded)); __ tst(ip, Operand(1 << Map::kIsAccessCheckNeeded | 1 << Map::kIsObserved));
__ b(ne, &slow); __ b(ne, &slow);
// Check if the object is a JS array or not. // Check if the object is a JS array or not.
__ ldrb(r4, FieldMemOperand(receiver_map, Map::kInstanceTypeOffset)); __ ldrb(r4, FieldMemOperand(receiver_map, Map::kInstanceTypeOffset));

View File

@@ -311,6 +311,7 @@ static inline MaybeObject* EnsureJSArrayWithWritableFastElements(
Heap* heap, Object* receiver, Arguments* args, int first_added_arg) { Heap* heap, Object* receiver, Arguments* args, int first_added_arg) {
if (!receiver->IsJSArray()) return NULL; if (!receiver->IsJSArray()) return NULL;
JSArray* array = JSArray::cast(receiver); JSArray* array = JSArray::cast(receiver);
if (array->map()->is_observed()) return NULL;
HeapObject* elms = array->elements(); HeapObject* elms = array->elements();
Map* map = elms->map(); Map* map = elms->map();
if (map == heap->fixed_array_map()) { if (map == heap->fixed_array_map()) {

View File

@@ -874,10 +874,10 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
__ JumpIfSmi(edx, &slow); __ JumpIfSmi(edx, &slow);
// Get the map from the receiver. // Get the map from the receiver.
__ mov(edi, FieldOperand(edx, HeapObject::kMapOffset)); __ mov(edi, FieldOperand(edx, HeapObject::kMapOffset));
// Check that the receiver does not require access checks. We need // Check that the receiver does not require access checks and is not observed.
// to do this because this generic stub does not perform map checks. // The generic stub does not perform map checks or handle observed objects.
__ test_b(FieldOperand(edi, Map::kBitFieldOffset), __ test_b(FieldOperand(edi, Map::kBitFieldOffset),
1 << Map::kIsAccessCheckNeeded); 1 << Map::kIsAccessCheckNeeded | 1 << Map::kIsObserved);
__ j(not_zero, &slow); __ j(not_zero, &slow);
// Check that the key is a smi. // Check that the key is a smi.
__ JumpIfNotSmi(ecx, &slow); __ JumpIfNotSmi(ecx, &slow);

View File

@@ -1354,10 +1354,11 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
__ JumpIfSmi(receiver, &slow); __ JumpIfSmi(receiver, &slow);
// Get the map of the object. // Get the map of the object.
__ lw(receiver_map, FieldMemOperand(receiver, HeapObject::kMapOffset)); __ lw(receiver_map, FieldMemOperand(receiver, HeapObject::kMapOffset));
// Check that the receiver does not require access checks. We need // Check that the receiver does not require access checks and is not observed.
// to do this because this generic stub does not perform map checks. // The generic stub does not perform map checks or handle observed objects.
__ lbu(t0, FieldMemOperand(receiver_map, Map::kBitFieldOffset)); __ lbu(t0, FieldMemOperand(receiver_map, Map::kBitFieldOffset));
__ And(t0, t0, Operand(1 << Map::kIsAccessCheckNeeded)); __ And(t0, t0, Operand(1 << Map::kIsAccessCheckNeeded |
1 << Map::kIsObserved));
__ Branch(&slow, ne, t0, Operand(zero_reg)); __ Branch(&slow, ne, t0, Operand(zero_reg));
// Check if the object is a JS array or not. // Check if the object is a JS array or not.
__ lbu(t0, FieldMemOperand(receiver_map, Map::kInstanceTypeOffset)); __ lbu(t0, FieldMemOperand(receiver_map, Map::kInstanceTypeOffset));

View File

@@ -366,9 +366,6 @@ void Map::MapVerify() {
SLOW_ASSERT(transitions()->IsSortedNoDuplicates()); SLOW_ASSERT(transitions()->IsSortedNoDuplicates());
SLOW_ASSERT(transitions()->IsConsistentWithBackPointers(this)); SLOW_ASSERT(transitions()->IsConsistentWithBackPointers(this));
} }
ASSERT(!is_observed() || instance_type() < FIRST_JS_OBJECT_TYPE ||
instance_type() > LAST_JS_OBJECT_TYPE ||
has_slow_elements_kind() || has_external_array_elements());
} }

View File

@@ -3649,16 +3649,13 @@ bool Map::owns_descriptors() {
} }
void Map::set_is_observed(bool is_observed) { void Map::set_has_instance_call_handler() {
ASSERT(instance_type() < FIRST_JS_OBJECT_TYPE || set_bit_field3(HasInstanceCallHandler::update(bit_field3(), true));
instance_type() > LAST_JS_OBJECT_TYPE ||
has_slow_elements_kind() || has_external_array_elements());
set_bit_field3(IsObserved::update(bit_field3(), is_observed));
} }
bool Map::is_observed() { bool Map::has_instance_call_handler() {
return IsObserved::decode(bit_field3()); return HasInstanceCallHandler::decode(bit_field3());
} }

View File

@@ -5614,12 +5614,6 @@ void JSObject::SetObserved(Handle<JSObject> object) {
if (object->map()->is_observed()) if (object->map()->is_observed())
return; return;
if (!object->HasExternalArrayElements()) {
// Go to dictionary mode, so that we don't skip map checks.
NormalizeElements(object);
ASSERT(!object->HasFastElements());
}
LookupResult result(isolate); LookupResult result(isolate);
object->map()->LookupTransition(*object, object->map()->LookupTransition(*object,
isolate->heap()->observed_symbol(), isolate->heap()->observed_symbol(),
@@ -5633,7 +5627,7 @@ void JSObject::SetObserved(Handle<JSObject> object) {
new_map = Map::CopyForObserved(handle(object->map())); new_map = Map::CopyForObserved(handle(object->map()));
} else { } else {
new_map = Map::Copy(handle(object->map())); new_map = Map::Copy(handle(object->map()));
new_map->set_is_observed(true); new_map->set_is_observed();
} }
object->set_map(*new_map); object->set_map(*new_map);
} }
@@ -6971,7 +6965,7 @@ Handle<Map> Map::CopyForObserved(Handle<Map> map) {
map->set_transitions(*transitions); map->set_transitions(*transitions);
new_map->set_is_observed(true); new_map->set_is_observed();
if (map->owns_descriptors()) { if (map->owns_descriptors()) {
new_map->InitializeDescriptors(map->instance_descriptors()); new_map->InitializeDescriptors(map->instance_descriptors());
@@ -11226,7 +11220,6 @@ MaybeObject* JSObject::SetFastElementsCapacityAndLength(
Heap* heap = GetHeap(); Heap* heap = GetHeap();
// We should never end in here with a pixel or external array. // We should never end in here with a pixel or external array.
ASSERT(!HasExternalArrayElements()); ASSERT(!HasExternalArrayElements());
ASSERT(!map()->is_observed());
// Allocate a new fast elements backing store. // Allocate a new fast elements backing store.
FixedArray* new_elements; FixedArray* new_elements;
@@ -11311,7 +11304,6 @@ MaybeObject* JSObject::SetFastDoubleElementsCapacityAndLength(
Heap* heap = GetHeap(); Heap* heap = GetHeap();
// We should never end in here with a pixel or external array. // We should never end in here with a pixel or external array.
ASSERT(!HasExternalArrayElements()); ASSERT(!HasExternalArrayElements());
ASSERT(!map()->is_observed());
FixedArrayBase* elems; FixedArrayBase* elems;
{ MaybeObject* maybe_obj = { MaybeObject* maybe_obj =
@@ -11460,10 +11452,6 @@ MaybeObject* JSArray::SetElementsLength(Object* len) {
if (!new_length_handle->ToArrayIndex(&new_length)) if (!new_length_handle->ToArrayIndex(&new_length))
return Failure::InternalError(); return Failure::InternalError();
// Observed arrays should always be in dictionary mode;
// if they were in fast mode, the below is slower than necessary
// as it iterates over the array backing store multiple times.
ASSERT(self->HasDictionaryElements());
static const PropertyAttributes kNoAttrFilter = NONE; static const PropertyAttributes kNoAttrFilter = NONE;
int num_elements = self->NumberOfLocalElements(kNoAttrFilter); int num_elements = self->NumberOfLocalElements(kNoAttrFilter);
if (num_elements > 0) { if (num_elements > 0) {
@@ -11474,6 +11462,8 @@ MaybeObject* JSArray::SetElementsLength(Object* len) {
} }
} else { } else {
// For sparse arrays, only iterate over existing elements. // For sparse arrays, only iterate over existing elements.
// TODO(rafaelw): For fast, sparse arrays, we can avoid iterating over
// the to-be-removed indices twice.
Handle<FixedArray> keys = isolate->factory()->NewFixedArray(num_elements); Handle<FixedArray> keys = isolate->factory()->NewFixedArray(num_elements);
self->GetLocalElementKeys(*keys, kNoAttrFilter); self->GetLocalElementKeys(*keys, kNoAttrFilter);
while (num_elements-- > 0) { while (num_elements-- > 0) {
@@ -12872,7 +12862,6 @@ MaybeObject* JSObject::UpdateAllocationSite(ElementsKind to_kind) {
MaybeObject* JSObject::TransitionElementsKind(ElementsKind to_kind) { MaybeObject* JSObject::TransitionElementsKind(ElementsKind to_kind) {
ASSERT(!map()->is_observed());
ElementsKind from_kind = map()->elements_kind(); ElementsKind from_kind = map()->elements_kind();
if (IsFastHoleyElementsKind(from_kind)) { if (IsFastHoleyElementsKind(from_kind)) {

View File

@@ -5664,7 +5664,7 @@ class Map: public HeapObject {
class FunctionWithPrototype: public BitField<bool, 23, 1> {}; class FunctionWithPrototype: public BitField<bool, 23, 1> {};
class DictionaryMap: public BitField<bool, 24, 1> {}; class DictionaryMap: public BitField<bool, 24, 1> {};
class OwnsDescriptors: public BitField<bool, 25, 1> {}; class OwnsDescriptors: public BitField<bool, 25, 1> {};
class IsObserved: public BitField<bool, 26, 1> {}; class HasInstanceCallHandler: public BitField<bool, 26, 1> {};
class Deprecated: public BitField<bool, 27, 1> {}; class Deprecated: public BitField<bool, 27, 1> {};
class IsFrozen: public BitField<bool, 28, 1> {}; class IsFrozen: public BitField<bool, 28, 1> {};
class IsUnstable: public BitField<bool, 29, 1> {}; class IsUnstable: public BitField<bool, 29, 1> {};
@@ -5727,12 +5727,12 @@ class Map: public HeapObject {
} }
// Tells whether the instance has a call-as-function handler. // Tells whether the instance has a call-as-function handler.
inline void set_has_instance_call_handler() { inline void set_is_observed() {
set_bit_field(bit_field() | (1 << kHasInstanceCallHandler)); set_bit_field(bit_field() | (1 << kIsObserved));
} }
inline bool has_instance_call_handler() { inline bool is_observed() {
return ((1 << kHasInstanceCallHandler) & bit_field()) != 0; return ((1 << kIsObserved) & bit_field()) != 0;
} }
inline void set_is_extensible(bool value); inline void set_is_extensible(bool value);
@@ -5741,10 +5741,6 @@ class Map: public HeapObject {
inline void set_elements_kind(ElementsKind elements_kind) { inline void set_elements_kind(ElementsKind elements_kind) {
ASSERT(elements_kind < kElementsKindCount); ASSERT(elements_kind < kElementsKindCount);
ASSERT(kElementsKindCount <= (1 << kElementsKindBitCount)); ASSERT(kElementsKindCount <= (1 << kElementsKindBitCount));
ASSERT(!is_observed() ||
elements_kind == DICTIONARY_ELEMENTS ||
elements_kind == NON_STRICT_ARGUMENTS_ELEMENTS ||
IsExternalArrayElementsKind(elements_kind));
set_bit_field2((bit_field2() & ~kElementsKindMask) | set_bit_field2((bit_field2() & ~kElementsKindMask) |
(elements_kind << kElementsKindShift)); (elements_kind << kElementsKindShift));
ASSERT(this->elements_kind() == elements_kind); ASSERT(this->elements_kind() == elements_kind);
@@ -5997,8 +5993,8 @@ class Map: public HeapObject {
inline bool owns_descriptors(); inline bool owns_descriptors();
inline void set_owns_descriptors(bool is_shared); inline void set_owns_descriptors(bool is_shared);
inline bool is_observed(); inline bool has_instance_call_handler();
inline void set_is_observed(bool is_observed); inline void set_has_instance_call_handler();
inline void freeze(); inline void freeze();
inline bool is_frozen(); inline bool is_frozen();
inline void mark_unstable(); inline void mark_unstable();
@@ -6257,7 +6253,7 @@ class Map: public HeapObject {
static const int kHasNamedInterceptor = 3; static const int kHasNamedInterceptor = 3;
static const int kHasIndexedInterceptor = 4; static const int kHasIndexedInterceptor = 4;
static const int kIsUndetectable = 5; static const int kIsUndetectable = 5;
static const int kHasInstanceCallHandler = 6; static const int kIsObserved = 6;
static const int kIsAccessCheckNeeded = 7; static const int kIsAccessCheckNeeded = 7;
// Bit positions for bit field 2 // Bit positions for bit field 2

View File

@@ -749,10 +749,10 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
__ JumpIfSmi(rdx, &slow_with_tagged_index); __ JumpIfSmi(rdx, &slow_with_tagged_index);
// Get the map from the receiver. // Get the map from the receiver.
__ movq(r9, FieldOperand(rdx, HeapObject::kMapOffset)); __ movq(r9, FieldOperand(rdx, HeapObject::kMapOffset));
// Check that the receiver does not require access checks. We need // Check that the receiver does not require access checks and is not observed.
// to do this because this generic stub does not perform map checks. // The generic stub does not perform map checks or handle observed objects.
__ testb(FieldOperand(r9, Map::kBitFieldOffset), __ testb(FieldOperand(r9, Map::kBitFieldOffset),
Immediate(1 << Map::kIsAccessCheckNeeded)); Immediate(1 << Map::kIsAccessCheckNeeded | 1 << Map::kIsObserved));
__ j(not_zero, &slow_with_tagged_index); __ j(not_zero, &slow_with_tagged_index);
// Check that the key is a smi. // Check that the key is a smi.
__ JumpIfNotSmi(rcx, &slow_with_tagged_index); __ JumpIfNotSmi(rcx, &slow_with_tagged_index);