Moved LastAdded from DescriptorArray to Map. Renamed kLastAdded to kEnumCache.
Review URL: https://chromiumcodereview.appspot.com/10802034
git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@12146 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent 8b70a7bc54
commit 52bfb2a18e
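Background for the hunks below: the index of the most recently added descriptor now lives in Map::bit_field3, packed next to the IsShared and FunctionWithPrototype flags via V8's BitField helper, while slot 1 of the DescriptorArray is left to the enum cache alone (hence the kLastAdded to kEnumCache renaming). The following is a minimal standalone sketch of the encode/decode/update pattern those accessors rely on; the BitField stand-in is simplified, and only the field positions mirror the declarations added in this change:

#include <cassert>
#include <cstdint>

// Simplified stand-in for V8's BitField<T, shift, size> template; the real
// helper has additional checks, but these three operations are the ones the
// diff below uses.
template <typename T, int shift, int size>
struct BitField {
  static const uint32_t kMask = ((1u << size) - 1u) << shift;
  static const int kMax = (1 << size) - 1;
  static uint32_t encode(T value) { return static_cast<uint32_t>(value) << shift; }
  static T decode(uint32_t field) { return static_cast<T>((field & kMask) >> shift); }
  static uint32_t update(uint32_t field, T value) {
    return (field & ~kMask) | encode(value);
  }
};

// Field layout mirroring the declarations added to Map in this change.
struct IsShared : BitField<bool, 0, 1> {};
struct FunctionWithPrototype : BitField<bool, 1, 1> {};
struct LastAddedBits : BitField<int, 2, 11> {};

int main() {
  // A fresh map: no flags set, LastAdded at the "none added" sentinel.
  uint32_t bit_field3 = LastAddedBits::encode(LastAddedBits::kMax);
  assert(LastAddedBits::decode(bit_field3) == LastAddedBits::kMax);

  // Recording descriptor 0 updates only the LastAdded bits.
  bit_field3 = LastAddedBits::update(bit_field3, 0);
  assert(LastAddedBits::decode(bit_field3) == 0);
  assert(!IsShared::decode(bit_field3));
  return 0;
}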
@@ -1144,7 +1144,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
   // We got a map in register r0. Get the enumeration cache from it.
   __ bind(&use_cache);
   __ LoadInstanceDescriptors(r0, r1, r2);
-  __ ldr(r1, FieldMemOperand(r1, DescriptorArray::kLastAddedOffset));
+  __ ldr(r1, FieldMemOperand(r1, DescriptorArray::kEnumCacheOffset));
   __ ldr(r2, FieldMemOperand(r1, DescriptorArray::kEnumCacheBridgeCacheOffset));
 
   // Set up the four remaining stack slots.
@@ -5364,7 +5364,7 @@ void LCodeGen::DoForInCacheArray(LForInCacheArray* instr) {
   Register scratch = ToRegister(instr->scratch());
   __ LoadInstanceDescriptors(map, result, scratch);
   __ ldr(result,
-         FieldMemOperand(result, DescriptorArray::kLastAddedOffset));
+         FieldMemOperand(result, DescriptorArray::kEnumCacheOffset));
   __ ldr(result,
          FieldMemOperand(result, FixedArray::SizeFor(instr->idx())));
   __ cmp(result, Operand(0));
@@ -3723,7 +3723,7 @@ void MacroAssembler::CheckEnumCache(Register null_value, Label* call_runtime) {
   // Check that there is an enum cache in the non-empty instance
   // descriptors (r3). This is the case if the next enumeration
   // index field does not contain a smi.
-  ldr(r3, FieldMemOperand(r3, DescriptorArray::kLastAddedOffset));
+  ldr(r3, FieldMemOperand(r3, DescriptorArray::kEnumCacheOffset));
   JumpIfSmi(r3, call_runtime);
 
   // For all objects but the receiver, check that the cache is empty.
@@ -906,7 +906,6 @@ void Factory::CopyAppendCallbackDescriptors(Handle<Map> map,
 
   // Copy the descriptors from the array.
   if (0 < descriptor_count) {
-    result->SetLastAdded(array->LastAdded());
     for (int i = 0; i < descriptor_count; i++) {
       result->CopyFrom(i, *array, i, witness);
     }
@@ -924,14 +923,14 @@ void Factory::CopyAppendCallbackDescriptors(Handle<Map> map,
     Handle<String> key =
         SymbolFromString(Handle<String>(String::cast(entry->name())));
     // Check if a descriptor with this name already exists before writing.
-    if (LinearSearch(*result, *key, result->NumberOfSetDescriptors()) ==
+    if (LinearSearch(*result, *key, map->NumberOfSetDescriptors()) ==
        DescriptorArray::kNotFound) {
       CallbacksDescriptor desc(*key, *entry, entry->property_attributes());
       map->AppendDescriptor(&desc, witness);
     }
   }
 
-  int new_number_of_descriptors = result->NumberOfSetDescriptors();
+  int new_number_of_descriptors = map->NumberOfSetDescriptors();
   // Reinstall the original descriptor array if no new elements were added.
   if (new_number_of_descriptors == descriptor_count) {
     map->set_instance_descriptors(*array);
@@ -946,7 +945,6 @@ void Factory::CopyAppendCallbackDescriptors(Handle<Map> map,
     for (int i = 0; i < new_number_of_descriptors; i++) {
       new_result->CopyFrom(i, *result, i, witness);
     }
-    new_result->SetLastAdded(result->LastAdded());
     map->set_instance_descriptors(*new_result);
   }
 }
src/heap.cc
@@ -2044,7 +2044,8 @@ MaybeObject* Heap::AllocatePartialMap(InstanceType instance_type,
   reinterpret_cast<Map*>(result)->set_unused_property_fields(0);
   reinterpret_cast<Map*>(result)->set_bit_field(0);
   reinterpret_cast<Map*>(result)->set_bit_field2(0);
-  reinterpret_cast<Map*>(result)->set_bit_field3(0);
+  reinterpret_cast<Map*>(result)->set_bit_field3(
+      Map::LastAddedBits::encode(Map::kNoneAdded));
   return result;
 }
 
@@ -2053,9 +2054,8 @@ MaybeObject* Heap::AllocateMap(InstanceType instance_type,
                               int instance_size,
                               ElementsKind elements_kind) {
   Object* result;
-  { MaybeObject* maybe_result = AllocateRawMap();
-    if (!maybe_result->ToObject(&result)) return maybe_result;
-  }
+  MaybeObject* maybe_result = AllocateRawMap();
+  if (!maybe_result->To(&result)) return maybe_result;
 
   Map* map = reinterpret_cast<Map*>(result);
   map->set_map_no_write_barrier(meta_map());
@@ -2072,7 +2072,7 @@ MaybeObject* Heap::AllocateMap(InstanceType instance_type,
   map->set_unused_property_fields(0);
   map->set_bit_field(0);
   map->set_bit_field2(1 << Map::kIsExtensible);
-  map->set_bit_field3(0);
+  map->set_bit_field3(Map::LastAddedBits::encode(Map::kNoneAdded));
   map->set_elements_kind(elements_kind);
 
   // If the map object is aligned fill the padding area with Smi 0 objects.
@@ -1094,7 +1094,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
   // We got a map in register eax. Get the enumeration cache from it.
   __ bind(&use_cache);
   __ LoadInstanceDescriptors(eax, ecx);
-  __ mov(ecx, FieldOperand(ecx, DescriptorArray::kLastAddedOffset));
+  __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumCacheOffset));
   __ mov(edx, FieldOperand(ecx, DescriptorArray::kEnumCacheBridgeCacheOffset));
 
   // Set up the four remaining stack slots.
@@ -5295,7 +5295,7 @@ void LCodeGen::DoForInCacheArray(LForInCacheArray* instr) {
   Register result = ToRegister(instr->result());
   __ LoadInstanceDescriptors(map, result);
   __ mov(result,
-         FieldOperand(result, DescriptorArray::kLastAddedOffset));
+         FieldOperand(result, DescriptorArray::kEnumCacheOffset));
   __ mov(result,
          FieldOperand(result, FixedArray::SizeFor(instr->idx())));
   __ test(result, result);
@@ -2901,7 +2901,7 @@ void MacroAssembler::CheckEnumCache(Label* call_runtime) {
   // Check that there is an enum cache in the non-empty instance
   // descriptors (edx). This is the case if the next enumeration
   // index field does not contain a smi.
-  mov(edx, FieldOperand(edx, DescriptorArray::kLastAddedOffset));
+  mov(edx, FieldOperand(edx, DescriptorArray::kEnumCacheOffset));
   JumpIfSmi(edx, call_runtime);
 
   // For all objects but the receiver, check that the cache is empty.
@@ -1516,10 +1516,10 @@ void StoreIC::UpdateCaches(LookupResult* lookup,
     case CONSTANT_FUNCTION:
       return;
     case TRANSITION: {
-      Map* value = lookup->GetTransitionTarget();
-      Handle<Map> transition(Map::cast(value));
+      Handle<Map> transition(lookup->GetTransitionTarget());
+      int descriptor = transition->LastAdded();
 
       DescriptorArray* target_descriptors = transition->instance_descriptors();
-      int descriptor = target_descriptors->LastAdded();
       PropertyDetails details = target_descriptors->GetDetails(descriptor);
 
       if (details.type() != FIELD || details.attributes() != NONE) return;
@@ -1980,9 +1980,9 @@ void KeyedStoreIC::UpdateCaches(LookupResult* lookup,
       break;
     case TRANSITION: {
       Handle<Map> transition(lookup->GetTransitionTarget());
+      int descriptor = transition->LastAdded();
 
       DescriptorArray* target_descriptors = transition->instance_descriptors();
-      int descriptor = target_descriptors->LastAdded();
       PropertyDetails details = target_descriptors->GetDetails(descriptor);
 
       if (details.type() == FIELD && details.attributes() == NONE) {
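The two IC hunks above follow the same shape as the JSObject and TransitionArray changes later in this diff: the descriptor index added by a transition is read off the transition target map, and only the details are still fetched from that map's descriptor array. A self-contained stub of that read path, with simplified types and a hypothetical helper name rather than the real heap objects:

#include <cassert>
#include <vector>

// Stub model of the relationship the ICs rely on after this change: the
// index of the last-added descriptor lives on the map, while the descriptor
// details still live in the map's descriptor array.
struct PropertyDetails { int type; int attributes; int index; };

struct DescriptorArray {
  std::vector<PropertyDetails> details;
  PropertyDetails GetDetails(int descriptor) const { return details[descriptor]; }
};

struct Map {
  DescriptorArray* descriptors;
  int last_added;  // packed into bit_field3 in the real Map
  int LastAdded() const { return last_added; }
  DescriptorArray* instance_descriptors() const { return descriptors; }
};

// Pattern used by StoreIC/KeyedStoreIC::UpdateCaches above; the helper name
// is hypothetical, the real code is inlined in the switch statements.
PropertyDetails TransitionDetails(Map* transition_map) {
  int descriptor = transition_map->LastAdded();
  return transition_map->instance_descriptors()->GetDetails(descriptor);
}

int main() {
  DescriptorArray array{{{/*type=*/1, /*attributes=*/0, /*index=*/1}}};
  Map transition{&array, /*last_added=*/0};
  assert(TransitionDetails(&transition).index == 1);
  return 0;
}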
@@ -1149,7 +1149,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
   // We got a map in register v0. Get the enumeration cache from it.
   __ bind(&use_cache);
   __ LoadInstanceDescriptors(v0, a1, a2);
-  __ lw(a1, FieldMemOperand(a1, DescriptorArray::kLastAddedOffset));
+  __ lw(a1, FieldMemOperand(a1, DescriptorArray::kEnumCacheOffset));
   __ lw(a2, FieldMemOperand(a1, DescriptorArray::kEnumCacheBridgeCacheOffset));
 
   // Set up the four remaining stack slots.
@@ -5170,7 +5170,7 @@ void LCodeGen::DoForInCacheArray(LForInCacheArray* instr) {
   Register scratch = ToRegister(instr->scratch());
   __ LoadInstanceDescriptors(map, result, scratch);
   __ lw(result,
-        FieldMemOperand(result, DescriptorArray::kLastAddedOffset));
+        FieldMemOperand(result, DescriptorArray::kEnumCacheOffset));
   __ lw(result,
         FieldMemOperand(result, FixedArray::SizeFor(instr->idx())));
   DeoptimizeIf(eq, instr->environment(), result, Operand(zero_reg));
@@ -5339,7 +5339,7 @@ void MacroAssembler::CheckEnumCache(Register null_value, Label* call_runtime) {
   // Check that there is an enum cache in the non-empty instance
   // descriptors (a3). This is the case if the next enumeration
   // index field does not contain a smi.
-  lw(a3, FieldMemOperand(a3, DescriptorArray::kLastAddedOffset));
+  lw(a3, FieldMemOperand(a3, DescriptorArray::kEnumCacheOffset));
   JumpIfSmi(a3, call_runtime);
 
   // For all objects but the receiver, check that the cache is empty.
@@ -2954,16 +2954,12 @@ bool Map::has_non_instance_prototype() {
 
 
 void Map::set_function_with_prototype(bool value) {
-  if (value) {
-    set_bit_field3(bit_field3() | (1 << kFunctionWithPrototype));
-  } else {
-    set_bit_field3(bit_field3() & ~(1 << kFunctionWithPrototype));
-  }
+  set_bit_field3(FunctionWithPrototype::update(bit_field3(), value));
 }
 
 
 bool Map::function_with_prototype() {
-  return ((1 << kFunctionWithPrototype) & bit_field3()) != 0;
+  return FunctionWithPrototype::decode(bit_field3());
 }
 
 
@@ -3008,15 +3004,11 @@ bool Map::attached_to_shared_function_info() {
 
 
 void Map::set_is_shared(bool value) {
-  if (value) {
-    set_bit_field3(bit_field3() | (1 << kIsShared));
-  } else {
-    set_bit_field3(bit_field3() & ~(1 << kIsShared));
-  }
+  set_bit_field3(IsShared::update(bit_field3(), value));
 }
 
 bool Map::is_shared() {
-  return ((1 << kIsShared) & bit_field3()) != 0;
+  return IsShared::decode(bit_field3());
 }
 
 
@@ -3522,17 +3514,17 @@ void Map::InitializeDescriptors(DescriptorArray* descriptors) {
   }
 #endif
 
+  set_instance_descriptors(descriptors);
+
   for (int i = 0; i < len; ++i) {
     if (descriptors->GetDetails(i).index() == len) {
-      descriptors->SetLastAdded(i);
+      SetLastAdded(i);
       break;
     }
   }
 
   ASSERT(len == 0 ||
-         len == descriptors->GetDetails(descriptors->LastAdded()).index());
-
-  set_instance_descriptors(descriptors);
+         len == descriptors->GetDetails(LastAdded()).index());
 }
 
 
@@ -3558,9 +3550,9 @@ void Map::ClearDescriptorArray(Heap* heap, WriteBarrierMode mode) {
 void Map::AppendDescriptor(Descriptor* desc,
                            const DescriptorArray::WhitenessWitness& witness) {
   DescriptorArray* descriptors = instance_descriptors();
-  int set_descriptors = descriptors->NumberOfSetDescriptors();
+  int set_descriptors = NumberOfSetDescriptors();
   int new_last_added = descriptors->Append(desc, witness, set_descriptors);
-  descriptors->SetLastAdded(new_last_added);
+  SetLastAdded(new_last_added);
 }
 
 
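The Map::AppendDescriptor hunk above captures the write half of the move: the descriptor array keeps storing entries, while the map records which slot was filled last. A rough stub of that division of labour, simplified and not the real heap layout (the real index is packed into bit_field3 and Append also takes the witness and count):

#include <cassert>
#include <string>
#include <vector>

struct Descriptor { std::string key; };

// The array only stores entries; Append reports which slot it used.
struct DescriptorArray {
  std::vector<Descriptor> slots;
  int Append(const Descriptor& desc) {
    slots.push_back(desc);
    return static_cast<int>(slots.size()) - 1;  // new_last_added
  }
};

// The map owns the bookkeeping that used to live in the array.
struct Map {
  DescriptorArray* descriptors;
  int last_added;  // stands in for the LastAddedBits field of bit_field3
  int LastAdded() const { return last_added; }
  void SetLastAdded(int index) { last_added = index; }
  void AppendDescriptor(const Descriptor& desc) {
    int new_last_added = descriptors->Append(desc);
    SetLastAdded(new_last_added);
  }
};

int main() {
  DescriptorArray array;
  Map map{&array, -1};  // -1 plays the role of kNoneAdded in this sketch
  map.AppendDescriptor(Descriptor{"x"});
  map.AppendDescriptor(Descriptor{"y"});
  assert(map.LastAdded() == 1);
  assert(array.slots[map.LastAdded()].key == "y");
  return 0;
}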
@@ -2771,9 +2771,9 @@ MaybeObject* JSObject::SetPropertyForResult(LookupResult* result,
                                             strict_mode);
     case TRANSITION: {
       Map* transition_map = result->GetTransitionTarget();
+      int descriptor = transition_map->LastAdded();
 
       DescriptorArray* descriptors = transition_map->instance_descriptors();
-      int descriptor = descriptors->LastAdded();
       PropertyDetails details = descriptors->GetDetails(descriptor);
 
       if (details.type() == FIELD) {
@@ -2892,9 +2892,9 @@ MaybeObject* JSObject::SetLocalPropertyIgnoreAttributes(
       return ConvertDescriptorToField(name, value, attributes);
     case TRANSITION: {
       Map* transition_map = result.GetTransitionTarget();
+      int descriptor = transition_map->LastAdded();
 
       DescriptorArray* descriptors = transition_map->instance_descriptors();
-      int descriptor = descriptors->LastAdded();
       PropertyDetails details = descriptors->GetDetails(descriptor);
 
       if (details.type() == FIELD) {
@@ -3080,17 +3080,16 @@ MaybeObject* NormalizedMapCache::Get(JSObject* obj,
       // except for the code cache, which can contain some ics which can be
       // applied to the shared map.
       Object* fresh;
-      { MaybeObject* maybe_fresh =
-            fast->CopyNormalized(mode, SHARED_NORMALIZED_MAP);
-        if (maybe_fresh->ToObject(&fresh)) {
-          ASSERT(memcmp(Map::cast(fresh)->address(),
-                        Map::cast(result)->address(),
-                        Map::kCodeCacheOffset) == 0);
-          int offset = Map::kCodeCacheOffset + kPointerSize;
-          ASSERT(memcmp(Map::cast(fresh)->address() + offset,
-                        Map::cast(result)->address() + offset,
-                        Map::kSize - offset) == 0);
-        }
-      }
+      MaybeObject* maybe_fresh =
+          fast->CopyNormalized(mode, SHARED_NORMALIZED_MAP);
+      if (maybe_fresh->ToObject(&fresh)) {
+        ASSERT(memcmp(Map::cast(fresh)->address(),
+                      Map::cast(result)->address(),
+                      Map::kCodeCacheOffset) == 0);
+        int offset = Map::kCodeCacheOffset + kPointerSize;
+        ASSERT(memcmp(Map::cast(fresh)->address() + offset,
+                      Map::cast(result)->address() + offset,
+                      Map::kSize - offset) == 0);
+      }
     }
 #endif
@@ -4496,7 +4495,7 @@ MaybeObject* JSObject::DefineFastAccessor(String* name,
   // If there is a transition, try to follow it.
   if (result.IsFound()) {
     Map* target = result.GetTransitionTarget();
-    int descriptor_number = target->instance_descriptors()->LastAdded();
+    int descriptor_number = target->LastAdded();
     ASSERT(target->instance_descriptors()->GetKey(descriptor_number) == name);
     return TryAccessorTransition(
         this, target, descriptor_number, component, accessor, attributes);
@@ -4711,14 +4710,14 @@ MaybeObject* Map::CopyNormalized(PropertyNormalizationMode mode,
   }
 
   Map* result;
-  { MaybeObject* maybe_result = RawCopy(new_instance_size);
-    if (!maybe_result->To(&result)) return maybe_result;
-  }
+  MaybeObject* maybe_result = RawCopy(new_instance_size);
+  if (!maybe_result->To(&result)) return maybe_result;
 
   if (mode != CLEAR_INOBJECT_PROPERTIES) {
     result->set_inobject_properties(inobject_properties());
   }
 
+  result->SetLastAdded(kNoneAdded);
   result->set_code_cache(code_cache());
   result->set_is_shared(sharing == SHARED_NORMALIZED_MAP);
 
@@ -4756,15 +4755,15 @@ MaybeObject* Map::CopyReplaceDescriptors(DescriptorArray* descriptors,
   MaybeObject* maybe_result = CopyDropDescriptors();
   if (!maybe_result->To(&result)) return maybe_result;
 
-  if (last_added == DescriptorArray::kNoneAdded) {
+  if (last_added == kNoneAdded) {
     ASSERT(descriptors->IsEmpty());
     ASSERT(flag == OMIT_TRANSITION);
     return result;
   } else {
     ASSERT(descriptors->GetDetails(last_added).index() ==
            descriptors->number_of_descriptors());
+    result->set_instance_descriptors(descriptors);
+    result->SetLastAdded(last_added);
   }
 
-  descriptors->SetLastAdded(last_added);
-  result->set_instance_descriptors(descriptors);
-
   if (flag == INSERT_TRANSITION) {
     TransitionArray* transitions;
     MaybeObject* maybe_transitions = AddTransition(name, result);
@@ -4823,8 +4822,8 @@ MaybeObject* Map::CopyWithPreallocatedFieldDescriptors() {
   if (!maybe_descriptors->To(&descriptors)) return maybe_descriptors;
 
   int last_added = initial_descriptors->IsEmpty()
-      ? DescriptorArray::kNoneAdded
-      : initial_descriptors->LastAdded();
+      ? kNoneAdded
+      : initial_map->LastAdded();
 
   return CopyReplaceDescriptors(descriptors, NULL, last_added, OMIT_TRANSITION);
 }
@@ -4836,9 +4835,7 @@ MaybeObject* Map::Copy(DescriptorArray::SharedMode shared_mode) {
   MaybeObject* maybe_descriptors = source_descriptors->Copy(shared_mode);
   if (!maybe_descriptors->To(&descriptors)) return maybe_descriptors;
 
-  int last_added = source_descriptors->IsEmpty()
-      ? DescriptorArray::kNoneAdded
-      : source_descriptors->LastAdded();
+  int last_added = source_descriptors->IsEmpty() ? kNoneAdded : LastAdded();
 
   return CopyReplaceDescriptors(descriptors, NULL, last_added, OMIT_TRANSITION);
 }
@@ -4944,8 +4941,7 @@ MaybeObject* Map::CopyReplaceDescriptor(Descriptor* descriptor,
 
   SLOW_ASSERT(new_descriptors->IsSortedNoDuplicates());
 
-  return CopyReplaceDescriptors(
-      new_descriptors, key, descriptors->LastAdded(), flag);
+  return CopyReplaceDescriptors(new_descriptors, key, LastAdded(), flag);
 }
 
 
@@ -5732,7 +5728,7 @@ MaybeObject* DescriptorArray::Allocate(int number_of_descriptors,
     if (!maybe_array->To(&result)) return maybe_array;
   }
 
-  result->set(kLastAddedIndex, Smi::FromInt(kNoneAdded));
+  result->set(kEnumCacheIndex, Smi::FromInt(Map::kNoneAdded));
   result->set(kTransitionsIndex, Smi::FromInt(0));
   return result;
 }
@@ -5744,9 +5740,9 @@ void DescriptorArray::SetEnumCache(FixedArray* bridge_storage,
   ASSERT(bridge_storage->length() >= kEnumCacheBridgeLength);
   ASSERT(new_index_cache->IsSmi() || new_index_cache->IsFixedArray());
   if (HasEnumCache()) {
-    FixedArray::cast(get(kLastAddedIndex))->
+    FixedArray::cast(get(kEnumCacheIndex))->
         set(kEnumCacheBridgeCacheIndex, new_cache);
-    FixedArray::cast(get(kLastAddedIndex))->
+    FixedArray::cast(get(kEnumCacheIndex))->
        set(kEnumCacheBridgeIndicesCacheIndex, new_index_cache);
   } else {
     if (IsEmpty()) return;  // Do nothing for empty descriptor array.
@@ -5756,8 +5752,8 @@ void DescriptorArray::SetEnumCache(FixedArray* bridge_storage,
     set(kEnumCacheBridgeIndicesCacheIndex, new_index_cache);
     NoWriteBarrierSet(FixedArray::cast(bridge_storage),
                       kEnumCacheBridgeLastAdded,
-                      get(kLastAddedIndex));
-    set(kLastAddedIndex, bridge_storage);
+                      get(kEnumCacheIndex));
+    set(kEnumCacheIndex, bridge_storage);
   }
 }
 
@@ -7226,8 +7222,10 @@ bool Map::EquivalentToForNormalization(Map* other,
       instance_type() == other->instance_type() &&
       bit_field() == other->bit_field() &&
       bit_field2() == other->bit_field2() &&
-      (bit_field3() & ~(1<<Map::kIsShared)) ==
-          (other->bit_field3() & ~(1<<Map::kIsShared));
+      static_cast<uint32_t>(bit_field3()) ==
+          LastAddedBits::update(
+              IsShared::update(other->bit_field3(), true),
+              kNoneAdded);
 }
 
 
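About the last hunk above: the old code masked kIsShared out of both bit_field3 words before comparing; the new code instead takes the other map's bit_field3, forces IsShared to true and LastAdded to kNoneAdded, and compares for exact equality. Assuming the receiver is a normalized map that already carries those canonical values, the two formulations ignore exactly the same bits. A small self-contained check of that equivalence, using raw masks for the field positions declared in this diff (IsShared at bit 0, LastAddedBits at bits 2..12):

#include <cassert>
#include <cstdint>

int main() {
  const uint32_t kIsSharedMask = 1u << 0;          // IsShared: bit 0
  const uint32_t kLastAddedMask = 0x7FFu << 2;     // LastAddedBits: bits 2..12
  const uint32_t kNoneAddedBits = kLastAddedMask;  // kNoneAdded == kMax: all ones

  // A cached normalized map: shared, LastAdded at the sentinel, plus one
  // arbitrary extra bit standing in for the remaining flags.
  uint32_t normalized = kIsSharedMask | kNoneAddedBits | (1u << 13);

  // A fast map that differs from it only in those two fields.
  uint32_t fast = (3u << 2) | (1u << 13);

  // IsShared::update(fast, true) followed by
  // LastAddedBits::update(..., kNoneAdded), then an exact comparison.
  uint32_t canonical = ((fast | kIsSharedMask) & ~kLastAddedMask) | kNoneAddedBits;
  assert(normalized == canonical);
  return 0;
}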
@@ -2472,43 +2472,20 @@ class DescriptorArray: public FixedArray {
   inline int number_of_entries() { return number_of_descriptors(); }
   inline int NextEnumerationIndex() { return number_of_descriptors() + 1; }
 
-  int LastAdded() {
-    ASSERT(!IsEmpty());
-    Object* obj = get(kLastAddedIndex);
-    if (obj->IsSmi()) {
-      return Smi::cast(obj)->value();
-    } else {
-      Object* index = FixedArray::cast(obj)->get(kEnumCacheBridgeLastAdded);
-      return Smi::cast(index)->value();
-    }
-  }
-
-  // Set index of the last added descriptor and flush any enum cache.
-  void SetLastAdded(int index) {
-    ASSERT(!IsEmpty() || index > 0);
-    set(kLastAddedIndex, Smi::FromInt(index));
-  }
-
-  int NumberOfSetDescriptors() {
-    ASSERT(!IsEmpty());
-    if (LastAdded() == kNoneAdded) return 0;
-    return GetDetails(LastAdded()).index();
-  }
-
   bool HasEnumCache() {
-    return !IsEmpty() && !get(kLastAddedIndex)->IsSmi();
+    return !IsEmpty() && !get(kEnumCacheIndex)->IsSmi();
   }
 
   Object* GetEnumCache() {
     ASSERT(HasEnumCache());
-    FixedArray* bridge = FixedArray::cast(get(kLastAddedIndex));
+    FixedArray* bridge = FixedArray::cast(get(kEnumCacheIndex));
     return bridge->get(kEnumCacheBridgeCacheIndex);
   }
 
   Object** GetEnumCacheSlot() {
     ASSERT(HasEnumCache());
     return HeapObject::RawField(reinterpret_cast<HeapObject*>(this),
-                                kLastAddedOffset);
+                                kEnumCacheOffset);
   }
 
   Object** GetTransitionsSlot() {
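With LastAdded gone from the array, the renamed slot above is purely the enum-cache slot, and HasEnumCache keeps its old convention: a Smi in the slot means no cache, the enum-cache bridge FixedArray means a cache is installed (the Allocate hunk earlier stores Smi::FromInt(Map::kNoneAdded) as the initial placeholder). A stub model of that tagging convention, with illustrative classes rather than the real heap objects:

#include <cassert>

// Illustrative stand-ins for the tagged values the slot can hold.
struct Object {
  virtual bool IsSmi() const { return false; }
  virtual ~Object() {}
};
struct Smi : Object {
  int value;
  explicit Smi(int v) : value(v) {}
  bool IsSmi() const override { return true; }
};
struct FixedArray : Object {};  // plays the enum-cache bridge array

// Mirrors the check in DescriptorArray::HasEnumCache() above (minus the
// IsEmpty() guard): any Smi in the slot means "no enum cache yet".
bool HasEnumCache(const Object* enum_cache_slot) {
  return !enum_cache_slot->IsSmi();
}

int main() {
  Smi placeholder(0);   // freshly allocated array: a Smi fills the slot
  FixedArray bridge;    // after SetEnumCache(): the bridge array is stored
  assert(!HasEnumCache(&placeholder));
  assert(HasEnumCache(&bridge));
  return 0;
}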
@@ -2590,11 +2567,8 @@ class DescriptorArray: public FixedArray {
   // Constant for denoting key was not found.
   static const int kNotFound = -1;
 
-  // Constant for denoting that the LastAdded field was not yet set.
-  static const int kNoneAdded = -1;
-
   static const int kBackPointerStorageIndex = 0;
-  static const int kLastAddedIndex = 1;
+  static const int kEnumCacheIndex = 1;
   static const int kTransitionsIndex = 2;
   static const int kFirstIndex = 3;
 
@@ -2606,9 +2580,9 @@ class DescriptorArray: public FixedArray {
   // Layout description.
   static const int kBackPointerStorageOffset = FixedArray::kHeaderSize;
-  static const int kLastAddedOffset = kBackPointerStorageOffset +
+  static const int kEnumCacheOffset = kBackPointerStorageOffset +
                                       kPointerSize;
-  static const int kTransitionsOffset = kLastAddedOffset + kPointerSize;
+  static const int kTransitionsOffset = kEnumCacheOffset + kPointerSize;
   static const int kFirstOffset = kTransitionsOffset + kPointerSize;
 
   // Layout description for the bridge array.
@@ -4674,6 +4648,10 @@ class Map: public HeapObject {
   inline int bit_field3();
   inline void set_bit_field3(int value);
 
+  class IsShared: public BitField<bool, 0, 1> {};
+  class FunctionWithPrototype: public BitField<bool, 1, 1> {};
+  class LastAddedBits: public BitField<int, 2, 11> {};
+
   // Tells whether the object in the prototype property will be used
   // for instances created from this function. If the prototype
   // property is set to a value that is not a JSObject, the prototype
@@ -4898,6 +4876,20 @@ class Map: public HeapObject {
                                            String* name,
                                            LookupResult* result);
 
+  void SetLastAdded(int index) {
+    set_bit_field3(LastAddedBits::update(bit_field3(), index));
+  }
+
+  int LastAdded() {
+    return LastAddedBits::decode(bit_field3());
+  }
+
+  int NumberOfSetDescriptors() {
+    ASSERT(!instance_descriptors()->IsEmpty());
+    if (LastAdded() == kNoneAdded) return 0;
+    return instance_descriptors()->GetDetails(LastAdded()).index();
+  }
+
   MUST_USE_RESULT MaybeObject* RawCopy(int instance_size);
   MUST_USE_RESULT MaybeObject* CopyWithPreallocatedFieldDescriptors();
   MUST_USE_RESULT MaybeObject* CopyDropDescriptors();
@@ -5033,6 +5025,9 @@ class Map: public HeapObject {
 
   static const int kMaxPreAllocatedPropertyFields = 255;
 
+  // Constant for denoting that the LastAdded field was not yet set.
+  static const int kNoneAdded = LastAddedBits::kMax;
+
   // Layout description.
   static const int kInstanceSizesOffset = HeapObject::kHeaderSize;
   static const int kInstanceAttributesOffset = kInstanceSizesOffset + kIntSize;
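A note on the kNoneAdded hunk above: the old DescriptorArray constant was -1, but an unsigned 11-bit field cannot hold -1, so the Map-level sentinel is the field's maximum value instead, and freshly allocated maps are initialized with exactly that encoding (see the heap.cc hunks earlier). A short sketch of the arithmetic, assuming BitField<int, 2, 11> as declared above:

#include <cassert>
#include <cstdint>

int main() {
  const int kShift = 2;                 // LastAddedBits starts at bit 2
  const int kSize = 11;                 // and is 11 bits wide
  const int kMax = (1 << kSize) - 1;    // 2047, the largest storable value
  const int kNoneAdded = kMax;          // reserved as "nothing added yet"

  // The raw value the set_bit_field3 calls in Heap::AllocateMap encode
  // under this layout: all flag bits clear, LastAdded at the sentinel.
  uint32_t bit_field3 = static_cast<uint32_t>(kNoneAdded) << kShift;
  assert(bit_field3 == 8188u);          // 2047 << 2

  // The sentinel round-trips through the field like any other index.
  assert(static_cast<int>((bit_field3 >> kShift) & kMax) == kNoneAdded);
  return 0;
}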
@@ -161,8 +161,8 @@ void TransitionArray::SetTarget(int transition_number, Map* value) {
 PropertyDetails TransitionArray::GetTargetDetails(int transition_number) {
   Map* map = GetTarget(transition_number);
   DescriptorArray* descriptors = map->instance_descriptors();
-  int descriptor = descriptors->LastAdded();
-  ASSERT(descriptor != DescriptorArray::kNotFound);
+  int descriptor = map->LastAdded();
+  ASSERT(descriptor != Map::kNoneAdded);
   return descriptors->GetDetails(descriptor);
 }
 
@@ -1111,7 +1111,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
   // We got a map in register rax. Get the enumeration cache from it.
   __ bind(&use_cache);
   __ LoadInstanceDescriptors(rax, rcx);
-  __ movq(rcx, FieldOperand(rcx, DescriptorArray::kLastAddedOffset));
+  __ movq(rcx, FieldOperand(rcx, DescriptorArray::kEnumCacheOffset));
   __ movq(rdx, FieldOperand(rcx, DescriptorArray::kEnumCacheBridgeCacheOffset));
 
   // Set up the four remaining stack slots.
@@ -5007,7 +5007,7 @@ void LCodeGen::DoForInCacheArray(LForInCacheArray* instr) {
   Register result = ToRegister(instr->result());
   __ LoadInstanceDescriptors(map, result);
   __ movq(result,
-          FieldOperand(result, DescriptorArray::kLastAddedOffset));
+          FieldOperand(result, DescriptorArray::kEnumCacheOffset));
   __ movq(result,
           FieldOperand(result, FixedArray::SizeFor(instr->idx())));
   Condition cc = masm()->CheckSmi(result);
@@ -4473,7 +4473,7 @@ void MacroAssembler::CheckEnumCache(Register null_value, Label* call_runtime) {
   // Check that there is an enum cache in the non-empty instance
   // descriptors (rdx). This is the case if the next enumeration
   // index field does not contain a smi.
-  movq(rdx, FieldOperand(rdx, DescriptorArray::kLastAddedOffset));
+  movq(rdx, FieldOperand(rdx, DescriptorArray::kEnumCacheOffset));
   JumpIfSmi(rdx, call_runtime);
 
   // For all objects but the receiver, check that the cache is empty.
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
@@ -158,15 +158,14 @@ TEST(StressJS) {
   Handle<DescriptorArray> new_descriptors = FACTORY->NewDescriptorArray(1);
 
   v8::internal::DescriptorArray::WhitenessWitness witness(*new_descriptors);
+  map->set_instance_descriptors(*new_descriptors);
 
   CallbacksDescriptor d(*name,
                         *foreign,
                         static_cast<PropertyAttributes>(0),
                         v8::internal::PropertyDetails::kInitialIndex);
-  new_descriptors->Set(0, &d, witness);
-  new_descriptors->SetLastAdded(0);
+  map->AppendDescriptor(&d, witness);
 
-  map->set_instance_descriptors(*new_descriptors);
   // Add the Foo constructor the global object.
   env->Global()->Set(v8::String::New("Foo"), v8::Utils::ToLocal(function));
   // Call the accessor through JavaScript.