Allocate room for the expected number of properties based on the
constructor in the JSObject. This removes the need to allocate
a properties array if the object is never assigned any extra
properties.

Review URL: http://codereview.chromium.org/7341

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@501 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
This commit is contained in:
iposva@chromium.org 2008-10-15 06:03:26 +00:00
parent 09abba56ff
commit d09fcf70b7
11 changed files with 199 additions and 71 deletions

View File

@ -1161,7 +1161,7 @@ void Genesis::TransferNamedProperties(Handle<JSObject> from,
HandleScope inner;
Handle<String> key = Handle<String>(stream.GetKey());
int index = stream.GetFieldIndex();
Handle<Object> value = Handle<Object>(from->properties()->get(index));
Handle<Object> value = Handle<Object>(from->FastPropertyAt(index));
SetProperty(to, key, value, details.attributes());
break;
}

View File

@ -111,6 +111,7 @@ void Builtins::Generate_JSConstructCall(MacroAssembler* masm) {
// edi: constructor
// eax: initial map
__ movzx_b(edi, FieldOperand(eax, Map::kInstanceSizeOffset));
__ shl(edi, kPointerSizeLog2);
// Make sure that the maximum heap object size will never cause us
// problem here, because it is always greater than the maximum
// instance size that can be represented in a byte.
@ -163,8 +164,11 @@ void Builtins::Generate_JSConstructCall(MacroAssembler* masm) {
// ebx: JSObject
// edi: start of next object
__ movzx_b(edx, FieldOperand(eax, Map::kUnusedPropertyFieldsOffset));
__ movzx_b(ecx, FieldOperand(eax, Map::kInObjectPropertiesOffset));
// Calculate unused properties past the end of the in-object properties.
__ sub(edx, Operand(ecx));
__ test(edx, Operand(edx));
// Done if no unused properties are to be allocated.
// Done if no extra properties are to be allocated.
__ j(zero, &allocated);
// Scale the number of elements by pointer size and add the header for

View File

@ -843,6 +843,7 @@ Object* Heap::AllocatePartialMap(InstanceType instance_type,
reinterpret_cast<Map*>(result)->set_map(meta_map());
reinterpret_cast<Map*>(result)->set_instance_type(instance_type);
reinterpret_cast<Map*>(result)->set_instance_size(instance_size);
reinterpret_cast<Map*>(result)->set_inobject_properties(0);
reinterpret_cast<Map*>(result)->set_unused_property_fields(0);
return result;
}
@ -858,6 +859,7 @@ Object* Heap::AllocateMap(InstanceType instance_type, int instance_size) {
map->set_prototype(null_value());
map->set_constructor(null_value());
map->set_instance_size(instance_size);
map->set_inobject_properties(0);
map->set_instance_descriptors(empty_descriptor_array());
map->set_code_cache(empty_fixed_array());
map->set_unused_property_fields(0);
@ -1661,8 +1663,11 @@ Object* Heap::AllocateArgumentsObject(Object* callee, int length) {
Object* Heap::AllocateInitialMap(JSFunction* fun) {
ASSERT(!fun->has_initial_map());
// First create a new map.
Object* map_obj = Heap::AllocateMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
// First create a new map with the expected number of properties being
// allocated in-object.
int expected_nof_properties = fun->shared()->expected_nof_properties();
Object* map_obj = Heap::AllocateMap(JS_OBJECT_TYPE,
JSObject::kHeaderSize + expected_nof_properties * kPointerSize);
if (map_obj->IsFailure()) return map_obj;
// Fetch or allocate prototype.
@ -1674,7 +1679,8 @@ Object* Heap::AllocateInitialMap(JSFunction* fun) {
if (prototype->IsFailure()) return prototype;
}
Map* map = Map::cast(map_obj);
map->set_unused_property_fields(fun->shared()->expected_nof_properties());
map->set_inobject_properties(expected_nof_properties);
map->set_unused_property_fields(expected_nof_properties);
map->set_prototype(prototype);
return map;
}
@ -1702,7 +1708,8 @@ Object* Heap::AllocateJSObjectFromMap(Map* map, PretenureFlag pretenure) {
ASSERT(map->instance_type() != JS_FUNCTION_TYPE);
// Allocate the backing storage for the properties.
Object* properties = AllocateFixedArray(map->unused_property_fields());
int prop_size = map->unused_property_fields() - map->inobject_properties();
Object* properties = AllocateFixedArray(prop_size);
if (properties->IsFailure()) return properties;
// Allocate the JSObject.
@ -1751,7 +1758,8 @@ Object* Heap::ReinitializeJSGlobalObject(JSFunction* constructor,
ASSERT(map->instance_size() == object->map()->instance_size());
// Allocate the backing storage for the properties.
Object* properties = AllocateFixedArray(map->unused_property_fields());
int prop_size = map->unused_property_fields() - map->inobject_properties();
Object* properties = AllocateFixedArray(prop_size);
if (properties->IsFailure()) return properties;
// Reset the map for the object.

View File

@ -263,7 +263,7 @@ void JSObject::PrintProperties() {
r.GetKey()->StringPrint();
PrintF(": ");
if (r.type() == FIELD) {
properties()->get(r.GetFieldIndex())->ShortPrint();
FastPropertyAt(r.GetFieldIndex())->ShortPrint();
PrintF(" (field at offset %d)\n", r.GetFieldIndex());
} else if (r.type() == CONSTANT_FUNCTION) {
r.GetConstantFunction()->ShortPrint();
@ -313,7 +313,8 @@ void JSObject::JSObjectVerify() {
VerifyHeapPointer(elements());
if (HasFastProperties()) {
CHECK(map()->unused_property_fields() ==
(properties()->length() - map()->NextFreePropertyIndex()));
(map()->inobject_properties() + properties()->length() -
map()->NextFreePropertyIndex()));
}
}

View File

@ -898,24 +898,64 @@ int JSObject::GetHeaderSize() {
int JSObject::GetInternalFieldCount() {
ASSERT(1 << kPointerSizeLog2 == kPointerSize);
return (Size() - GetHeaderSize()) >> kPointerSizeLog2;
// Make sure to adjust for the number of in-object properties. These
// properties do contribute to the size, but are not internal fields.
return ((Size() - GetHeaderSize()) >> kPointerSizeLog2) -
map()->inobject_properties();
}
Object* JSObject::GetInternalField(int index) {
ASSERT(index < GetInternalFieldCount() && index >= 0);
// Internal objects do follow immediately after the header, whereas in-object
// properties are at the end of the object. Therefore there is no need
// to adjust the index here.
return READ_FIELD(this, GetHeaderSize() + (kPointerSize * index));
}
void JSObject::SetInternalField(int index, Object* value) {
ASSERT(index < GetInternalFieldCount() && index >= 0);
// Internal objects do follow immediately after the header, whereas in-object
// properties are at the end of the object. Therefore there is no need
// to adjust the index here.
int offset = GetHeaderSize() + (kPointerSize * index);
WRITE_FIELD(this, offset, value);
WRITE_BARRIER(this, offset);
}
// Access fast-case object properties at index. The use of these routines
// is needed to correctly distinguish between properties stored in-object and
// properties stored in the properties array.
inline Object* JSObject::FastPropertyAt(int index) {
// Adjust for the number of properties stored in the object.
index -= map()->inobject_properties();
if (index < 0) {
int offset = map()->instance_size() + (index * kPointerSize);
return READ_FIELD(this, offset);
} else {
ASSERT(index < properties()->length());
return properties()->get(index);
}
}
inline Object* JSObject::FastPropertyAtPut(int index, Object* value) {
// Adjust for the number of properties stored in the object.
index -= map()->inobject_properties();
if (index < 0) {
int offset = map()->instance_size() + (index * kPointerSize);
WRITE_FIELD(this, offset, value);
WRITE_BARRIER(this, offset);
} else {
ASSERT(index < properties()->length());
properties()->set(index, value);
}
return value;
}
void JSObject::InitializeBody(int object_size) {
for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
WRITE_FIELD(this, offset, Heap::undefined_value());
@ -1529,7 +1569,12 @@ Address ByteArray::GetDataStartAddress() {
int Map::instance_size() {
return READ_BYTE_FIELD(this, kInstanceSizeOffset);
return READ_BYTE_FIELD(this, kInstanceSizeOffset) << kPointerSizeLog2;
}
int Map::inobject_properties() {
return READ_BYTE_FIELD(this, kInObjectPropertiesOffset);
}
@ -1546,11 +1591,19 @@ int HeapObject::SizeFromMap(Map* map) {
void Map::set_instance_size(int value) {
ASSERT((value & ~(kPointerSize - 1)) == value);
value >>= kPointerSizeLog2;
ASSERT(0 <= value && value < 256);
WRITE_BYTE_FIELD(this, kInstanceSizeOffset, static_cast<byte>(value));
}
void Map::set_inobject_properties(int value) {
ASSERT(0 <= value && value < 256);
WRITE_BYTE_FIELD(this, kInObjectPropertiesOffset, static_cast<byte>(value));
}
InstanceType Map::instance_type() {
return static_cast<InstanceType>(READ_BYTE_FIELD(this, kInstanceTypeOffset));
}

View File

@ -366,7 +366,7 @@ Object* Object::GetProperty(Object* receiver,
ASSERT(!value->IsTheHole() || result->IsReadOnly());
return value->IsTheHole() ? Heap::undefined_value() : value;
case FIELD:
value = holder->properties()->get(result->GetFieldIndex());
value = holder->FastPropertyAt(result->GetFieldIndex());
ASSERT(!value->IsTheHole() || result->IsReadOnly());
return value->IsTheHole() ? Heap::undefined_value() : value;
case CONSTANT_FUNCTION:
@ -936,20 +936,16 @@ Object* JSObject::AddFastPropertyUsingMap(Map* new_map,
String* name,
Object* value) {
int index = new_map->PropertyIndexFor(name);
if (map()->unused_property_fields() > 0) {
ASSERT(index < properties()->length());
properties()->set(index, value);
} else {
if (map()->unused_property_fields() == 0) {
ASSERT(map()->unused_property_fields() == 0);
int new_unused = new_map->unused_property_fields();
Object* values =
properties()->CopySize(properties()->length() + new_unused + 1);
if (values->IsFailure()) return values;
FixedArray::cast(values)->set(index, value);
set_properties(FixedArray::cast(values));
}
set_map(new_map);
return value;
return FastPropertyAtPut(index, value);
}
@ -980,7 +976,8 @@ Object* JSObject::AddFastProperty(String* name,
!old_descriptors->Contains(name) &&
(Top::context()->global_context()->object_function()->map() != map());
ASSERT(index < properties()->length() ||
ASSERT(index < map()->inobject_properties() ||
(index - map()->inobject_properties()) < properties()->length() ||
map()->unused_property_fields() == 0);
// Allocate a new map for the object.
Object* r = map()->Copy();
@ -994,7 +991,6 @@ Object* JSObject::AddFastProperty(String* name,
old_descriptors = DescriptorArray::cast(r);
}
if (map()->unused_property_fields() == 0) {
if (properties()->length() > kMaxFastProperties) {
Object* obj = NormalizeProperties();
@ -1015,9 +1011,7 @@ Object* JSObject::AddFastProperty(String* name,
map()->set_instance_descriptors(old_descriptors);
new_map->set_instance_descriptors(DescriptorArray::cast(new_descriptors));
set_map(new_map);
properties()->set(index, value);
return value;
return FastPropertyAtPut(index, value);
}
@ -1211,8 +1205,7 @@ Object* JSObject::ConvertDescriptorToField(String* name,
if (new_properties) {
set_properties(FixedArray::cast(new_properties));
}
properties()->set(index, new_value);
return new_value;
return FastPropertyAtPut(index, new_value);
}
@ -1377,7 +1370,7 @@ void JSObject::LocalLookupRealNamedProperty(String* name,
// Disallow caching for uninitialized constants. These can only
// occur as fields.
if (result->IsReadOnly() && result->type() == FIELD &&
properties()->get(result->GetFieldIndex())->IsTheHole()) {
FastPropertyAt(result->GetFieldIndex())->IsTheHole()) {
result->DisallowCaching();
}
return;
@ -1514,8 +1507,7 @@ Object* JSObject::SetProperty(LookupResult* result,
property_dictionary()->ValueAtPut(result->GetDictionaryEntry(), value);
return value;
case FIELD:
properties()->set(result->GetFieldIndex(), value);
return value;
return FastPropertyAtPut(result->GetFieldIndex(), value);
case MAP_TRANSITION:
if (attributes == result->GetAttributes()) {
// Only use map transition if the attributes match.
@ -1585,8 +1577,7 @@ Object* JSObject::IgnoreAttributesAndSetLocalProperty(
property_dictionary()->ValueAtPut(result->GetDictionaryEntry(), value);
return value;
case FIELD:
properties()->set(result->GetFieldIndex(), value);
return value;
return FastPropertyAtPut(result->GetFieldIndex(), value);
case MAP_TRANSITION:
if (attributes == result->GetAttributes()) {
// Only use map transition if the attributes match.
@ -1780,7 +1771,7 @@ Object* JSObject::NormalizeProperties() {
case FIELD: {
PropertyDetails d =
PropertyDetails(details.attributes(), NORMAL, details.index());
Object* value = properties()->get(r.GetFieldIndex());
Object* value = FastPropertyAt(r.GetFieldIndex());
Object* result = dictionary->AddStringEntry(r.GetKey(), value, d);
if (result->IsFailure()) return result;
dictionary = Dictionary::cast(result);
@ -2335,7 +2326,7 @@ Object* JSObject::SlowReverseLookup(Object* value) {
!r.eos();
r.advance()) {
if (r.type() == FIELD) {
if (properties()->get(r.GetFieldIndex()) == value) {
if (FastPropertyAt(r.GetFieldIndex()) == value) {
return r.GetKey();
}
} else if (r.type() == CONSTANT_FUNCTION) {
@ -2359,6 +2350,7 @@ Object* Map::Copy() {
// Don't copy descriptors, so map transitions always remain a forest.
Map::cast(result)->set_instance_descriptors(Heap::empty_descriptor_array());
// Please note instance_type and instance_size are set when allocated.
Map::cast(result)->set_inobject_properties(inobject_properties());
Map::cast(result)->set_unused_property_fields(unused_property_fields());
Map::cast(result)->set_bit_field(bit_field());
Map::cast(result)->ClearCodeCache();

View File

@ -1349,6 +1349,11 @@ class JSObject: public HeapObject {
// Returns failure if allocation failed.
Object* TransformToFastProperties(int unused_property_fields);
// Access fast-case object properties at index.
inline Object* FastPropertyAt(int index);
inline Object* FastPropertyAtPut(int index, Object* value);
// initializes the body after properties slot, properties slot is
// initialized by set_properties
// Note: this call does not update write barrier, it is caller's
@ -2254,6 +2259,10 @@ class Map: public HeapObject {
inline int instance_size();
inline void set_instance_size(int value);
// Count of properties allocated in the object.
inline int inobject_properties();
inline void set_inobject_properties(int value);
// instance type.
inline InstanceType instance_type();
inline void set_instance_type(InstanceType value);
@ -2396,7 +2405,8 @@ class Map: public HeapObject {
#endif
// Layout description.
static const int kInstanceAttributesOffset = HeapObject::kHeaderSize;
static const int kInstanceSizesOffset = HeapObject::kHeaderSize;
static const int kInstanceAttributesOffset = kInstanceSizesOffset + kIntSize;
static const int kPrototypeOffset = kInstanceAttributesOffset + kIntSize;
static const int kConstructorOffset = kPrototypeOffset + kPointerSize;
static const int kInstanceDescriptorsOffset =
@ -2404,11 +2414,17 @@ class Map: public HeapObject {
static const int kCodeCacheOffset = kInstanceDescriptorsOffset + kPointerSize;
static const int kSize = kCodeCacheOffset + kIntSize;
// Byte offsets within kInstanceSizesOffset.
static const int kInstanceSizeOffset = kInstanceSizesOffset + 0;
static const int kInObjectPropertiesOffset = kInstanceSizesOffset + 1;
// The bytes at positions 2 and 3 are not in use at the moment.
// Byte offsets within kInstanceAttributesOffset attributes.
static const int kInstanceSizeOffset = kInstanceAttributesOffset + 0;
static const int kInstanceTypeOffset = kInstanceAttributesOffset + 1;
static const int kUnusedPropertyFieldsOffset = kInstanceAttributesOffset + 2;
static const int kBitFieldOffset = kInstanceAttributesOffset + 3;
static const int kInstanceTypeOffset = kInstanceAttributesOffset + 0;
static const int kUnusedPropertyFieldsOffset = kInstanceAttributesOffset + 1;
static const int kBitFieldOffset = kInstanceAttributesOffset + 2;
// The byte at position 3 is not in use at the moment.
// Bit positions for bit field.
static const int kUnused = 0; // To be used for marking recently used maps.

View File

@ -3978,7 +3978,7 @@ static Object* DebugLookupResultValue(LookupResult* result) {
case FIELD:
value =
JSObject::cast(
result->holder())->properties()->get(result->GetFieldIndex());
result->holder())->FastPropertyAt(result->GetFieldIndex());
if (value->IsTheHole()) {
return Heap::undefined_value();
}

View File

@ -317,7 +317,7 @@ void StringStream::PrintUsingMap(JSObject* js_object) {
key->ShortPrint();
}
Add(": ");
Object* value = js_object->properties()->get(r.GetFieldIndex());
Object* value = js_object->FastPropertyAt(r.GetFieldIndex());
Add("%o\n", value);
}
}

View File

@ -421,8 +421,15 @@ Object* StoreStubCompiler::CompileStoreField(JSObject* object,
Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_ExtendStorage));
__ Jump(ic, RelocInfo::CODE_TARGET);
} else {
// Get the properties array
__ ldr(r1, FieldMemOperand(r3, JSObject::kPropertiesOffset));
// Adjust for the number of properties stored in the object. Even in the
// face of a transition we can use the old map here because the size of the
// object and the number of in-object properties is not going to change.
index -= object->map()->inobject_properties();
if (index >= 0) {
// Get the properties array
__ ldr(r1, FieldMemOperand(r3, JSObject::kPropertiesOffset));
}
if (transition != NULL) {
// Update the map of the object; no write barrier updating is
@ -431,17 +438,31 @@ Object* StoreStubCompiler::CompileStoreField(JSObject* object,
__ str(ip, FieldMemOperand(r3, HeapObject::kMapOffset));
}
// Write to the properties array.
int offset = index * kPointerSize + Array::kHeaderSize;
__ str(r0, FieldMemOperand(r1, offset));
if (index < 0) {
// Set the property straight into the object.
int offset = object->map()->instance_size() + (index * kPointerSize);
__ str(r0, FieldMemOperand(r3, offset));
// Skip updating write barrier if storing a smi.
__ tst(r0, Operand(kSmiTagMask));
__ b(eq, &exit);
// Update the write barrier for the array address.
__ mov(r1, Operand(offset));
__ RecordWrite(r3, r1, r2);
} else {
// Write to the properties array.
int offset = index * kPointerSize + Array::kHeaderSize;
__ str(r0, FieldMemOperand(r1, offset));
// Skip updating write barrier if storing a smi.
__ tst(r0, Operand(kSmiTagMask));
__ b(eq, &exit);
// Skip updating write barrier if storing a smi.
__ tst(r0, Operand(kSmiTagMask));
__ b(eq, &exit);
// Update the write barrier for the array address.
__ mov(r3, Operand(offset));
__ RecordWrite(r1, r3, r2); // OK to clobber r2, since we return
// Update the write barrier for the array address.
__ mov(r3, Operand(offset));
__ RecordWrite(r1, r3, r2); // OK to clobber r2, since we return
}
// Return the value (register r0).
__ bind(&exit);
@ -590,12 +611,19 @@ Object* LoadStubCompiler::CompileLoadField(JSObject* object,
// Check that the maps haven't changed.
Register reg = __ CheckMaps(object, r0, holder, r3, r1, &miss);
// Get the properties array of the holder.
__ ldr(r3, FieldMemOperand(reg, JSObject::kPropertiesOffset));
// Return the value from the properties array.
int offset = index * kPointerSize + Array::kHeaderSize;
__ ldr(r0, FieldMemOperand(r3, offset));
// Adjust for the number of properties stored in the holder.
index -= holder->map()->inobject_properties();
if (index < 0) {
// Get the property straight out of the holder.
int offset = holder->map()->instance_size() + (index * kPointerSize);
__ ldr(r0, FieldMemOperand(reg, offset));
} else {
// Get the properties array of the holder.
__ ldr(r3, FieldMemOperand(reg, JSObject::kPropertiesOffset));
// Return the value from the properties array.
int offset = index * kPointerSize + Array::kHeaderSize;
__ ldr(r0, FieldMemOperand(r3, offset));
}
__ Ret();
// Handle load cache miss.

View File

@ -254,12 +254,19 @@ void StubCompiler::GenerateLoadField(MacroAssembler* masm,
Register reg =
__ CheckMaps(object, receiver, holder, scratch1, scratch2, miss_label);
// Get the properties array of the holder.
__ mov(scratch1, FieldOperand(reg, JSObject::kPropertiesOffset));
// Return the value from the properties array.
int offset = index * kPointerSize + Array::kHeaderSize;
__ mov(eax, FieldOperand(scratch1, offset));
// Adjust for the number of properties stored in the holder.
index -= holder->map()->inobject_properties();
if (index < 0) {
// Get the property straight out of the holder.
int offset = holder->map()->instance_size() + (index * kPointerSize);
__ mov(eax, FieldOperand(reg, offset));
} else {
// Get the properties array of the holder.
__ mov(scratch1, FieldOperand(reg, JSObject::kPropertiesOffset));
// Return the value from the properties array.
int offset = index * kPointerSize + Array::kHeaderSize;
__ mov(eax, FieldOperand(scratch1, offset));
}
__ ret(0);
}
@ -399,8 +406,16 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm,
return;
}
// Get the properties array (optimistically).
__ mov(scratch, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
// Adjust for the number of properties stored in the object. Even in the
// face of a transition we can use the old map here because the size of the
// object and the number of in-object properties is not going to change.
index -= object->map()->inobject_properties();
if (index >= 0) {
// Get the properties array (optimistically).
__ mov(scratch, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
}
if (transition != NULL) {
// Update the map of the object; no write barrier updating is
// needed because the map is never in new space.
@ -408,14 +423,25 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm,
Immediate(Handle<Map>(transition)));
}
// Write to the properties array.
int offset = index * kPointerSize + Array::kHeaderSize;
__ mov(FieldOperand(scratch, offset), eax);
if (index < 0) {
// Set the property straight into the object.
int offset = object->map()->instance_size() + (index * kPointerSize);
__ mov(FieldOperand(receiver_reg, offset), eax);
// Update the write barrier for the array address.
// Pass the value being stored in the now unused name_reg.
__ mov(name_reg, Operand(eax));
__ RecordWrite(scratch, offset, name_reg, receiver_reg);
// Update the write barrier for the array address.
// Pass the value being stored in the now unused name_reg.
__ mov(name_reg, Operand(eax));
__ RecordWrite(receiver_reg, offset, name_reg, scratch);
} else {
// Write to the properties array.
int offset = index * kPointerSize + Array::kHeaderSize;
__ mov(FieldOperand(scratch, offset), eax);
// Update the write barrier for the array address.
// Pass the value being stored in the now unused name_reg.
__ mov(name_reg, Operand(eax));
__ RecordWrite(scratch, offset, name_reg, receiver_reg);
}
// Return the value (register eax).
__ ret(0);