Add "has fast elements" bit to maps and use it in inlined keyed loads.

A potential issue with this change is that it can create lots of maps
when objects flip between fast and slow elements modes.  We could add
special transitions to avoid this, but testing on our benchmarks, Gmail,
and Wave indicates that this is not a problem in practice.
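In outline: the map now caches whether the object's elements are a plain
FixedArray, so the inlined keyed-load fast path can guard on a single
map-word comparison instead of separately checking the elements' map. A
minimal sketch of the idea, using simplified hypothetical types rather than
V8's real classes (the real accessors are in the objects.h hunk below):

    // Sketch only: a stand-in for V8's Map, not the actual code.
    struct Map {
      unsigned bit_field2;
      static const int kHasFastElements = 2;  // bit position, mirroring objects.h
      bool has_fast_elements() const {
        return (bit_field2 & (1u << kHasFastElements)) != 0;
      }
    };
    // A map may only carry this bit while the elements really are a plain
    // FixedArray, so code specialized on a map with the bit set can demote
    // the separate elements-map check to a debug-only assert.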

Review URL: http://codereview.chromium.org/2870018

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@4941 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
vitalyr@chromium.org 2010-06-24 13:56:35 +00:00
parent 1e3743a4dd
commit 8ab6832203
18 changed files with 187 additions and 52 deletions

src/api.cc

@@ -2606,6 +2606,8 @@ void v8::Object::SetIndexedPropertiesToPixelData(uint8_t* data, int length) {
return;
}
i::Handle<i::PixelArray> pixels = i::Factory::NewPixelArray(length, data);
+self->set_map(
+*i::Factory::GetSlowElementsMap(i::Handle<i::Map>(self->map())));
self->set_elements(*pixels);
}
@@ -2659,6 +2661,8 @@ void v8::Object::SetIndexedPropertiesToExternalArrayData(
}
i::Handle<i::ExternalArray> array =
i::Factory::NewExternalArray(length, array_type, data);
+self->set_map(
+*i::Factory::GetSlowElementsMap(i::Handle<i::Map>(self->map())));
self->set_elements(*array);
}

src/arm/assembler-arm.h

@@ -1110,6 +1110,7 @@ class Assembler : public Malloced {
void EndBlockConstPool() {
const_pool_blocked_nesting_--;
}
+bool is_const_pool_blocked() const { return const_pool_blocked_nesting_ > 0; }
private:
// Code buffer:

src/arm/codegen-arm.cc

@@ -6123,10 +6123,12 @@ void CodeGenerator::EmitKeyedLoad() {
// Get the elements array from the receiver and check that it
// is not a dictionary.
__ ldr(scratch1, FieldMemOperand(receiver, JSObject::kElementsOffset));
+if (FLAG_debug_code) {
__ ldr(scratch2, FieldMemOperand(scratch1, JSObject::kMapOffset));
__ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
__ cmp(scratch2, ip);
-deferred->Branch(ne);
+__ Assert(eq, "JSObject with fast elements map has slow elements");
+}
// Check that key is within bounds. Use unsigned comparison to handle
// negative keys.
@@ -6147,7 +6149,7 @@ void CodeGenerator::EmitKeyedLoad() {
__ mov(r0, scratch1);
// Make sure that the expected number of instructions are generated.
-ASSERT_EQ(kInlinedKeyedLoadInstructionsAfterPatch,
+ASSERT_EQ(GetInlinedKeyedLoadInstructionsAfterPatch(),
masm_->InstructionsGeneratedSince(&check_inlined_codesize));
}

src/arm/codegen-arm.h

@@ -276,7 +276,9 @@ class CodeGenerator: public AstVisitor {
static int InlineRuntimeCallArgumentsCount(Handle<String> name);
// Constants related to patching of inlined load/store.
-static const int kInlinedKeyedLoadInstructionsAfterPatch = 17;
+static int GetInlinedKeyedLoadInstructionsAfterPatch() {
+return FLAG_debug_code ? 27 : 13;
+}
static const int kInlinedKeyedStoreInstructionsAfterPatch = 5;
private:

src/arm/ic-arm.cc

@@ -930,7 +930,7 @@ bool KeyedLoadIC::PatchInlinedLoad(Address address, Object* map) {
// Patch the map check.
Address ldr_map_instr_address =
inline_end_address -
-(CodeGenerator::kInlinedKeyedLoadInstructionsAfterPatch *
+(CodeGenerator::GetInlinedKeyedLoadInstructionsAfterPatch() *
Assembler::kInstrSize);
Assembler::set_target_address_at(ldr_map_instr_address,
reinterpret_cast<Address>(map));

src/arm/macro-assembler-arm.cc

@@ -1548,6 +1548,8 @@ void MacroAssembler::Check(Condition cc, const char* msg) {
void MacroAssembler::Abort(const char* msg) {
+Label abort_start;
+bind(&abort_start);
// We want to pass the msg string like a smi to avoid GC
// problems, however msg is not guaranteed to be aligned
// properly. Instead, we pass an aligned pointer that is
@@ -1571,6 +1573,17 @@ void MacroAssembler::Abort(const char* msg) {
push(r0);
CallRuntime(Runtime::kAbort, 2);
// will not return here
+if (is_const_pool_blocked()) {
+// If the calling code cares about the exact number of
+// instructions generated, we insert padding here to keep the size
+// of the Abort macro constant.
+static const int kExpectedAbortInstructions = 10;
+int abort_instructions = InstructionsGeneratedSince(&abort_start);
+ASSERT(abort_instructions <= kExpectedAbortInstructions);
+while (abort_instructions++ < kExpectedAbortInstructions) {
+nop();
+}
+}
}
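The padding exists because the IC patching code locates the inlined map check
by counting a fixed number of instructions back from the end of the inlined
sequence, as in the src/arm/ic-arm.cc hunk above:

    Address ldr_map_instr_address =
        inline_end_address -
        (CodeGenerator::GetInlinedKeyedLoadInstructionsAfterPatch() *
         Assembler::kInstrSize);

If a debug-mode Assert could expand Abort by a message-dependent number of
instructions, that arithmetic would break, so Abort pads itself to a constant
size whenever the constant pool is blocked, the signal that the surrounding
code is counting instructions.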

src/builtins.cc

@@ -195,6 +195,7 @@ BUILTIN(ArrayCodeGeneric) {
}
// 'array' now contains the JSArray we should initialize.
+ASSERT(array->HasFastElements());
// Optimize the case where there is one argument and the argument is a
// small smi.

src/factory.cc

@@ -274,11 +274,22 @@ Handle<Map> Factory::CopyMap(Handle<Map> src,
return copy;
}
Handle<Map> Factory::CopyMapDropTransitions(Handle<Map> src) {
CALL_HEAP_FUNCTION(src->CopyDropTransitions(), Map);
}
+Handle<Map> Factory::GetFastElementsMap(Handle<Map> src) {
+CALL_HEAP_FUNCTION(src->GetFastElementsMap(), Map);
+}
+Handle<Map> Factory::GetSlowElementsMap(Handle<Map> src) {
+CALL_HEAP_FUNCTION(src->GetSlowElementsMap(), Map);
+}
Handle<FixedArray> Factory::CopyFixedArray(Handle<FixedArray> array) {
CALL_HEAP_FUNCTION(array->Copy(), FixedArray);
}
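Both new factory methods wrap the raw Map methods in CALL_HEAP_FUNCTION, the
same macro the surrounding factory code uses; my reading of that macro is that
it retries the raw call after triggering a GC if it reports an allocation
failure, then returns the result as a Handle. The embedder-facing call sites
in the src/api.cc hunk above use them like this:

    self->set_map(
        *i::Factory::GetSlowElementsMap(i::Handle<i::Map>(self->map())));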

src/factory.h

@@ -180,6 +180,10 @@ class Factory : public AllStatic {
static Handle<Map> CopyMapDropTransitions(Handle<Map> map);
+static Handle<Map> GetFastElementsMap(Handle<Map> map);
+static Handle<Map> GetSlowElementsMap(Handle<Map> map);
static Handle<FixedArray> CopyFixedArray(Handle<FixedArray> array);
// Numbers (eg, literals) are pretenured by the parser.

src/heap.cc

@@ -1283,7 +1283,7 @@ Object* Heap::AllocateMap(InstanceType instance_type, int instance_size) {
map->set_code_cache(empty_fixed_array());
map->set_unused_property_fields(0);
map->set_bit_field(0);
-map->set_bit_field2(1 << Map::kIsExtensible);
+map->set_bit_field2((1 << Map::kIsExtensible) | (1 << Map::kHasFastElements));
// If the map object is aligned fill the padding area with Smi 0 objects.
if (Map::kPadStart < Map::kSize) {
@@ -2611,6 +2611,7 @@ Object* Heap::AllocateInitialMap(JSFunction* fun) {
map->set_inobject_properties(in_object_properties);
map->set_unused_property_fields(in_object_properties);
map->set_prototype(prototype);
+ASSERT(map->has_fast_elements());
// If the function has only simple this property assignments add
// field descriptors for these to the initial map as the object
@@ -2664,8 +2665,8 @@ Object* Heap::AllocateJSObjectFromMap(Map* map, PretenureFlag pretenure) {
// properly initialized.
ASSERT(map->instance_type() != JS_FUNCTION_TYPE);
-// Both types of globla objects should be allocated using
-// AllocateGloblaObject to be properly initialized.
+// Both types of global objects should be allocated using
+// AllocateGlobalObject to be properly initialized.
ASSERT(map->instance_type() != JS_GLOBAL_OBJECT_TYPE);
ASSERT(map->instance_type() != JS_BUILTINS_OBJECT_TYPE);
@@ -2689,6 +2690,7 @@ Object* Heap::AllocateJSObjectFromMap(Map* map, PretenureFlag pretenure) {
InitializeJSObjectFromMap(JSObject::cast(obj),
FixedArray::cast(properties),
map);
+ASSERT(JSObject::cast(obj)->HasFastElements());
return obj;
}

src/ia32/codegen-ia32.cc

@@ -8868,9 +8868,11 @@ Result CodeGenerator::EmitKeyedLoad() {
// is not a dictionary.
__ mov(elements.reg(),
FieldOperand(receiver.reg(), JSObject::kElementsOffset));
+if (FLAG_debug_code) {
__ cmp(FieldOperand(elements.reg(), HeapObject::kMapOffset),
Immediate(Factory::fixed_array_map()));
-deferred->Branch(not_equal);
+__ Assert(equal, "JSObject with fast elements map has slow elements");
+}
// Check that the key is within bounds.
__ cmp(key.reg(),

src/ic.cc

@@ -992,12 +992,14 @@ Object* KeyedLoadIC::Load(State state,
}
}
set_target(stub);
-// For JSObjects that are not value wrappers and that do not have
-// indexed interceptors, we initialize the inlined fast case (if
-// present) by patching the inlined map check.
+// For JSObjects with fast elements that are not value wrappers
+// and that do not have indexed interceptors, we initialize the
+// inlined fast case (if present) by patching the inlined map
+// check.
if (object->IsJSObject() &&
!object->IsJSValue() &&
-!JSObject::cast(*object)->HasIndexedInterceptor()) {
+!JSObject::cast(*object)->HasIndexedInterceptor() &&
+JSObject::cast(*object)->HasFastElements()) {
Map* map = JSObject::cast(*object)->map();
PatchInlinedLoad(address(), map);
}

src/objects-debug.cc

@@ -539,6 +539,9 @@ void JSObject::JSObjectVerify() {
(map()->inobject_properties() + properties()->length() -
map()->NextFreePropertyIndex()));
}
+ASSERT(map()->has_fast_elements() ==
+(elements()->map() == Heap::fixed_array_map()));
+ASSERT(map()->has_fast_elements() == HasFastElements());
}

src/objects-inl.h

@@ -1166,6 +1166,8 @@ HeapObject* JSObject::elements() {
void JSObject::set_elements(HeapObject* value, WriteBarrierMode mode) {
+ASSERT(map()->has_fast_elements() ==
+(value->map() == Heap::fixed_array_map()));
// In the assert below Dictionary is covered under FixedArray.
ASSERT(value->IsFixedArray() || value->IsPixelArray() ||
value->IsExternalArray());
@@ -1181,11 +1183,21 @@ void JSObject::initialize_properties() {
void JSObject::initialize_elements() {
+ASSERT(map()->has_fast_elements());
ASSERT(!Heap::InNewSpace(Heap::empty_fixed_array()));
WRITE_FIELD(this, kElementsOffset, Heap::empty_fixed_array());
}
+Object* JSObject::ResetElements() {
+Object* obj = map()->GetFastElementsMap();
+if (obj->IsFailure()) return obj;
+set_map(Map::cast(obj));
+initialize_elements();
+return this;
+}
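ResetElements follows the failure-propagation idiom used by all the raw
(non-handle) allocation paths in this patch: on allocation failure a Failure
object is returned and must be passed up unused rather than treated as a map.
A caller sketch (hypothetical variable names), matching the SetElementsLength
change in objects.cc below:

    Object* obj = object->ResetElements();  // object is any JSObject*
    if (obj->IsFailure()) return obj;       // propagate OOM instead of using it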
ACCESSORS(Oddball, to_string, String, kToStringOffset)
ACCESSORS(Oddball, to_number, Object, kToNumberOffset)
@@ -2335,6 +2347,26 @@ void Map::set_prototype(Object* value, WriteBarrierMode mode) {
}
+Object* Map::GetFastElementsMap() {
+if (has_fast_elements()) return this;
+Object* obj = CopyDropTransitions();
+if (obj->IsFailure()) return obj;
+Map* new_map = Map::cast(obj);
+new_map->set_has_fast_elements(true);
+return new_map;
+}
+Object* Map::GetSlowElementsMap() {
+if (!has_fast_elements()) return this;
+Object* obj = CopyDropTransitions();
+if (obj->IsFailure()) return obj;
+Map* new_map = Map::cast(obj);
+new_map->set_has_fast_elements(false);
+return new_map;
+}
ACCESSORS(Map, instance_descriptors, DescriptorArray,
kInstanceDescriptorsOffset)
ACCESSORS(Map, code_cache, Object, kCodeCacheOffset)
@@ -2838,11 +2870,14 @@ JSObject::ElementsKind JSObject::GetElementsKind() {
if (array->IsFixedArray()) {
// FAST_ELEMENTS or DICTIONARY_ELEMENTS are both stored in a FixedArray.
if (array->map() == Heap::fixed_array_map()) {
+ASSERT(map()->has_fast_elements());
return FAST_ELEMENTS;
}
ASSERT(array->IsDictionary());
+ASSERT(!map()->has_fast_elements());
return DICTIONARY_ELEMENTS;
}
+ASSERT(!map()->has_fast_elements());
if (array->IsExternalArray()) {
switch (array->map()->instance_type()) {
case EXTERNAL_BYTE_ARRAY_TYPE:

src/objects.cc

@@ -2222,6 +2222,11 @@ Object* JSObject::TransformToFastProperties(int unused_property_fields) {
Object* JSObject::NormalizeElements() {
ASSERT(!HasPixelElements() && !HasExternalArrayElements());
if (HasDictionaryElements()) return this;
+ASSERT(map()->has_fast_elements());
+Object* obj = map()->GetSlowElementsMap();
+if (obj->IsFailure()) return obj;
+Map* new_map = Map::cast(obj);
// Get number of entries.
FixedArray* array = FixedArray::cast(elements());
@@ -2230,7 +2235,7 @@ Object* JSObject::NormalizeElements() {
int length = IsJSArray() ?
Smi::cast(JSArray::cast(this)->length())->value() :
array->length();
-Object* obj = NumberDictionary::Allocate(length);
+obj = NumberDictionary::Allocate(length);
if (obj->IsFailure()) return obj;
NumberDictionary* dictionary = NumberDictionary::cast(obj);
// Copy entries.
@@ -2243,7 +2248,10 @@ Object* JSObject::NormalizeElements() {
dictionary = NumberDictionary::cast(result);
}
}
-// Switch to using the dictionary as the backing storage for elements.
+// Switch to using the dictionary as the backing storage for
+// elements. Set the new map first to satisfy the elements type
+// assert in set_elements().
+set_map(new_map);
set_elements(dictionary);
Counters::elements_to_dictionary.Increment();
@@ -5473,14 +5481,18 @@ void Code::Disassemble(const char* name) {
#endif // ENABLE_DISASSEMBLER
-void JSObject::SetFastElements(FixedArray* elems) {
+Object* JSObject::SetFastElementsCapacityAndLength(int capacity, int length) {
// We should never end in here with a pixel or external array.
ASSERT(!HasPixelElements() && !HasExternalArrayElements());
-#ifdef DEBUG
-// Check the provided array is filled with the_hole.
-uint32_t len = static_cast<uint32_t>(elems->length());
-for (uint32_t i = 0; i < len; i++) ASSERT(elems->get(i)->IsTheHole());
-#endif
+Object* obj = Heap::AllocateFixedArrayWithHoles(capacity);
+if (obj->IsFailure()) return obj;
+FixedArray* elems = FixedArray::cast(obj);
+obj = map()->GetFastElementsMap();
+if (obj->IsFailure()) return obj;
+Map* new_map = Map::cast(obj);
AssertNoAllocation no_gc;
WriteBarrierMode mode = elems->GetWriteBarrierMode(no_gc);
switch (GetElementsKind()) {
@@ -5508,7 +5520,15 @@ void JSObject::SetFastElements(FixedArray* elems) {
UNREACHABLE();
break;
}
+set_map(new_map);
set_elements(elems);
+if (IsJSArray()) {
+JSArray::cast(this)->set_length(Smi::FromInt(length));
+}
+return this;
}
@@ -5595,7 +5615,7 @@ Object* JSObject::SetElementsLength(Object* len) {
Object* smi_length = len->ToSmi();
if (smi_length->IsSmi()) {
-int value = Smi::cast(smi_length)->value();
+const int value = Smi::cast(smi_length)->value();
if (value < 0) return ArrayLengthRangeError();
switch (GetElementsKind()) {
case FAST_ELEMENTS: {
@@ -5617,12 +5637,8 @@ Object* JSObject::SetElementsLength(Object* len) {
int new_capacity = value > min ? value : min;
if (new_capacity <= kMaxFastElementsLength ||
!ShouldConvertToSlowElements(new_capacity)) {
-Object* obj = Heap::AllocateFixedArrayWithHoles(new_capacity);
+Object* obj = SetFastElementsCapacityAndLength(new_capacity, value);
if (obj->IsFailure()) return obj;
-if (IsJSArray()) {
-JSArray::cast(this)->set_length(Smi::cast(smi_length));
-}
-SetFastElements(FixedArray::cast(obj));
return this;
}
break;
@@ -5633,7 +5649,8 @@ Object* JSObject::SetElementsLength(Object* len) {
// If the length of a slow array is reset to zero, we clear
// the array and flush backing storage. This has the added
// benefit that the array returns to fast mode.
-initialize_elements();
+Object* obj = ResetElements();
+if (obj->IsFailure()) return obj;
} else {
// Remove deleted elements.
uint32_t old_length =
@@ -6092,12 +6109,8 @@ Object* JSObject::SetFastElement(uint32_t index, Object* value) {
if (new_capacity <= kMaxFastElementsLength ||
!ShouldConvertToSlowElements(new_capacity)) {
ASSERT(static_cast<uint32_t>(new_capacity) > index);
-Object* obj = Heap::AllocateFixedArrayWithHoles(new_capacity);
+Object* obj = SetFastElementsCapacityAndLength(new_capacity, index + 1);
if (obj->IsFailure()) return obj;
-SetFastElements(FixedArray::cast(obj));
-if (IsJSArray()) {
-JSArray::cast(this)->set_length(Smi::FromInt(index + 1));
-}
FixedArray::cast(elements())->set(index, value);
return value;
}
@@ -6216,13 +6229,11 @@ Object* JSObject::SetElementWithoutInterceptor(uint32_t index, Object* value) {
uint32_t new_length = 0;
if (IsJSArray()) {
CHECK(JSArray::cast(this)->length()->ToArrayIndex(&new_length));
-JSArray::cast(this)->set_length(Smi::FromInt(new_length));
} else {
new_length = NumberDictionary::cast(elements())->max_number_key() + 1;
}
-Object* obj = Heap::AllocateFixedArrayWithHoles(new_length);
+Object* obj = SetFastElementsCapacityAndLength(new_length, new_length);
if (obj->IsFailure()) return obj;
-SetFastElements(FixedArray::cast(obj));
#ifdef DEBUG
if (FLAG_trace_normalization) {
PrintF("Object elements are fast case again:\n");
@@ -7526,14 +7537,18 @@ Object* JSObject::PrepareElementsForSort(uint32_t limit) {
}
// Convert to fast elements.
+Object* obj = map()->GetFastElementsMap();
+if (obj->IsFailure()) return obj;
+Map* new_map = Map::cast(obj);
PretenureFlag tenure = Heap::InNewSpace(this) ? NOT_TENURED: TENURED;
Object* new_array =
Heap::AllocateFixedArray(dict->NumberOfElements(), tenure);
-if (new_array->IsFailure()) {
-return new_array;
-}
+if (new_array->IsFailure()) return new_array;
FixedArray* fast_elements = FixedArray::cast(new_array);
dict->CopyValuesTo(fast_elements);
+set_map(new_map);
set_elements(fast_elements);
}
ASSERT(HasFastElements());

src/objects.h

@@ -1191,6 +1191,7 @@ class JSObject: public HeapObject {
// case, and a PixelArray or ExternalArray in special cases.
DECL_ACCESSORS(elements, HeapObject)
inline void initialize_elements();
+inline Object* ResetElements();
inline ElementsKind GetElementsKind();
inline bool HasFastElements();
inline bool HasDictionaryElements();
@@ -1367,7 +1368,7 @@ class JSObject: public HeapObject {
// The undefined object if index is out of bounds.
Object* GetElementWithReceiver(JSObject* receiver, uint32_t index);
-void SetFastElements(FixedArray* elements);
+Object* SetFastElementsCapacityAndLength(int capacity, int length);
Object* SetSlowElements(Object* length);
// Lookup interceptors are used for handling properties controlled by host
@@ -2987,6 +2988,19 @@ class Map: public HeapObject {
return ((1 << kIsExtensible) & bit_field2()) != 0;
}
+// Tells whether the instance has fast elements.
+void set_has_fast_elements(bool value) {
+if (value) {
+set_bit_field2(bit_field2() | (1 << kHasFastElements));
+} else {
+set_bit_field2(bit_field2() & ~(1 << kHasFastElements));
+}
+}
+bool has_fast_elements() {
+return ((1 << kHasFastElements) & bit_field2()) != 0;
+}
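For reference, this gives bit_field2 the following layout, summarizing the
bit-position constants declared further down in this header:

    // bit_field2 bits (see "Bit positions for bit field 2" below):
    //   bit 0: kIsExtensible
    //   bit 1: kFunctionWithPrototype
    //   bit 2: kHasFastElements  (new in this change)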
// Tells whether the instance needs security checks when accessing its
// properties.
inline void set_is_access_check_needed(bool access_check_needed);
@@ -3010,6 +3024,16 @@ class Map: public HeapObject {
// instance descriptors.
Object* CopyDropTransitions();
+// Returns this map if it has the fast elements bit set, otherwise
+// returns a copy of the map, with all transitions dropped from the
+// descriptors and the fast elements bit set.
+inline Object* GetFastElementsMap();
+// Returns this map if it has the fast elements bit cleared,
+// otherwise returns a copy of the map, with all transitions dropped
+// from the descriptors and the fast elements bit cleared.
+inline Object* GetSlowElementsMap();
// Returns the property index for name (only valid for FAST MODE).
int PropertyIndexFor(String* name);
@@ -3111,6 +3135,7 @@ class Map: public HeapObject {
// Bit positions for bit field 2
static const int kIsExtensible = 0;
static const int kFunctionWithPrototype = 1;
+static const int kHasFastElements = 2;
// Layout of the default cache. It holds alternating name and code objects.
static const int kCodeCacheEntrySize = 2;

src/runtime.cc

@@ -7449,7 +7449,7 @@ class ArrayConcatVisitor {
uint32_t index_limit_;
// Index after last seen index. Always less than or equal to index_limit_.
uint32_t index_offset_;
-bool fast_elements_;
+const bool fast_elements_;
};
@@ -7766,13 +7766,14 @@ static Object* Runtime_ArrayConcat(Arguments args) {
// The backing storage array must have non-existing elements to
// preserve holes across concat operations.
storage = Factory::NewFixedArrayWithHoles(result_length);
+result->set_map(*Factory::GetFastElementsMap(Handle<Map>(result->map())));
} else {
// TODO(126): move 25% pre-allocation logic into Dictionary::Allocate
uint32_t at_least_space_for = estimate_nof_elements +
(estimate_nof_elements >> 2);
storage = Handle<FixedArray>::cast(
Factory::NewNumberDictionary(at_least_space_for));
+result->set_map(*Factory::GetSlowElementsMap(Handle<Map>(result->map())));
}
Handle<Object> len = Factory::NewNumber(static_cast<double>(result_length));
@@ -7822,9 +7823,19 @@ static Object* Runtime_MoveArrayContents(Arguments args) {
ASSERT(args.length() == 2);
CONVERT_CHECKED(JSArray, from, args[0]);
CONVERT_CHECKED(JSArray, to, args[1]);
-to->SetContent(FixedArray::cast(from->elements()));
+HeapObject* new_elements = from->elements();
+Object* new_map;
+if (new_elements->map() == Heap::fixed_array_map()) {
+new_map = to->map()->GetFastElementsMap();
+} else {
+new_map = to->map()->GetSlowElementsMap();
+}
+if (new_map->IsFailure()) return new_map;
+to->set_map(Map::cast(new_map));
+to->set_elements(new_elements);
to->set_length(from->length());
-from->SetContent(Heap::empty_fixed_array());
+Object* obj = from->ResetElements();
+if (obj->IsFailure()) return obj;
from->set_length(Smi::FromInt(0));
return to;
}
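Note the ordering: the destination's map is switched before its elements
because set_elements() now asserts that the map's fast-elements bit matches
the backing store (see the objects-inl.h hunk above). Condensed:

    to->set_map(Map::cast(new_map));  // 1: map now matches the incoming store
    to->set_elements(new_elements);   // 2: so the assert in set_elements() holds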

src/x64/codegen-x64.cc

@@ -7529,9 +7529,11 @@ Result CodeGenerator::EmitKeyedLoad() {
// is not a dictionary.
__ movq(elements.reg(),
FieldOperand(receiver.reg(), JSObject::kElementsOffset));
+if (FLAG_debug_code) {
__ Cmp(FieldOperand(elements.reg(), HeapObject::kMapOffset),
Factory::fixed_array_map());
-deferred->Branch(not_equal);
+__ Assert(equal, "JSObject with fast elements map has slow elements");
+}
// Check that key is within bounds.
__ SmiCompare(key.reg(),