From 52a7149efbf0f7a923092591ddae76d186e4d2e8 Mon Sep 17 00:00:00 2001
From: "verwaest@chromium.org"
Date: Wed, 25 Jul 2012 11:12:29 +0000
Subject: [PATCH] In-place trimming of descriptor array when appending
 callbacks.

Review URL: https://chromiumcodereview.appspot.com/10830005

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@12184 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
---
 src/factory.cc |  60 +----------------
 src/factory.h  |   3 -
 src/objects.cc | 170 ++++++++++++++++++++++++++++++++++++-------------
 src/objects.h  |   9 +++
 4 files changed, 135 insertions(+), 107 deletions(-)

diff --git a/src/factory.cc b/src/factory.cc
index 25989cace4..df72da50fb 100644
--- a/src/factory.cc
+++ b/src/factory.cc
@@ -892,64 +892,6 @@ Handle<String> Factory::SymbolFromString(Handle<String> value) {
 }
 
 
-void Factory::CopyAppendCallbackDescriptors(Handle<Map> map,
-                                            Handle<Object> descriptors) {
-  Handle<DescriptorArray> array(map->instance_descriptors());
-  v8::NeanderArray callbacks(descriptors);
-  int nof_callbacks = callbacks.length();
-  int descriptor_count = array->number_of_descriptors();
-  Handle<DescriptorArray> result =
-      NewDescriptorArray(descriptor_count + nof_callbacks);
-
-  // Ensure that marking will not progress and change color of objects.
-  DescriptorArray::WhitenessWitness witness(*result);
-
-  // Copy the descriptors from the array.
-  if (0 < descriptor_count) {
-    for (int i = 0; i < descriptor_count; i++) {
-      result->CopyFrom(i, *array, i, witness);
-    }
-  }
-
-  map->set_instance_descriptors(*result);
-
-  // Fill in new callback descriptors.  Process the callbacks from
-  // back to front so that the last callback with a given name takes
-  // precedence over previously added callbacks with that name.
-  for (int i = nof_callbacks - 1; i >= 0; i--) {
-    Handle<AccessorInfo> entry =
-        Handle<AccessorInfo>(AccessorInfo::cast(callbacks.get(i)));
-    // Ensure the key is a symbol before writing into the instance descriptor.
-    Handle<String> key =
-        SymbolFromString(Handle<String>(String::cast(entry->name())));
-    // Check if a descriptor with this name already exists before writing.
-    if (LinearSearch(*result, *key, map->NumberOfSetDescriptors()) ==
-        DescriptorArray::kNotFound) {
-      CallbacksDescriptor desc(*key, *entry, entry->property_attributes());
-      map->AppendDescriptor(&desc, witness);
-    }
-  }
-
-  int new_number_of_descriptors = map->NumberOfSetDescriptors();
-  // Reinstall the original descriptor array if no new elements were added.
-  if (new_number_of_descriptors == descriptor_count) {
-    map->set_instance_descriptors(*array);
-    return;
-  }
-
-  // If duplicates were detected, allocate a result of the right size
-  // and transfer the elements.
-  if (new_number_of_descriptors < result->length()) {
-    Handle<DescriptorArray> new_result =
-        NewDescriptorArray(new_number_of_descriptors);
-    for (int i = 0; i < new_number_of_descriptors; i++) {
-      new_result->CopyFrom(i, *result, i, witness);
-    }
-    map->set_instance_descriptors(*new_result);
-  }
-}
-
-
 Handle<JSObject> Factory::NewJSObject(Handle<JSFunction> constructor,
                                       PretenureFlag pretenure) {
   CALL_HEAP_FUNCTION(
@@ -1337,7 +1279,7 @@ Handle<JSFunction> Factory::CreateApiFunction(
   while (true) {
     Handle<Object> props = Handle<Object>(obj->property_accessors());
     if (!props->IsUndefined()) {
-      CopyAppendCallbackDescriptors(map, props);
+      Map::CopyAppendCallbackDescriptors(map, props);
     }
     Handle<Object> parent = Handle<Object>(obj->parent_template());
     if (parent->IsUndefined()) break;
diff --git a/src/factory.h b/src/factory.h
index 6b22140ff3..e70f3b1c14 100644
--- a/src/factory.h
+++ b/src/factory.h
@@ -496,9 +496,6 @@ class Factory {
       Handle<String> name,
       LanguageMode language_mode);
 
-  void CopyAppendCallbackDescriptors(Handle<Map> map,
-                                     Handle<Object> descriptors);
-
   // Create a new map cache.
   Handle<MapCache> NewMapCache(int at_least_space_for);
 
diff --git a/src/objects.cc b/src/objects.cc
index 65118437f1..67ff486905 100644
--- a/src/objects.cc
+++ b/src/objects.cc
@@ -2106,6 +2106,126 @@ void Map::LookupTransition(JSObject* holder,
 }
 
 
+enum RightTrimMode { FROM_GC, FROM_MUTATOR };
+
+
+static void ZapEndOfFixedArray(Address new_end, int to_trim) {
+  // If we are doing a big trim in old space then we zap the space.
+  Object** zap = reinterpret_cast<Object**>(new_end);
+  for (int i = 1; i < to_trim; i++) {
+    *zap++ = Smi::FromInt(0);
+  }
+}
+
+
+template<RightTrimMode trim_mode>
+static void RightTrimFixedArray(Heap* heap, FixedArray* elms, int to_trim) {
+  ASSERT(elms->map() != HEAP->fixed_cow_array_map());
+  // For now this trick is only applied to fixed arrays in new and paged space.
+  // In large object space the object's start must coincide with chunk
+  // and thus the trick is just not applicable.
+  ASSERT(!HEAP->lo_space()->Contains(elms));
+
+  const int len = elms->length();
+
+  ASSERT(to_trim < len);
+
+  Address new_end = elms->address() + FixedArray::SizeFor(len - to_trim);
+
+  if (trim_mode == FROM_GC) {
+#ifdef DEBUG
+    ZapEndOfFixedArray(new_end, to_trim);
+#endif
+  } else {
+    ZapEndOfFixedArray(new_end, to_trim);
+  }
+
+  int size_delta = to_trim * kPointerSize;
+
+  // Technically in new space this write might be omitted (except for
+  // debug mode which iterates through the heap), but to play safer
+  // we still do it.
+  heap->CreateFillerObjectAt(new_end, size_delta);
+
+  elms->set_length(len - to_trim);
+
+  // Maintain marking consistency for IncrementalMarking.
+  if (Marking::IsBlack(Marking::MarkBitFrom(elms))) {
+    if (trim_mode == FROM_GC) {
+      MemoryChunk::IncrementLiveBytesFromGC(elms->address(), -size_delta);
+    } else {
+      MemoryChunk::IncrementLiveBytesFromMutator(elms->address(), -size_delta);
+    }
+  }
+}
+
+
+void Map::CopyAppendCallbackDescriptors(Handle<Map> map,
+                                        Handle<Object> descriptors) {
+  Isolate* isolate = map->GetIsolate();
+  Handle<DescriptorArray> array(map->instance_descriptors());
+  v8::NeanderArray callbacks(descriptors);
+  int nof_callbacks = callbacks.length();
+  int descriptor_count = array->number_of_descriptors();
+
+  // Ensure the keys are symbols before writing them into the instance
+  // descriptor. Since it may cause a GC, it has to be done before we
+  // temporarily put the heap in an invalid state while appending descriptors.
+  for (int i = 0; i < nof_callbacks; ++i) {
+    Handle<AccessorInfo> entry(AccessorInfo::cast(callbacks.get(i)));
+    Handle<String> key =
+        isolate->factory()->SymbolFromString(
+            Handle<String>(String::cast(entry->name())));
+    entry->set_name(*key);
+  }
+
+  Handle<DescriptorArray> result =
+      isolate->factory()->NewDescriptorArray(descriptor_count + nof_callbacks);
+
+  // Ensure that marking will not progress and change color of objects.
+  DescriptorArray::WhitenessWitness witness(*result);
+
+  // Copy the descriptors from the array.
+  if (0 < descriptor_count) {
+    for (int i = 0; i < descriptor_count; i++) {
+      result->CopyFrom(i, *array, i, witness);
+    }
+  }
+
+  // After this point the GC is not allowed to run anymore until the map is in
+  // a consistent state again, i.e., all the descriptors are appended and the
+  // descriptor array is trimmed to the right size.
+  map->set_instance_descriptors(*result);
+
+  // Fill in new callback descriptors.  Process the callbacks from
+  // back to front so that the last callback with a given name takes
+  // precedence over previously added callbacks with that name.
+  for (int i = nof_callbacks - 1; i >= 0; i--) {
+    AccessorInfo* entry = AccessorInfo::cast(callbacks.get(i));
+    String* key = String::cast(entry->name());
+    // Check if a descriptor with this name already exists before writing.
+    if (LinearSearch(*result, key, map->NumberOfSetDescriptors()) ==
+        DescriptorArray::kNotFound) {
+      CallbacksDescriptor desc(key, entry, entry->property_attributes());
+      map->AppendDescriptor(&desc, witness);
+    }
+  }
+
+  int new_number_of_descriptors = map->NumberOfSetDescriptors();
+  // Reinstall the original descriptor array if no new elements were added.
+  if (new_number_of_descriptors == descriptor_count) {
+    map->set_instance_descriptors(*array);
+    return;
+  }
+
+  // If duplicates were detected, trim the descriptor array to the right size.
+  int new_array_size = DescriptorArray::SizeFor(new_number_of_descriptors);
+  if (new_array_size < result->length()) {
+    RightTrimFixedArray<FROM_MUTATOR>(
+        isolate->heap(), *result, result->length() - new_array_size);
+  }
+}
+
+
 static bool ContainsMap(MapHandleList* maps, Handle<Map> map) {
   ASSERT(!map.is_null());
   for (int i = 0; i < maps->length(); ++i) {
@@ -5723,10 +5843,9 @@ MaybeObject* DescriptorArray::Allocate(int number_of_descriptors,
     return heap->empty_descriptor_array();
   }
   // Allocate the array of keys.
-  { MaybeObject* maybe_array =
-        heap->AllocateFixedArray(ToKeyIndex(number_of_descriptors));
-    if (!maybe_array->To(&result)) return maybe_array;
-  }
+  MaybeObject* maybe_array =
+      heap->AllocateFixedArray(SizeFor(number_of_descriptors));
+  if (!maybe_array->To(&result)) return maybe_array;
 
   result->set(kEnumCacheIndex, Smi::FromInt(0));
   result->set(kTransitionsIndex, Smi::FromInt(0));
@@ -7083,46 +7202,6 @@ void String::PrintOn(FILE* file) {
 }
 
 
-// This function should only be called from within the GC, since it uses
-// IncrementLiveBytesFromGC. If called from anywhere else, this results in an
-// inconsistent live-bytes count.
-static void RightTrimFixedArray(Heap* heap, FixedArray* elms, int to_trim) {
-  ASSERT(elms->map() != HEAP->fixed_cow_array_map());
-  // For now this trick is only applied to fixed arrays in new and paged space.
-  // In large object space the object's start must coincide with chunk
-  // and thus the trick is just not applicable.
-  ASSERT(!HEAP->lo_space()->Contains(elms));
-
-  const int len = elms->length();
-
-  ASSERT(to_trim < len);
-
-  Address new_end = elms->address() + FixedArray::SizeFor(len - to_trim);
-
-#ifdef DEBUG
-  // If we are doing a big trim in old space then we zap the space.
-  Object** zap = reinterpret_cast<Object**>(new_end);
-  for (int i = 1; i < to_trim; i++) {
-    *zap++ = Smi::FromInt(0);
-  }
-#endif
-
-  int size_delta = to_trim * kPointerSize;
-
-  // Technically in new space this write might be omitted (except for
-  // debug mode which iterates through the heap), but to play safer
-  // we still do it.
-  heap->CreateFillerObjectAt(new_end, size_delta);
-
-  elms->set_length(len - to_trim);
-
-  // Maintain marking consistency for IncrementalMarking.
-  if (Marking::IsBlack(Marking::MarkBitFrom(elms))) {
-    MemoryChunk::IncrementLiveBytesFromGC(elms->address(), -size_delta);
-  }
-}
-
-
 // Clear a possible back pointer in case the transition leads to a dead map.
 // Return true in case a back pointer has been cleared and false otherwise.
 static bool ClearBackPointer(Heap* heap, Object* target) {
@@ -7185,7 +7264,8 @@ void Map::ClearNonLiveTransitions(Heap* heap) {
 
   int trim = t->number_of_transitions() - transition_index;
   if (trim > 0) {
-    RightTrimFixedArray(heap, t, trim * TransitionArray::kTransitionSize);
+    RightTrimFixedArray<FROM_GC>(
+        heap, t, trim * TransitionArray::kTransitionSize);
   }
 }
 
diff --git a/src/objects.h b/src/objects.h
index 92c2bc509a..3694955eaf 100644
--- a/src/objects.h
+++ b/src/objects.h
@@ -2635,6 +2635,10 @@ class DescriptorArray: public FixedArray {
   // fit in a page).
   static const int kMaxNumberOfDescriptors = 1024 + 512;
 
+  static int SizeFor(int number_of_descriptors) {
+    return ToKeyIndex(number_of_descriptors);
+  }
+
 private:
  // An entry in a DescriptorArray, represented as an (array, index) pair.
  class Entry {
@@ -4961,6 +4965,11 @@ class Map: public HeapObject {
                                Handle<Code> code);
   MUST_USE_RESULT MaybeObject* UpdateCodeCache(String* name, Code* code);
 
+  // Extend the descriptor array of the map with the list of descriptors.
+  // In case of duplicates, the latest descriptor is used.
+  static void CopyAppendCallbackDescriptors(Handle<Map> map,
+                                            Handle<Object> descriptors);
+
   // Returns the found code or undefined if absent.
   Object* FindInCodeCache(String* name, Code::Flags flags);
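
Note on the approach (not part of the patch): when duplicate callbacks are detected, the new Map::CopyAppendCallbackDescriptors shrinks the freshly allocated descriptor array in place with RightTrimFixedArray<FROM_MUTATOR> instead of allocating a second, smaller array and copying the descriptors over. The standalone C++ sketch below illustrates only the general in-place right-trim idea; it is not V8 code, and the TrimmableArray type and its members are invented for illustration. The real implementation additionally installs a heap filler object over the trimmed tail and updates live-byte counts for incremental marking.

// Standalone sketch: shrink an array's logical length in place, "zapping"
// the trimmed tail so stale entries are never observed afterwards.
#include <cassert>
#include <cstddef>
#include <vector>

struct TrimmableArray {
  std::vector<void*> slots;  // backing store; its capacity stays untouched
  size_t length;             // logical length, analogous to FixedArray length

  explicit TrimmableArray(size_t n) : slots(n, nullptr), length(n) {}

  // Trim 'to_trim' elements off the end without reallocating or copying.
  void RightTrim(size_t to_trim) {
    assert(to_trim < length);
    // Zap the tail (mirrors ZapEndOfFixedArray in the patch).
    for (size_t i = length - to_trim; i < length; ++i) {
      slots[i] = nullptr;
    }
    length -= to_trim;  // mirrors elms->set_length(len - to_trim)
  }
};

int main() {
  TrimmableArray a(8);
  a.RightTrim(3);  // in place: the remaining 5 elements are never copied
  assert(a.length == 5);
  return 0;
}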