Revert r5147 due to failing assert, with no simple solution. Issue 808.
Review URL: http://codereview.chromium.org/3087001

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@5167 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
commit 571eca3118
parent 24cf5459f5
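
For context: the reverted change guards cache hits in NormalizedMapCache::Get with a
debug-only slow check that compares the cached normalized map bit-by-bit against a
freshly built copy (see the src/objects.cc hunk below). Assuming the "failing assert"
in the message is one of the asserts this revert removes, that check is a plausible
candidate (abridged excerpt):

    if (FLAG_enable_slow_asserts) {
      // The cached map should match freshly created normalized map bit-by-bit.
      Object* fresh = fast->CopyNormalized(mode);
      if (!fresh->IsFailure()) {
        ASSERT(memcmp(Map::cast(fresh)->address(),
                      Map::cast(obj)->address(),
                      Map::kSize) == 0);
      }
    }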
@@ -231,7 +231,6 @@ class Genesis BASE_EMBEDDED {
  bool InstallNatives();
  void InstallCustomCallGenerators();
  void InstallJSFunctionResultCaches();
  void InitializeNormalizedMapCaches();
  // Used both for deserialized and from-scratch contexts to add the extensions
  // provided.
  static bool InstallExtensions(Handle<Context> global_context,
@@ -1393,13 +1392,6 @@ void Genesis::InstallJSFunctionResultCaches() {
}


void Genesis::InitializeNormalizedMapCaches() {
  Handle<FixedArray> array(
      Factory::NewFixedArray(NormalizedMapCache::kEntries, TENURED));
  global_context()->set_normalized_map_cache(NormalizedMapCache::cast(*array));
}


int BootstrapperActive::nesting_ = 0;


@@ -1766,7 +1758,6 @@ Genesis::Genesis(Handle<Object> global_object,
  HookUpGlobalProxy(inner_global, global_proxy);
  InitializeGlobal(inner_global, empty_function);
  InstallJSFunctionResultCaches();
  InitializeNormalizedMapCaches();
  if (!InstallNatives()) return;

  MakeFunctionInstancePrototypeWritable();
@@ -85,7 +85,6 @@ enum ContextLookupFlags {
  V(CONFIGURE_GLOBAL_INDEX, JSFunction, configure_global_fun) \
  V(FUNCTION_CACHE_INDEX, JSObject, function_cache) \
  V(JSFUNCTION_RESULT_CACHES_INDEX, FixedArray, jsfunction_result_caches) \
  V(NORMALIZED_MAP_CACHE_INDEX, NormalizedMapCache, normalized_map_cache) \
  V(RUNTIME_CONTEXT_INDEX, Context, runtime_context) \
  V(CALL_AS_FUNCTION_DELEGATE_INDEX, JSFunction, call_as_function_delegate) \
  V(CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, JSFunction, \
@@ -210,7 +209,6 @@ class Context: public FixedArray {
    CONFIGURE_GLOBAL_INDEX,
    FUNCTION_CACHE_INDEX,
    JSFUNCTION_RESULT_CACHES_INDEX,
    NORMALIZED_MAP_CACHE_INDEX,
    RUNTIME_CONTEXT_INDEX,
    CALL_AS_FUNCTION_DELEGATE_INDEX,
    CALL_AS_CONSTRUCTOR_DELEGATE_INDEX,
src/heap.cc
@@ -570,22 +570,6 @@ void Heap::ClearJSFunctionResultCaches() {
}


class ClearThreadNormalizedMapCachesVisitor: public ThreadVisitor {
  virtual void VisitThread(ThreadLocalTop* top) {
    Context* context = top->context_;
    if (context == NULL) return;
    context->global()->global_context()->normalized_map_cache()->Clear();
  }
};


void Heap::ClearNormalizedMapCaches() {
  if (Bootstrapper::IsActive()) return;
  ClearThreadNormalizedMapCachesVisitor visitor;
  ThreadManager::IterateArchivedThreads(&visitor);
}


#ifdef DEBUG

enum PageWatermarkValidity {
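
The removed Heap::ClearNormalizedMapCaches walks every archived thread and clears the
normalized-map cache reachable from each thread's context, skipping the work while the
bootstrapper is active. A minimal standalone sketch of that visitor shape, using
simplified stand-in types rather than the real V8 classes:

    #include <cstddef>
    #include <vector>

    // Simplified stand-ins for the V8 types involved (illustrative only).
    struct NormalizedMapCacheStub {
      void Clear() { /* drop every cached map */ }
    };
    struct ContextStub {
      NormalizedMapCacheStub* normalized_map_cache;
    };
    struct ThreadStateStub {
      ContextStub* context;  // NULL if the archived thread had no context
    };

    // One callback per archived thread, mirroring ThreadVisitor::VisitThread.
    class ThreadVisitorStub {
     public:
      virtual ~ThreadVisitorStub() {}
      virtual void VisitThread(ThreadStateStub* state) = 0;
    };

    class ClearCachesVisitor : public ThreadVisitorStub {
     public:
      virtual void VisitThread(ThreadStateStub* state) {
        if (state->context == NULL) return;
        state->context->normalized_map_cache->Clear();
      }
    };

    // Stand-in for ThreadManager::IterateArchivedThreads.
    void IterateArchivedThreads(std::vector<ThreadStateStub>* threads,
                                ThreadVisitorStub* visitor) {
      for (std::size_t i = 0; i < threads->size(); ++i) {
        visitor->VisitThread(&(*threads)[i]);
      }
    }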
@@ -776,8 +760,6 @@ void Heap::MarkCompactPrologue(bool is_compacting) {
  CompletelyClearInstanceofCache();

  if (is_compacting) FlushNumberStringCache();

  ClearNormalizedMapCaches();
}


@@ -1022,8 +1022,6 @@ class Heap : public AllStatic {

  static void ClearJSFunctionResultCaches();

  static void ClearNormalizedMapCaches();

  static GCTracer* tracer() { return tracer_; }

 private:
@@ -646,16 +646,6 @@ void Map::MapVerify() {
}


void Map::NormalizedMapVerify() {
  MapVerify();
  ASSERT(instance_descriptors() == Heap::empty_descriptor_array());
  ASSERT(code_cache() == Heap::empty_fixed_array());
  ASSERT(pre_allocated_property_fields() == 0);
  ASSERT(unused_property_fields() == 0);
  ASSERT(scavenger() == Heap::GetScavenger(instance_type(), instance_size()));
}


void CodeCache::CodeCachePrint() {
  HeapObject::PrintHeader("CodeCache");
  PrintF("\n - default_cache: ");
@@ -1371,21 +1361,6 @@ void JSFunctionResultCache::JSFunctionResultCacheVerify() {
}


void NormalizedMapCache::NormalizedMapCacheVerify() {
  FixedArray::cast(this)->Verify();
  if (FLAG_enable_slow_asserts) {
    for (int i = 0; i < length(); i++) {
      Object* e = get(i);
      if (e->IsMap()) {
        Map::cast(e)->NormalizedMapVerify();
      } else {
        ASSERT(e->IsUndefined());
      }
    }
  }
}


#endif  // DEBUG

} }  // namespace v8::internal
@@ -574,18 +574,6 @@ bool Object::IsJSFunctionResultCache() {
}


bool Object::IsNormalizedMapCache() {
  if (!IsFixedArray()) return false;
  if (FixedArray::cast(this)->length() != NormalizedMapCache::kEntries) {
    return false;
  }
#ifdef DEBUG
  reinterpret_cast<NormalizedMapCache*>(this)->NormalizedMapCacheVerify();
#endif
  return true;
}


bool Object::IsCompilationCacheTable() {
  return IsHashTable();
}
@@ -1639,7 +1627,6 @@ CAST_ACCESSOR(FixedArray)
CAST_ACCESSOR(DescriptorArray)
CAST_ACCESSOR(SymbolTable)
CAST_ACCESSOR(JSFunctionResultCache)
CAST_ACCESSOR(NormalizedMapCache)
CAST_ACCESSOR(CompilationCacheTable)
CAST_ACCESSOR(CodeCacheHashTable)
CAST_ACCESSOR(MapCache)
src/objects.cc
@@ -2114,81 +2114,6 @@ PropertyAttributes JSObject::GetLocalPropertyAttribute(String* name) {
}


Object* NormalizedMapCache::Get(Map* fast, PropertyNormalizationMode mode) {
  int index = Hash(fast) % kEntries;
  Object* obj = get(index);

  if (obj->IsMap() && CheckHit(Map::cast(obj), fast, mode)) {
#ifdef DEBUG
    if (FLAG_enable_slow_asserts) {
      // The cached map should match freshly created normalized map bit-by-bit.
      Object* fresh = fast->CopyNormalized(mode);
      if (!fresh->IsFailure()) {
        // Copy the unused byte so that the assertion below works.
        Map::cast(fresh)->address()[Map::kUnusedOffset] =
            Map::cast(obj)->address()[Map::kUnusedOffset];
        ASSERT(memcmp(Map::cast(fresh)->address(),
                      Map::cast(obj)->address(),
                      Map::kSize) == 0);
      }
    }
#endif
    return obj;
  }

  obj = fast->CopyNormalized(mode);
  if (obj->IsFailure()) return obj;
  set(index, obj);
  Counters::normalized_maps.Increment();

  return obj;
}


void NormalizedMapCache::Clear() {
  int entries = length();
  for (int i = 0; i != entries; i++) {
    set_undefined(i);
  }
}


int NormalizedMapCache::Hash(Map* fast) {
  // For performance reasons we only hash the 3 most variable fields of a map:
  // constructor, prototype and bit_field2.

  // Shift away the tag.
  int hash = (static_cast<uint32_t>(
      reinterpret_cast<uintptr_t>(fast->constructor())) >> 2);

  // XOR-ing the prototype and constructor directly yields too many zero bits
  // when the two pointers are close (which is fairly common).
  // To avoid this we shift the prototype 4 bits relatively to the constructor.
  hash ^= (static_cast<uint32_t>(
      reinterpret_cast<uintptr_t>(fast->prototype())) << 2);

  return hash ^ (hash >> 16) ^ fast->bit_field2();
}


bool NormalizedMapCache::CheckHit(Map* slow,
                                  Map* fast,
                                  PropertyNormalizationMode mode) {
#ifdef DEBUG
  slow->NormalizedMapVerify();
#endif
  return
    slow->constructor() == fast->constructor() &&
    slow->prototype() == fast->prototype() &&
    slow->inobject_properties() == ((mode == CLEAR_INOBJECT_PROPERTIES) ?
                                    0 :
                                    fast->inobject_properties()) &&
    slow->instance_type() == fast->instance_type() &&
    slow->bit_field() == fast->bit_field() &&
    slow->bit_field2() == fast->bit_field2();
}


Object* JSObject::NormalizeProperties(PropertyNormalizationMode mode,
                                      int expected_additional_properties) {
  if (!HasFastProperties()) return this;
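
Taken together, Get, Clear, Hash and CheckHit above form a small direct-mapped,
get-or-create cache: hash a few stable fields of the fast map, probe a single slot, and
either reuse the cached normalized map or build a fresh one and overwrite that slot. A
self-contained sketch of the same shape, with a simplified key and hash rather than the
real Map layout:

    #include <cstdint>
    #include <cstring>

    // Stand-in for the fields CheckHit compares (illustrative only).
    struct FastMapKey {
      const void* constructor;
      const void* prototype;
      uint8_t bit_field2;
    };

    template <typename Value>
    class DirectMappedCache {
     public:
      static const int kEntries = 64;  // same size as NormalizedMapCache::kEntries

      DirectMappedCache() { Clear(); }

      // Get-or-create: probe exactly one slot; on a miss, build and overwrite it.
      template <typename Factory>
      Value* GetOrCreate(const FastMapKey& key, Factory make) {
        Entry& e = entries_[Hash(key) % kEntries];
        if (e.value != NULL && Matches(e.key, key)) return e.value;  // cache hit
        e.key = key;
        e.value = make(key);  // later lookups with the same key share this value
        return e.value;
      }

      void Clear() { std::memset(entries_, 0, sizeof(entries_)); }

     private:
      struct Entry {
        FastMapKey key;
        Value* value;
      };

      // Same spirit as NormalizedMapCache::Hash: mix a few variable fields and
      // shift the two pointers against each other so they do not cancel out.
      static uint32_t Hash(const FastMapKey& key) {
        uint32_t hash = static_cast<uint32_t>(
            reinterpret_cast<uintptr_t>(key.constructor) >> 2);
        hash ^= static_cast<uint32_t>(
            reinterpret_cast<uintptr_t>(key.prototype) << 2);
        return hash ^ (hash >> 16) ^ key.bit_field2;
      }

      static bool Matches(const FastMapKey& a, const FastMapKey& b) {
        return a.constructor == b.constructor &&
               a.prototype == b.prototype &&
               a.bit_field2 == b.bit_field2;
      }

      Entry entries_[kEntries];
    };

Heap::ClearNormalizedMapCaches in the src/heap.cc hunk corresponds to calling Clear()
on every context's cache, which MarkCompactPrologue does at the start of mark-compact.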
@@ -2253,22 +2178,29 @@ Object* JSObject::NormalizeProperties(PropertyNormalizationMode mode,
  int index = map()->instance_descriptors()->NextEnumerationIndex();
  dictionary->SetNextEnumerationIndex(index);

  obj = Top::context()->global_context()->
      normalized_map_cache()->Get(map(), mode);
  // Allocate new map.
  obj = map()->CopyDropDescriptors();
  if (obj->IsFailure()) return obj;
  Map* new_map = Map::cast(obj);

  // Clear inobject properties if needed by adjusting the instance size and
  // putting in a filler object instead of the inobject properties.
  if (mode == CLEAR_INOBJECT_PROPERTIES && map()->inobject_properties() > 0) {
    int instance_size_delta = map()->inobject_properties() * kPointerSize;
    int new_instance_size = map()->instance_size() - instance_size_delta;
    new_map->set_inobject_properties(0);
    new_map->set_instance_size(new_instance_size);
    new_map->set_scavenger(Heap::GetScavenger(new_map->instance_type(),
                                              new_map->instance_size()));
    Heap::CreateFillerObjectAt(this->address() + new_instance_size,
                               instance_size_delta);
  }
  new_map->set_unused_property_fields(0);

  // We have now successfully allocated all the necessary objects.
  // Changes can now be made with the guarantee that all of them take effect.

  // Resize the object in the heap if necessary.
  int new_instance_size = new_map->instance_size();
  int instance_size_delta = map()->instance_size() - new_instance_size;
  ASSERT(instance_size_delta >= 0);
  Heap::CreateFillerObjectAt(this->address() + new_instance_size,
                             instance_size_delta);

  set_map(new_map);
  map()->set_instance_descriptors(Heap::empty_descriptor_array());

  set_properties(dictionary);

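
Read as a revert, the hunk above swaps the shared-cache lookup in
JSObject::NormalizeProperties back out for the original per-object path; the pairing
below is an inference from the direction of the revert:

    // Removed by this revert (added in r5147): fetch a shared normalized map
    // from the per-context cache.
    obj = Top::context()->global_context()->
        normalized_map_cache()->Get(map(), mode);

    // Restored by this revert: build a fresh map for the object being normalized,
    // then shrink the object in place if in-object properties are cleared.
    obj = map()->CopyDropDescriptors();
    if (obj->IsFailure()) return obj;
    Map* new_map = Map::cast(obj);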
@@ -3164,33 +3096,6 @@ Object* Map::CopyDropDescriptors() {
}


Object* Map::CopyNormalized(PropertyNormalizationMode mode) {
  int new_instance_size = instance_size();
  if (mode == CLEAR_INOBJECT_PROPERTIES) {
    new_instance_size -= inobject_properties() * kPointerSize;
  }

  Object* result = Heap::AllocateMap(instance_type(), new_instance_size);
  if (result->IsFailure()) return result;

  if (mode != CLEAR_INOBJECT_PROPERTIES) {
    Map::cast(result)->set_inobject_properties(inobject_properties());
  }

  Map::cast(result)->set_prototype(prototype());
  Map::cast(result)->set_constructor(constructor());

  Map::cast(result)->set_bit_field(bit_field());
  Map::cast(result)->set_bit_field2(bit_field2());

#ifdef DEBUG
  Map::cast(result)->NormalizedMapVerify();
#endif

  return result;
}


Object* Map::CopyDropTransitions() {
  Object* new_map = CopyDropDescriptors();
  if (new_map->IsFailure()) return new_map;
@@ -631,7 +631,6 @@ class Object BASE_EMBEDDED {
  inline bool IsDictionary();
  inline bool IsSymbolTable();
  inline bool IsJSFunctionResultCache();
  inline bool IsNormalizedMapCache();
  inline bool IsCompilationCacheTable();
  inline bool IsCodeCacheHashTable();
  inline bool IsMapCache();
@@ -2388,31 +2387,6 @@ class JSFunctionResultCache: public FixedArray {
};


// The cache for maps used by normalized (dictionary mode) objects.
// Such maps do not have property descriptors, so a typical program
// needs very limited number of distinct normalized maps.
class NormalizedMapCache: public FixedArray {
 public:
  static const int kEntries = 64;

  Object* Get(Map* fast, PropertyNormalizationMode mode);

  void Clear();

  // Casting
  static inline NormalizedMapCache* cast(Object* obj);

#ifdef DEBUG
  void NormalizedMapCacheVerify();
#endif

 private:
  static int Hash(Map* fast);

  static bool CheckHit(Map* slow, Map* fast, PropertyNormalizationMode mode);
};


// ByteArray represents fixed sized byte arrays. Used by the outside world,
// such as PCRE, and also by the memory allocator and garbage collector to
// fill in free blocks in the heap.
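
For orientation, the cache declared above hangs off the global context; the other hunks
in this revert touch it in three places (lines quoted from this commit):

    // Bootstrapping (Genesis::InitializeNormalizedMapCaches): install a fresh
    // cache in the new global context.
    global_context()->set_normalized_map_cache(NormalizedMapCache::cast(*array));

    // src/heap.cc: clear each archived thread's cache (skipped while bootstrapping).
    context->global()->global_context()->normalized_map_cache()->Clear();

    // src/objects.cc: consult the cache when normalizing an object's properties.
    obj = Top::context()->global_context()->
        normalized_map_cache()->Get(map(), mode);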
@@ -3056,8 +3030,6 @@ class Map: public HeapObject {

  Object* CopyDropDescriptors();

  Object* CopyNormalized(PropertyNormalizationMode mode);

  // Returns a copy of the map, with all transitions dropped from the
  // instance descriptors.
  Object* CopyDropTransitions();
@@ -3121,7 +3093,6 @@ class Map: public HeapObject {
#ifdef DEBUG
  void MapPrint();
  void MapVerify();
  void NormalizedMapVerify();
#endif

  inline Scavenger scavenger();
@@ -3160,8 +3131,6 @@ class Map: public HeapObject {
  static const int kPreAllocatedPropertyFieldsOffset =
      kInstanceSizesOffset + kPreAllocatedPropertyFieldsByte;
  // The byte at position 3 is not in use at the moment.
  static const int kUnusedByte = 3;
  static const int kUnusedOffset = kInstanceSizesOffset + kUnusedByte;

  // Byte offsets within kInstanceAttributesOffset attributes.
  static const int kInstanceTypeOffset = kInstanceAttributesOffset + 0;
@@ -67,7 +67,6 @@ namespace internal {
  SC(pcre_mallocs, V8.PcreMallocCount) \
  /* OS Memory allocated */ \
  SC(memory_allocated, V8.OsMemoryAllocated) \
  SC(normalized_maps, V8.NormalizedMaps) \
  SC(props_to_dictionary, V8.ObjectPropertiesToDictionary) \
  SC(elements_to_dictionary, V8.ObjectElementsToDictionary) \
  SC(alive_after_last_gc, V8.AliveAfterLastGC) \