Reapplying r5147 (caching maps for slow case objects).

r5147 wrongly assumed that the code cache of a slow-case map is always empty.
This patch fixes that: whenever we attempt to add a stub to a map's code cache,
we first check whether the map is in the normalized map cache. If it is, we give
the object its own copy of the map and only then modify it.
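
In code, the check-and-copy boils down to the following condensed sketch of the
new JSObject::UpdateMapCodeCache (the full version, including counters, is in
objects.cc below):

    Object* JSObject::UpdateMapCodeCache(String* name, Code* code) {
      // If this object's slow-case map is shared through the normalized map
      // cache, clone it first so the code cache change stays private.
      if (!HasFastProperties() &&
          NormalizedMapCache::IsCacheable(this) &&
          Top::context()->global_context()->normalized_map_cache()->
              Contains(map())) {
        Object* copy = map()->CopyNormalized(KEEP_INOBJECT_PROPERTIES);
        if (copy->IsFailure()) return copy;
        set_map(Map::cast(copy));
      }
      return map()->UpdateCodeCache(name, code);
    }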

Review URL: http://codereview.chromium.org/3134027

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@5342 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
kaznacheev@chromium.org 2010-08-25 13:25:54 +00:00
parent feb20d30f8
commit 6bd9d953dd
13 changed files with 311 additions and 74 deletions

View File

@ -232,6 +232,7 @@ class Genesis BASE_EMBEDDED {
bool InstallNatives();
void InstallCustomCallGenerators();
void InstallJSFunctionResultCaches();
void InitializeNormalizedMapCaches();
// Used both for deserialized and from-scratch contexts to add the extensions
// provided.
static bool InstallExtensions(Handle<Context> global_context,
@ -1400,6 +1401,13 @@ void Genesis::InstallJSFunctionResultCaches() {
}
void Genesis::InitializeNormalizedMapCaches() {
Handle<FixedArray> array(
Factory::NewFixedArray(NormalizedMapCache::kEntries, TENURED));
global_context()->set_normalized_map_cache(NormalizedMapCache::cast(*array));
}
int BootstrapperActive::nesting_ = 0;
@ -1768,6 +1776,7 @@ Genesis::Genesis(Handle<Object> global_object,
HookUpGlobalProxy(inner_global, global_proxy);
InitializeGlobal(inner_global, empty_function);
InstallJSFunctionResultCaches();
InitializeNormalizedMapCaches();
if (!InstallNatives()) return;
MakeFunctionInstancePrototypeWritable();

View File

@ -86,6 +86,7 @@ enum ContextLookupFlags {
V(CONFIGURE_GLOBAL_INDEX, JSFunction, configure_global_fun) \
V(FUNCTION_CACHE_INDEX, JSObject, function_cache) \
V(JSFUNCTION_RESULT_CACHES_INDEX, FixedArray, jsfunction_result_caches) \
V(NORMALIZED_MAP_CACHE_INDEX, NormalizedMapCache, normalized_map_cache) \
V(RUNTIME_CONTEXT_INDEX, Context, runtime_context) \
V(CALL_AS_FUNCTION_DELEGATE_INDEX, JSFunction, call_as_function_delegate) \
V(CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, JSFunction, \
@ -211,6 +212,7 @@ class Context: public FixedArray {
CONFIGURE_GLOBAL_INDEX,
FUNCTION_CACHE_INDEX,
JSFUNCTION_RESULT_CACHES_INDEX,
NORMALIZED_MAP_CACHE_INDEX,
RUNTIME_CONTEXT_INDEX,
CALL_AS_FUNCTION_DELEGATE_INDEX,
CALL_AS_CONSTRUCTOR_DELEGATE_INDEX,

View File

@ -571,6 +571,22 @@ void Heap::ClearJSFunctionResultCaches() {
}
class ClearThreadNormalizedMapCachesVisitor: public ThreadVisitor {
virtual void VisitThread(ThreadLocalTop* top) {
Context* context = top->context_;
if (context == NULL) return;
context->global()->global_context()->normalized_map_cache()->Clear();
}
};
void Heap::ClearNormalizedMapCaches() {
if (Bootstrapper::IsActive()) return;
ClearThreadNormalizedMapCachesVisitor visitor;
ThreadManager::IterateArchivedThreads(&visitor);
}
#ifdef DEBUG
enum PageWatermarkValidity {
@ -755,6 +771,8 @@ void Heap::MarkCompactPrologue(bool is_compacting) {
CompletelyClearInstanceofCache();
if (is_compacting) FlushNumberStringCache();
ClearNormalizedMapCaches();
}

View File

@ -1021,6 +1021,8 @@ class Heap : public AllStatic {
static void ClearJSFunctionResultCaches();
static void ClearNormalizedMapCaches();
static GCTracer* tracer() { return tracer_; }
private:

View File

@ -108,10 +108,10 @@ InlineCacheHolderFlag IC::GetCodeCacheForObject(JSObject* object,
}
Map* IC::GetCodeCacheMap(Object* object, InlineCacheHolderFlag holder) {
JSObject* IC::GetCodeCacheHolder(Object* object, InlineCacheHolderFlag holder) {
Object* map_owner = (holder == OWN_MAP ? object : object->GetPrototype());
ASSERT(map_owner->IsJSObject());
return JSObject::cast(map_owner)->map();
return JSObject::cast(map_owner);
}

View File

@ -165,14 +165,14 @@ IC::State IC::StateFrom(Code* target, Object* receiver, Object* name) {
if (cache_holder == OWN_MAP && !receiver->IsJSObject()) {
// The stub was generated for JSObject but called for non-JSObject.
// IC::GetCodeCacheMap is not applicable.
// IC::GetCodeCacheHolder is not applicable.
return MONOMORPHIC;
} else if (cache_holder == PROTOTYPE_MAP &&
receiver->GetPrototype()->IsNull()) {
// IC::GetCodeCacheMap is not applicable.
// IC::GetCodeCacheHolder is not applicable.
return MONOMORPHIC;
}
Map* map = IC::GetCodeCacheMap(receiver, cache_holder);
Map* map = IC::GetCodeCacheHolder(receiver, cache_holder)->map();
// Decide whether the inline cache failed because of changes to the
// receiver itself or changes to one of its prototypes.

View File

@ -123,7 +123,7 @@ class IC {
JSObject* holder);
static inline InlineCacheHolderFlag GetCodeCacheForObject(JSObject* object,
JSObject* holder);
static inline Map* GetCodeCacheMap(Object* object,
static inline JSObject* GetCodeCacheHolder(Object* object,
InlineCacheHolderFlag holder);
protected:

View File

@ -30,6 +30,7 @@
#include "disassembler.h"
#include "disasm.h"
#include "jsregexp.h"
#include "objects-visiting.h"
namespace v8 {
namespace internal {
@ -648,6 +649,17 @@ void Map::MapVerify() {
}
void Map::NormalizedMapVerify() {
MapVerify();
ASSERT_EQ(Heap::empty_descriptor_array(), instance_descriptors());
ASSERT_EQ(Heap::empty_fixed_array(), code_cache());
ASSERT_EQ(0, pre_allocated_property_fields());
ASSERT_EQ(0, unused_property_fields());
ASSERT_EQ(StaticVisitorBase::GetVisitorId(instance_type(), instance_size()),
visitor_id());
}
void CodeCache::CodeCachePrint() {
HeapObject::PrintHeader("CodeCache");
PrintF("\n - default_cache: ");
@ -1363,6 +1375,21 @@ void JSFunctionResultCache::JSFunctionResultCacheVerify() {
}
void NormalizedMapCache::NormalizedMapCacheVerify() {
FixedArray::cast(this)->Verify();
if (FLAG_enable_slow_asserts) {
for (int i = 0; i < length(); i++) {
Object* e = get(i);
if (e->IsMap()) {
Map::cast(e)->NormalizedMapVerify();
} else {
ASSERT(e->IsUndefined());
}
}
}
}
#endif // DEBUG
} } // namespace v8::internal

View File

@ -575,6 +575,18 @@ bool Object::IsJSFunctionResultCache() {
}
bool Object::IsNormalizedMapCache() {
if (!IsFixedArray()) return false;
if (FixedArray::cast(this)->length() != NormalizedMapCache::kEntries) {
return false;
}
#ifdef DEBUG
reinterpret_cast<NormalizedMapCache*>(this)->NormalizedMapCacheVerify();
#endif
return true;
}
bool Object::IsCompilationCacheTable() {
return IsHashTable();
}
@ -1660,6 +1672,7 @@ CAST_ACCESSOR(FixedArray)
CAST_ACCESSOR(DescriptorArray)
CAST_ACCESSOR(SymbolTable)
CAST_ACCESSOR(JSFunctionResultCache)
CAST_ACCESSOR(NormalizedMapCache)
CAST_ACCESSOR(CompilationCacheTable)
CAST_ACCESSOR(CodeCacheHashTable)
CAST_ACCESSOR(MapCache)

View File

@ -2098,6 +2098,124 @@ PropertyAttributes JSObject::GetLocalPropertyAttribute(String* name) {
}
bool NormalizedMapCache::IsCacheable(JSObject* object) {
// Caching for global objects is not worth it (there are too few of them).
return !object->IsGlobalObject();
}
Object* NormalizedMapCache::Get(JSObject* obj, PropertyNormalizationMode mode) {
Object* result;
Map* fast = obj->map();
if (!IsCacheable(obj)) {
result = fast->CopyNormalized(mode);
if (result->IsFailure()) return result;
} else {
int index = Hash(fast) % kEntries;
result = get(index);
if (result->IsMap() && CheckHit(Map::cast(result), fast, mode)) {
#ifdef DEBUG
if (FLAG_enable_slow_asserts) {
// Make sure that the new slow map has exactly the same hash as the
// original fast map. This way we can use the hash to check whether a slow
// map is already in the cache (see the Contains method).
ASSERT(Hash(fast) == Hash(Map::cast(result)));
// The cached map should match newly created normalized map bit-by-bit.
Object* fresh = fast->CopyNormalized(mode);
if (!fresh->IsFailure()) {
// Copy the unused byte so that the assertion below works.
Map::cast(fresh)->address()[Map::kUnusedOffset] =
Map::cast(result)->address()[Map::kUnusedOffset];
ASSERT(memcmp(Map::cast(fresh)->address(),
Map::cast(result)->address(),
Map::kSize) == 0);
}
}
#endif
return result;
}
result = fast->CopyNormalized(mode);
if (result->IsFailure()) return result;
set(index, result);
}
Counters::normalized_maps.Increment();
return result;
}
bool NormalizedMapCache::Contains(Map* map) {
// If the map is present in the cache it can only be in one place:
// at the index calculated from the hash. We assume that a slow map has the
// same hash as the fast map it has been generated from.
int index = Hash(map) % kEntries;
return get(index) == map;
}
void NormalizedMapCache::Clear() {
int entries = length();
for (int i = 0; i != entries; i++) {
set_undefined(i);
}
}
int NormalizedMapCache::Hash(Map* fast) {
// For performance reasons we only hash the 3 most variable fields of a map:
// constructor, prototype and bit_field2.
// Shift away the tag.
int hash = (static_cast<uint32_t>(
reinterpret_cast<uintptr_t>(fast->constructor())) >> 2);
// XOR-ing the prototype and constructor directly yields too many zero bits
// when the two pointers are close (which is fairly common).
// To avoid this we shift the prototype 4 bits relative to the constructor.
hash ^= (static_cast<uint32_t>(
reinterpret_cast<uintptr_t>(fast->prototype())) << 2);
return hash ^ (hash >> 16) ^ fast->bit_field2();
}
bool NormalizedMapCache::CheckHit(Map* slow,
Map* fast,
PropertyNormalizationMode mode) {
#ifdef DEBUG
slow->NormalizedMapVerify();
#endif
return
slow->constructor() == fast->constructor() &&
slow->prototype() == fast->prototype() &&
slow->inobject_properties() == ((mode == CLEAR_INOBJECT_PROPERTIES) ?
0 :
fast->inobject_properties()) &&
slow->instance_type() == fast->instance_type() &&
slow->bit_field() == fast->bit_field() &&
slow->bit_field2() == fast->bit_field2();
}
Object* JSObject::UpdateMapCodeCache(String* name, Code* code) {
if (!HasFastProperties() &&
NormalizedMapCache::IsCacheable(this) &&
Top::context()->global_context()->normalized_map_cache()->
Contains(map())) {
// Replace the map with an identical copy that can be safely modified.
Object* obj = map()->CopyNormalized(KEEP_INOBJECT_PROPERTIES);
if (obj->IsFailure()) return obj;
Counters::normalized_maps.Increment();
set_map(Map::cast(obj));
}
return map()->UpdateCodeCache(name, code);
}
Object* JSObject::NormalizeProperties(PropertyNormalizationMode mode,
int expected_additional_properties) {
if (!HasFastProperties()) return this;
@ -2162,28 +2280,22 @@ Object* JSObject::NormalizeProperties(PropertyNormalizationMode mode,
int index = map()->instance_descriptors()->NextEnumerationIndex();
dictionary->SetNextEnumerationIndex(index);
// Allocate new map.
obj = map()->CopyDropDescriptors();
obj = Top::context()->global_context()->
normalized_map_cache()->Get(this, mode);
if (obj->IsFailure()) return obj;
Map* new_map = Map::cast(obj);
// Clear inobject properties if needed by adjusting the instance size and
// putting in a filler object instead of the inobject properties.
if (mode == CLEAR_INOBJECT_PROPERTIES && map()->inobject_properties() > 0) {
int instance_size_delta = map()->inobject_properties() * kPointerSize;
int new_instance_size = map()->instance_size() - instance_size_delta;
new_map->set_inobject_properties(0);
new_map->set_instance_size(new_instance_size);
new_map->set_visitor_id(StaticVisitorBase::GetVisitorId(new_map));
Heap::CreateFillerObjectAt(this->address() + new_instance_size,
instance_size_delta);
}
new_map->set_unused_property_fields(0);
// We have now successfully allocated all the necessary objects.
// Changes can now be made with the guarantee that all of them take effect.
// Resize the object in the heap if necessary.
int new_instance_size = new_map->instance_size();
int instance_size_delta = map()->instance_size() - new_instance_size;
ASSERT(instance_size_delta >= 0);
Heap::CreateFillerObjectAt(this->address() + new_instance_size,
instance_size_delta);
set_map(new_map);
map()->set_instance_descriptors(Heap::empty_descriptor_array());
set_properties(dictionary);
@ -3083,6 +3195,33 @@ Object* Map::CopyDropDescriptors() {
}
Object* Map::CopyNormalized(PropertyNormalizationMode mode) {
int new_instance_size = instance_size();
if (mode == CLEAR_INOBJECT_PROPERTIES) {
new_instance_size -= inobject_properties() * kPointerSize;
}
Object* result = Heap::AllocateMap(instance_type(), new_instance_size);
if (result->IsFailure()) return result;
if (mode != CLEAR_INOBJECT_PROPERTIES) {
Map::cast(result)->set_inobject_properties(inobject_properties());
}
Map::cast(result)->set_prototype(prototype());
Map::cast(result)->set_constructor(constructor());
Map::cast(result)->set_bit_field(bit_field());
Map::cast(result)->set_bit_field2(bit_field2());
#ifdef DEBUG
Map::cast(result)->NormalizedMapVerify();
#endif
return result;
}
Object* Map::CopyDropTransitions() {
Object* new_map = CopyDropDescriptors();
if (new_map->IsFailure()) return new_map;

View File

@ -638,6 +638,7 @@ class Object BASE_EMBEDDED {
inline bool IsDictionary();
inline bool IsSymbolTable();
inline bool IsJSFunctionResultCache();
inline bool IsNormalizedMapCache();
inline bool IsCompilationCacheTable();
inline bool IsCodeCacheHashTable();
inline bool IsMapCache();
@ -1551,6 +1552,8 @@ class JSObject: public HeapObject {
int expected_additional_properties);
Object* NormalizeElements();
Object* UpdateMapCodeCache(String* name, Code* code);
// Transform slow named properties to fast variants.
// Returns failure if allocation failed.
Object* TransformToFastProperties(int unused_property_fields);
@ -2465,6 +2468,35 @@ class JSFunctionResultCache: public FixedArray {
};
// The cache for maps used by normalized (dictionary mode) objects.
// Such maps do not have property descriptors, so a typical program
// needs only a very limited number of distinct normalized maps.
class NormalizedMapCache: public FixedArray {
public:
static const int kEntries = 64;
static bool IsCacheable(JSObject* object);
Object* Get(JSObject* object, PropertyNormalizationMode mode);
bool Contains(Map* map);
void Clear();
// Casting
static inline NormalizedMapCache* cast(Object* obj);
#ifdef DEBUG
void NormalizedMapCacheVerify();
#endif
private:
static int Hash(Map* fast);
static bool CheckHit(Map* slow, Map* fast, PropertyNormalizationMode mode);
};
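
For context, the two ways this cache is consumed elsewhere in this patch can be
condensed as follows (a summary sketch, not additional diff content):

    // In JSObject::NormalizeProperties: reuse or build a shared normalized map.
    Object* obj = Top::context()->global_context()->
        normalized_map_cache()->Get(this, mode);

    // In JSObject::UpdateMapCodeCache: detect a shared map that must be copied
    // before its code cache is modified.
    bool shared = Top::context()->global_context()->
        normalized_map_cache()->Contains(map());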
// ByteArray represents fixed sized byte arrays. Used by the outside world,
// such as PCRE, and also by the memory allocator and garbage collector to
// fill in free blocks in the heap.
@ -3123,6 +3155,8 @@ class Map: public HeapObject {
Object* CopyDropDescriptors();
Object* CopyNormalized(PropertyNormalizationMode mode);
// Returns a copy of the map, with all transitions dropped from the
// instance descriptors.
Object* CopyDropTransitions();
@ -3185,6 +3219,7 @@ class Map: public HeapObject {
#ifdef DEBUG
void MapPrint();
void MapVerify();
void NormalizedMapVerify();
#endif
inline int visitor_id();
@ -3219,6 +3254,8 @@ class Map: public HeapObject {
static const int kPreAllocatedPropertyFieldsOffset =
kInstanceSizesOffset + kPreAllocatedPropertyFieldsByte;
// The byte at position 3 is not in use at the moment.
static const int kUnusedByte = 3;
static const int kUnusedOffset = kInstanceSizesOffset + kUnusedByte;
// Byte offsets within kInstanceAttributesOffset attributes.
static const int kInstanceTypeOffset = kInstanceAttributesOffset + 0;

View File

@ -119,7 +119,7 @@ Object* StubCache::ComputeLoadNonexistent(String* name, JSObject* receiver) {
if (code->IsFailure()) return code;
PROFILE(CodeCreateEvent(Logger::LOAD_IC_TAG, Code::cast(code), cache_name));
Object* result =
receiver->map()->UpdateCodeCache(cache_name, Code::cast(code));
receiver->UpdateMapCodeCache(cache_name, Code::cast(code));
if (result->IsFailure()) return result;
}
return code;
@ -131,15 +131,14 @@ Object* StubCache::ComputeLoadField(String* name,
JSObject* holder,
int field_index) {
ASSERT(IC::GetCodeCacheForObject(receiver, holder) == OWN_MAP);
Map* map = receiver->map();
Code::Flags flags = Code::ComputeMonomorphicFlags(Code::LOAD_IC, FIELD);
Object* code = map->FindInCodeCache(name, flags);
Object* code = receiver->map()->FindInCodeCache(name, flags);
if (code->IsUndefined()) {
LoadStubCompiler compiler;
code = compiler.CompileLoadField(receiver, holder, field_index, name);
if (code->IsFailure()) return code;
PROFILE(CodeCreateEvent(Logger::LOAD_IC_TAG, Code::cast(code), name));
Object* result = map->UpdateCodeCache(name, Code::cast(code));
Object* result = receiver->UpdateMapCodeCache(name, Code::cast(code));
if (result->IsFailure()) return result;
}
return code;
@ -152,15 +151,14 @@ Object* StubCache::ComputeLoadCallback(String* name,
AccessorInfo* callback) {
ASSERT(v8::ToCData<Address>(callback->getter()) != 0);
ASSERT(IC::GetCodeCacheForObject(receiver, holder) == OWN_MAP);
Map* map = receiver->map();
Code::Flags flags = Code::ComputeMonomorphicFlags(Code::LOAD_IC, CALLBACKS);
Object* code = map->FindInCodeCache(name, flags);
Object* code = receiver->map()->FindInCodeCache(name, flags);
if (code->IsUndefined()) {
LoadStubCompiler compiler;
code = compiler.CompileLoadCallback(name, receiver, holder, callback);
if (code->IsFailure()) return code;
PROFILE(CodeCreateEvent(Logger::LOAD_IC_TAG, Code::cast(code), name));
Object* result = map->UpdateCodeCache(name, Code::cast(code));
Object* result = receiver->UpdateMapCodeCache(name, Code::cast(code));
if (result->IsFailure()) return result;
}
return code;
@ -172,16 +170,15 @@ Object* StubCache::ComputeLoadConstant(String* name,
JSObject* holder,
Object* value) {
ASSERT(IC::GetCodeCacheForObject(receiver, holder) == OWN_MAP);
Map* map = receiver->map();
Code::Flags flags =
Code::ComputeMonomorphicFlags(Code::LOAD_IC, CONSTANT_FUNCTION);
Object* code = map->FindInCodeCache(name, flags);
Object* code = receiver->map()->FindInCodeCache(name, flags);
if (code->IsUndefined()) {
LoadStubCompiler compiler;
code = compiler.CompileLoadConstant(receiver, holder, value, name);
if (code->IsFailure()) return code;
PROFILE(CodeCreateEvent(Logger::LOAD_IC_TAG, Code::cast(code), name));
Object* result = map->UpdateCodeCache(name, Code::cast(code));
Object* result = receiver->UpdateMapCodeCache(name, Code::cast(code));
if (result->IsFailure()) return result;
}
return code;
@ -192,15 +189,14 @@ Object* StubCache::ComputeLoadInterceptor(String* name,
JSObject* receiver,
JSObject* holder) {
ASSERT(IC::GetCodeCacheForObject(receiver, holder) == OWN_MAP);
Map* map = receiver->map();
Code::Flags flags = Code::ComputeMonomorphicFlags(Code::LOAD_IC, INTERCEPTOR);
Object* code = map->FindInCodeCache(name, flags);
Object* code = receiver->map()->FindInCodeCache(name, flags);
if (code->IsUndefined()) {
LoadStubCompiler compiler;
code = compiler.CompileLoadInterceptor(receiver, holder, name);
if (code->IsFailure()) return code;
PROFILE(CodeCreateEvent(Logger::LOAD_IC_TAG, Code::cast(code), name));
Object* result = map->UpdateCodeCache(name, Code::cast(code));
Object* result = receiver->UpdateMapCodeCache(name, Code::cast(code));
if (result->IsFailure()) return result;
}
return code;
@ -218,9 +214,8 @@ Object* StubCache::ComputeLoadGlobal(String* name,
JSGlobalPropertyCell* cell,
bool is_dont_delete) {
ASSERT(IC::GetCodeCacheForObject(receiver, holder) == OWN_MAP);
Map* map = receiver->map();
Code::Flags flags = Code::ComputeMonomorphicFlags(Code::LOAD_IC, NORMAL);
Object* code = map->FindInCodeCache(name, flags);
Object* code = receiver->map()->FindInCodeCache(name, flags);
if (code->IsUndefined()) {
LoadStubCompiler compiler;
code = compiler.CompileLoadGlobal(receiver,
@ -230,7 +225,7 @@ Object* StubCache::ComputeLoadGlobal(String* name,
is_dont_delete);
if (code->IsFailure()) return code;
PROFILE(CodeCreateEvent(Logger::LOAD_IC_TAG, Code::cast(code), name));
Object* result = map->UpdateCodeCache(name, Code::cast(code));
Object* result = receiver->UpdateMapCodeCache(name, Code::cast(code));
if (result->IsFailure()) return result;
}
return code;
@ -242,15 +237,14 @@ Object* StubCache::ComputeKeyedLoadField(String* name,
JSObject* holder,
int field_index) {
ASSERT(IC::GetCodeCacheForObject(receiver, holder) == OWN_MAP);
Map* map = receiver->map();
Code::Flags flags = Code::ComputeMonomorphicFlags(Code::KEYED_LOAD_IC, FIELD);
Object* code = map->FindInCodeCache(name, flags);
Object* code = receiver->map()->FindInCodeCache(name, flags);
if (code->IsUndefined()) {
KeyedLoadStubCompiler compiler;
code = compiler.CompileLoadField(name, receiver, holder, field_index);
if (code->IsFailure()) return code;
PROFILE(CodeCreateEvent(Logger::KEYED_LOAD_IC_TAG, Code::cast(code), name));
Object* result = map->UpdateCodeCache(name, Code::cast(code));
Object* result = receiver->UpdateMapCodeCache(name, Code::cast(code));
if (result->IsFailure()) return result;
}
return code;
@ -262,16 +256,15 @@ Object* StubCache::ComputeKeyedLoadConstant(String* name,
JSObject* holder,
Object* value) {
ASSERT(IC::GetCodeCacheForObject(receiver, holder) == OWN_MAP);
Map* map = receiver->map();
Code::Flags flags =
Code::ComputeMonomorphicFlags(Code::KEYED_LOAD_IC, CONSTANT_FUNCTION);
Object* code = map->FindInCodeCache(name, flags);
Object* code = receiver->map()->FindInCodeCache(name, flags);
if (code->IsUndefined()) {
KeyedLoadStubCompiler compiler;
code = compiler.CompileLoadConstant(name, receiver, holder, value);
if (code->IsFailure()) return code;
PROFILE(CodeCreateEvent(Logger::KEYED_LOAD_IC_TAG, Code::cast(code), name));
Object* result = map->UpdateCodeCache(name, Code::cast(code));
Object* result = receiver->UpdateMapCodeCache(name, Code::cast(code));
if (result->IsFailure()) return result;
}
return code;
@ -282,16 +275,15 @@ Object* StubCache::ComputeKeyedLoadInterceptor(String* name,
JSObject* receiver,
JSObject* holder) {
ASSERT(IC::GetCodeCacheForObject(receiver, holder) == OWN_MAP);
Map* map = receiver->map();
Code::Flags flags =
Code::ComputeMonomorphicFlags(Code::KEYED_LOAD_IC, INTERCEPTOR);
Object* code = map->FindInCodeCache(name, flags);
Object* code = receiver->map()->FindInCodeCache(name, flags);
if (code->IsUndefined()) {
KeyedLoadStubCompiler compiler;
code = compiler.CompileLoadInterceptor(receiver, holder, name);
if (code->IsFailure()) return code;
PROFILE(CodeCreateEvent(Logger::KEYED_LOAD_IC_TAG, Code::cast(code), name));
Object* result = map->UpdateCodeCache(name, Code::cast(code));
Object* result = receiver->UpdateMapCodeCache(name, Code::cast(code));
if (result->IsFailure()) return result;
}
return code;
@ -303,16 +295,15 @@ Object* StubCache::ComputeKeyedLoadCallback(String* name,
JSObject* holder,
AccessorInfo* callback) {
ASSERT(IC::GetCodeCacheForObject(receiver, holder) == OWN_MAP);
Map* map = receiver->map();
Code::Flags flags =
Code::ComputeMonomorphicFlags(Code::KEYED_LOAD_IC, CALLBACKS);
Object* code = map->FindInCodeCache(name, flags);
Object* code = receiver->map()->FindInCodeCache(name, flags);
if (code->IsUndefined()) {
KeyedLoadStubCompiler compiler;
code = compiler.CompileLoadCallback(name, receiver, holder, callback);
if (code->IsFailure()) return code;
PROFILE(CodeCreateEvent(Logger::KEYED_LOAD_IC_TAG, Code::cast(code), name));
Object* result = map->UpdateCodeCache(name, Code::cast(code));
Object* result = receiver->UpdateMapCodeCache(name, Code::cast(code));
if (result->IsFailure()) return result;
}
return code;
@ -325,14 +316,13 @@ Object* StubCache::ComputeKeyedLoadArrayLength(String* name,
Code::Flags flags =
Code::ComputeMonomorphicFlags(Code::KEYED_LOAD_IC, CALLBACKS);
ASSERT(receiver->IsJSObject());
Map* map = receiver->map();
Object* code = map->FindInCodeCache(name, flags);
Object* code = receiver->map()->FindInCodeCache(name, flags);
if (code->IsUndefined()) {
KeyedLoadStubCompiler compiler;
code = compiler.CompileLoadArrayLength(name);
if (code->IsFailure()) return code;
PROFILE(CodeCreateEvent(Logger::KEYED_LOAD_IC_TAG, Code::cast(code), name));
Object* result = map->UpdateCodeCache(name, Code::cast(code));
Object* result = receiver->UpdateMapCodeCache(name, Code::cast(code));
if (result->IsFailure()) return result;
}
return code;
@ -361,14 +351,13 @@ Object* StubCache::ComputeKeyedLoadFunctionPrototype(String* name,
JSFunction* receiver) {
Code::Flags flags =
Code::ComputeMonomorphicFlags(Code::KEYED_LOAD_IC, CALLBACKS);
Map* map = receiver->map();
Object* code = map->FindInCodeCache(name, flags);
Object* code = receiver->map()->FindInCodeCache(name, flags);
if (code->IsUndefined()) {
KeyedLoadStubCompiler compiler;
code = compiler.CompileLoadFunctionPrototype(name);
if (code->IsFailure()) return code;
PROFILE(CodeCreateEvent(Logger::KEYED_LOAD_IC_TAG, Code::cast(code), name));
Object* result = map->UpdateCodeCache(name, Code::cast(code));
Object* result = receiver->UpdateMapCodeCache(name, Code::cast(code));
if (result->IsFailure()) return result;
}
return code;
@ -387,7 +376,7 @@ Object* StubCache::ComputeStoreField(String* name,
code = compiler.CompileStoreField(receiver, field_index, transition, name);
if (code->IsFailure()) return code;
PROFILE(CodeCreateEvent(Logger::STORE_IC_TAG, Code::cast(code), name));
Object* result = receiver->map()->UpdateCodeCache(name, Code::cast(code));
Object* result = receiver->UpdateMapCodeCache(name, Code::cast(code));
if (result->IsFailure()) return result;
}
return code;
@ -409,7 +398,7 @@ Object* StubCache::ComputeStoreGlobal(String* name,
code = compiler.CompileStoreGlobal(receiver, cell, name);
if (code->IsFailure()) return code;
PROFILE(CodeCreateEvent(Logger::STORE_IC_TAG, Code::cast(code), name));
Object* result = receiver->map()->UpdateCodeCache(name, Code::cast(code));
Object* result = receiver->UpdateMapCodeCache(name, Code::cast(code));
if (result->IsFailure()) return result;
}
return code;
@ -427,7 +416,7 @@ Object* StubCache::ComputeStoreCallback(String* name,
code = compiler.CompileStoreCallback(receiver, callback, name);
if (code->IsFailure()) return code;
PROFILE(CodeCreateEvent(Logger::STORE_IC_TAG, Code::cast(code), name));
Object* result = receiver->map()->UpdateCodeCache(name, Code::cast(code));
Object* result = receiver->UpdateMapCodeCache(name, Code::cast(code));
if (result->IsFailure()) return result;
}
return code;
@ -444,7 +433,7 @@ Object* StubCache::ComputeStoreInterceptor(String* name,
code = compiler.CompileStoreInterceptor(receiver, name);
if (code->IsFailure()) return code;
PROFILE(CodeCreateEvent(Logger::STORE_IC_TAG, Code::cast(code), name));
Object* result = receiver->map()->UpdateCodeCache(name, Code::cast(code));
Object* result = receiver->UpdateMapCodeCache(name, Code::cast(code));
if (result->IsFailure()) return result;
}
return code;
@ -462,7 +451,7 @@ Object* StubCache::ComputeKeyedStoreField(String* name, JSObject* receiver,
if (code->IsFailure()) return code;
PROFILE(CodeCreateEvent(
Logger::KEYED_STORE_IC_TAG, Code::cast(code), name));
Object* result = receiver->map()->UpdateCodeCache(name, Code::cast(code));
Object* result = receiver->UpdateMapCodeCache(name, Code::cast(code));
if (result->IsFailure()) return result;
}
return code;
@ -481,7 +470,7 @@ Object* StubCache::ComputeCallConstant(int argc,
// Compute the check type and the map.
InlineCacheHolderFlag cache_holder =
IC::GetCodeCacheForObject(object, holder);
Map* map = IC::GetCodeCacheMap(object, cache_holder);
JSObject* map_holder = IC::GetCodeCacheHolder(object, cache_holder);
// Compute check type based on receiver/holder.
StubCompiler::CheckType check = StubCompiler::RECEIVER_MAP_CHECK;
@ -499,7 +488,7 @@ Object* StubCache::ComputeCallConstant(int argc,
cache_holder,
in_loop,
argc);
Object* code = map->FindInCodeCache(name, flags);
Object* code = map_holder->map()->FindInCodeCache(name, flags);
if (code->IsUndefined()) {
// If the function hasn't been compiled yet, we cannot do it now
// because it may cause GC. To avoid this issue, we return an
@ -513,7 +502,7 @@ Object* StubCache::ComputeCallConstant(int argc,
ASSERT_EQ(flags, Code::cast(code)->flags());
PROFILE(CodeCreateEvent(CALL_LOGGER_TAG(kind, CALL_IC_TAG),
Code::cast(code), name));
Object* result = map->UpdateCodeCache(name, Code::cast(code));
Object* result = map_holder->UpdateMapCodeCache(name, Code::cast(code));
if (result->IsFailure()) return result;
}
return code;
@ -530,7 +519,7 @@ Object* StubCache::ComputeCallField(int argc,
// Compute the check type and the map.
InlineCacheHolderFlag cache_holder =
IC::GetCodeCacheForObject(object, holder);
Map* map = IC::GetCodeCacheMap(object, cache_holder);
JSObject* map_holder = IC::GetCodeCacheHolder(object, cache_holder);
// TODO(1233596): We cannot do receiver map check for non-JS objects
// because they may be represented as immediates without a
@ -544,7 +533,7 @@ Object* StubCache::ComputeCallField(int argc,
cache_holder,
in_loop,
argc);
Object* code = map->FindInCodeCache(name, flags);
Object* code = map_holder->map()->FindInCodeCache(name, flags);
if (code->IsUndefined()) {
CallStubCompiler compiler(argc, in_loop, kind, cache_holder);
code = compiler.CompileCallField(JSObject::cast(object),
@ -555,7 +544,7 @@ Object* StubCache::ComputeCallField(int argc,
ASSERT_EQ(flags, Code::cast(code)->flags());
PROFILE(CodeCreateEvent(CALL_LOGGER_TAG(kind, CALL_IC_TAG),
Code::cast(code), name));
Object* result = map->UpdateCodeCache(name, Code::cast(code));
Object* result = map_holder->UpdateMapCodeCache(name, Code::cast(code));
if (result->IsFailure()) return result;
}
return code;
@ -570,7 +559,7 @@ Object* StubCache::ComputeCallInterceptor(int argc,
// Compute the check type and the map.
InlineCacheHolderFlag cache_holder =
IC::GetCodeCacheForObject(object, holder);
Map* map = IC::GetCodeCacheMap(object, cache_holder);
JSObject* map_holder = IC::GetCodeCacheHolder(object, cache_holder);
// TODO(1233596): We cannot do receiver map check for non-JS objects
// because they may be represented as immediates without a
@ -585,7 +574,7 @@ Object* StubCache::ComputeCallInterceptor(int argc,
cache_holder,
NOT_IN_LOOP,
argc);
Object* code = map->FindInCodeCache(name, flags);
Object* code = map_holder->map()->FindInCodeCache(name, flags);
if (code->IsUndefined()) {
CallStubCompiler compiler(argc, NOT_IN_LOOP, kind, cache_holder);
code = compiler.CompileCallInterceptor(JSObject::cast(object),
@ -595,7 +584,7 @@ Object* StubCache::ComputeCallInterceptor(int argc,
ASSERT_EQ(flags, Code::cast(code)->flags());
PROFILE(CodeCreateEvent(CALL_LOGGER_TAG(kind, CALL_IC_TAG),
Code::cast(code), name));
Object* result = map->UpdateCodeCache(name, Code::cast(code));
Object* result = map_holder->UpdateMapCodeCache(name, Code::cast(code));
if (result->IsFailure()) return result;
}
return code;
@ -623,14 +612,14 @@ Object* StubCache::ComputeCallGlobal(int argc,
JSFunction* function) {
InlineCacheHolderFlag cache_holder =
IC::GetCodeCacheForObject(receiver, holder);
Map* map = IC::GetCodeCacheMap(receiver, cache_holder);
JSObject* map_holder = IC::GetCodeCacheHolder(receiver, cache_holder);
Code::Flags flags =
Code::ComputeMonomorphicFlags(kind,
NORMAL,
cache_holder,
in_loop,
argc);
Object* code = map->FindInCodeCache(name, flags);
Object* code = map_holder->map()->FindInCodeCache(name, flags);
if (code->IsUndefined()) {
// If the function hasn't been compiled yet, we cannot do it now
// because it may cause GC. To avoid this issue, we return an
@ -643,7 +632,7 @@ Object* StubCache::ComputeCallGlobal(int argc,
ASSERT_EQ(flags, Code::cast(code)->flags());
PROFILE(CodeCreateEvent(CALL_LOGGER_TAG(kind, CALL_IC_TAG),
Code::cast(code), name));
Object* result = map->UpdateCodeCache(name, Code::cast(code));
Object* result = map_holder->UpdateMapCodeCache(name, Code::cast(code));
if (result->IsFailure()) return result;
}
return code;

View File

@ -67,6 +67,7 @@ namespace internal {
SC(pcre_mallocs, V8.PcreMallocCount) \
/* OS Memory allocated */ \
SC(memory_allocated, V8.OsMemoryAllocated) \
SC(normalized_maps, V8.NormalizedMaps) \
SC(props_to_dictionary, V8.ObjectPropertiesToDictionary) \
SC(elements_to_dictionary, V8.ObjectElementsToDictionary) \
SC(alive_after_last_gc, V8.AliveAfterLastGC) \