[builtins] HasOwnProperty: handle non-internalized string keys

Taking the slow runtime path for every non-internalized string key
can be avoided by doing an optimistic string table lookup: if there
is a matching entry, use it; if there isn't, no existing object can
have a property with that name. The hashing/internalizing logic
lives in C++ and is called directly from the generated code.

Review-Url: https://codereview.chromium.org/2811333002
Cr-Commit-Position: refs/heads/master@{#44650}
Author: jkummerow
Date: 2017-04-13 07:41:22 -07:00
Committed by: Commit bot
Parent: fa0066d170
Commit: 204989a5aa
16 changed files with 323 additions and 62 deletions
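
In outline, the fast path added here behaves like the following standalone C++ sketch. It is illustrative only: StringTableModel, lookup_if_exists, and HasOwnPropertyFastPath are made-up names, not V8 API; the real lookup is StringTable::LookupStringIfExists_NoAllocate, shown later in this diff.

// Standalone model of the optimization, not V8 code.
#include <functional>
#include <optional>
#include <string>
#include <unordered_set>

struct StringTableModel {
  std::unordered_set<std::string> internalized;

  // Optimistic lookup: hash and compare, but never insert.
  std::optional<std::string> lookup_if_exists(const std::string& key) const {
    auto it = internalized.find(key);
    if (it == internalized.end()) return std::nullopt;
    return *it;
  }
};

// HasOwnProperty with a non-internalized string key: if the key is not in the
// string table, no existing object can have a property with that name, so the
// answer is false without ever taking the slow runtime path.
bool HasOwnPropertyFastPath(
    const StringTableModel& table, const std::string& key,
    const std::function<bool(const std::string&)>& lookup_property) {
  std::optional<std::string> internalized = table.lookup_if_exists(key);
  if (!internalized) return false;        // definitely not a property name
  return lookup_property(*internalized);  // probe with the canonical key
}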


@@ -1562,6 +1562,12 @@ ExternalReference ExternalReference::libc_memset_function(Isolate* isolate) {
return ExternalReference(Redirect(isolate, FUNCTION_ADDR(libc_memset)));
}
ExternalReference ExternalReference::try_internalize_string_function(
Isolate* isolate) {
return ExternalReference(Redirect(
isolate, FUNCTION_ADDR(StringTable::LookupStringIfExists_NoAllocate)));
}
ExternalReference ExternalReference::page_flags(Page* page) {
return ExternalReference(reinterpret_cast<Address>(page) +
MemoryChunk::kFlagsOffset);


@@ -992,6 +992,8 @@ class ExternalReference BASE_EMBEDDED {
static ExternalReference libc_memcpy_function(Isolate* isolate);
static ExternalReference libc_memset_function(Isolate* isolate);
static ExternalReference try_internalize_string_function(Isolate* isolate);
static ExternalReference page_flags(Page* page);
static ExternalReference ForDeoptEntry(Address entry);


@@ -68,19 +68,31 @@ TF_BUILTIN(ObjectHasOwnProperty, ObjectBuiltinsAssembler) {
VARIABLE(var_index, MachineType::PointerRepresentation());
VARIABLE(var_unique, MachineRepresentation::kTagged);
Label keyisindex(this), if_iskeyunique(this);
TryToName(key, &keyisindex, &var_index, &if_iskeyunique, &var_unique,
&call_runtime);
Label if_index(this), if_unique_name(this), if_notunique_name(this);
TryToName(key, &if_index, &var_index, &if_unique_name, &var_unique,
&call_runtime, &if_notunique_name);
BIND(&if_iskeyunique);
BIND(&if_unique_name);
TryHasOwnProperty(object, map, instance_type, var_unique.value(),
&return_true, &return_false, &call_runtime);
BIND(&keyisindex);
// Handle negative keys in the runtime.
GotoIf(IntPtrLessThan(var_index.value(), IntPtrConstant(0)), &call_runtime);
TryLookupElement(object, map, instance_type, var_index.value(),
&return_true, &return_false, &return_false, &call_runtime);
BIND(&if_index);
{
// Handle negative keys in the runtime.
GotoIf(IntPtrLessThan(var_index.value(), IntPtrConstant(0)),
&call_runtime);
TryLookupElement(object, map, instance_type, var_index.value(),
&return_true, &return_false, &return_false,
&call_runtime);
}
BIND(&if_notunique_name);
{
// If the string was not found in the string table, then no object can
// have a property with that name, so return |false|.
TryInternalizeString(key, &if_index, &var_index, &if_unique_name,
&var_unique, &return_false, &call_runtime);
}
}
BIND(&return_true);
Return(BooleanConstant(true));


@@ -4468,7 +4468,8 @@ void CodeStubAssembler::Use(Label* label) {
void CodeStubAssembler::TryToName(Node* key, Label* if_keyisindex,
Variable* var_index, Label* if_keyisunique,
Variable* var_unique, Label* if_bailout) {
Variable* var_unique, Label* if_bailout,
Label* if_notinternalized) {
DCHECK_EQ(MachineType::PointerRepresentation(), var_index->rep());
DCHECK_EQ(MachineRepresentation::kTagged, var_unique->rep());
Comment("TryToName");
@@ -4507,7 +4508,8 @@ void CodeStubAssembler::TryToName(Node* key, Label* if_keyisindex,
STATIC_ASSERT(kNotInternalizedTag != 0);
Node* not_internalized =
Word32And(key_instance_type, Int32Constant(kIsNotInternalizedMask));
GotoIf(Word32NotEqual(not_internalized, Int32Constant(0)), if_bailout);
GotoIf(Word32NotEqual(not_internalized, Int32Constant(0)),
if_notinternalized != nullptr ? if_notinternalized : if_bailout);
Goto(if_keyisunique);
BIND(&if_thinstring);
@@ -4519,6 +4521,30 @@ void CodeStubAssembler::TryToName(Node* key, Label* if_keyisindex,
Goto(if_keyisindex);
}
void CodeStubAssembler::TryInternalizeString(
Node* string, Label* if_index, Variable* var_index, Label* if_internalized,
Variable* var_internalized, Label* if_not_internalized, Label* if_bailout) {
DCHECK(var_index->rep() == MachineType::PointerRepresentation());
DCHECK(var_internalized->rep() == MachineRepresentation::kTagged);
Node* function = ExternalConstant(
ExternalReference::try_internalize_string_function(isolate()));
Node* result = CallCFunction1(MachineType::AnyTagged(),
MachineType::AnyTagged(), function, string);
Label internalized(this);
GotoIf(TaggedIsNotSmi(result), &internalized);
Node* word_result = SmiUntag(result);
GotoIf(WordEqual(word_result, IntPtrConstant(ResultSentinel::kNotFound)),
if_not_internalized);
GotoIf(WordEqual(word_result, IntPtrConstant(ResultSentinel::kUnsupported)),
if_bailout);
var_index->Bind(word_result);
Goto(if_index);
BIND(&internalized);
var_internalized->Bind(result);
Goto(if_internalized);
}
template <typename Dictionary>
Node* CodeStubAssembler::EntryToIndex(Node* entry, int field_index) {
Node* entry_index = IntPtrMul(entry, IntPtrConstant(Dictionary::kEntrySize));
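
TryInternalizeString above decodes the value returned from the C++ side (StringTable::LookupStringIfExists_NoAllocate): a non-Smi result is the internalized string, a non-negative Smi is a cached array index, and the negative Smis are the ResultSentinel values. A rough standalone sketch of that dispatch follows; SentinelModel, InternalizedStringModel, Outcome, and Classify are illustrative names, not V8 API.

// Standalone model of the result protocol, not V8 code.
#include <cstdint>
#include <variant>

// Same values as the ResultSentinel enum added in this change.
enum SentinelModel : intptr_t { kNotFound = -1, kUnsupported = -2 };

struct InternalizedStringModel {};  // stands in for a tagged String* result

enum class Outcome { kIndex, kInternalized, kNotInTable, kBailout };

// Mirrors the checks emitted above: a non-Smi result means internalized;
// otherwise untag and compare against the sentinels; anything else is a
// valid (non-negative) array index.
Outcome Classify(const std::variant<InternalizedStringModel, intptr_t>& result) {
  if (std::holds_alternative<InternalizedStringModel>(result)) {
    return Outcome::kInternalized;                      // -> |if_internalized|
  }
  intptr_t value = std::get<intptr_t>(result);
  if (value == kNotFound) return Outcome::kNotInTable;  // -> |if_not_internalized|
  if (value == kUnsupported) return Outcome::kBailout;  // -> |if_bailout|
  return Outcome::kIndex;                               // -> |if_index|
}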


@@ -926,9 +926,24 @@ class V8_EXPORT_PRIVATE CodeStubAssembler : public compiler::CodeAssembler {
void Use(Label* label);
// Various building blocks for stubs doing property lookups.
// |if_notinternalized| is optional; |if_bailout| will be used by default.
void TryToName(Node* key, Label* if_keyisindex, Variable* var_index,
Label* if_keyisunique, Variable* var_unique,
Label* if_bailout);
Label* if_keyisunique, Variable* var_unique, Label* if_bailout,
Label* if_notinternalized = nullptr);
// Performs a hash computation and string table lookup for the given string,
// and jumps to:
// - |if_index| if the string is an array index like "123"; |var_index|
// will contain the intptr representation of that index.
// - |if_internalized| if the string exists in the string table; the
// internalized version will be in |var_internalized|.
// - |if_not_internalized| if the string is not in the string table (but
// does not add it).
// - |if_bailout| for unsupported cases (e.g. uncachable array index).
void TryInternalizeString(Node* string, Label* if_index, Variable* var_index,
Label* if_internalized, Variable* var_internalized,
Label* if_not_internalized, Label* if_bailout);
// Calculates array index for given dictionary entry and entry field.
// See Dictionary::EntryToIndex().


@@ -709,6 +709,13 @@ Node* CodeAssembler::CallCFunctionN(Signature<MachineType>* signature,
return raw_assembler()->CallN(desc, input_count, inputs);
}
Node* CodeAssembler::CallCFunction1(MachineType return_type,
MachineType arg0_type, Node* function,
Node* arg0) {
return raw_assembler()->CallCFunction1(return_type, arg0_type, function,
arg0);
}
Node* CodeAssembler::CallCFunction2(MachineType return_type,
MachineType arg0_type,
MachineType arg1_type, Node* function,


@@ -407,6 +407,10 @@ class V8_EXPORT_PRIVATE CodeAssembler {
Node* CallCFunctionN(Signature<MachineType>* signature, int input_count,
Node* const* inputs);
// Call to a C function with one argument.
Node* CallCFunction1(MachineType return_type, MachineType arg0_type,
Node* function, Node* arg0);
// Call to a C function with two arguments.
Node* CallCFunction2(MachineType return_type, MachineType arg0_type,
MachineType arg1_type, Node* function, Node* arg0,


@@ -237,6 +237,8 @@ void ExternalReferenceTable::AddReferences(Isolate* isolate) {
"libc_memcpy");
Add(ExternalReference::libc_memset_function(isolate).address(),
"libc_memset");
Add(ExternalReference::try_internalize_string_function(isolate).address(),
"try_internalize_string_function");
Add(ExternalReference::log_enter_external_function(isolate).address(),
"Logger::EnterExternal");
Add(ExternalReference::log_leave_external_function(isolate).address(),


@@ -303,6 +303,9 @@ DEFINE_IMPLICATION(track_field_types, track_heap_object_fields)
DEFINE_BOOL(type_profile, false, "collect type information")
DEFINE_BOOL(feedback_normalization, false,
"feed back normalization to constructors")
// TODO(jkummerow): This currently adds too much load on the stub cache.
DEFINE_BOOL_READONLY(internalize_on_the_fly, false,
"internalize string keys for generic keyed ICs on the fly")
// Flags for optimization types.
DEFINE_BOOL(optimize_for_size, false,


@@ -674,6 +674,8 @@ enum InlineCacheState {
enum WhereToStart { kStartAtReceiver, kStartAtPrototype };
enum ResultSentinel { kNotFound = -1, kUnsupported = -2 };
// The Store Buffer (GC).
typedef enum {
kStoreBufferFullEvent,


@@ -2090,15 +2090,15 @@ void AccessorAssembler::KeyedLoadICGeneric(const LoadICParameters* p) {
VARIABLE(var_index, MachineType::PointerRepresentation());
VARIABLE(var_unique, MachineRepresentation::kTagged);
var_unique.Bind(p->name); // Dummy initialization.
Label if_index(this), if_unique_name(this), slow(this);
Label if_index(this), if_unique_name(this), if_notunique(this), slow(this);
Node* receiver = p->receiver;
GotoIf(TaggedIsSmi(receiver), &slow);
Node* receiver_map = LoadMap(receiver);
Node* instance_type = LoadMapInstanceType(receiver_map);
TryToName(p->name, &if_index, &var_index, &if_unique_name, &var_unique,
&slow);
TryToName(p->name, &if_index, &var_index, &if_unique_name, &var_unique, &slow,
&if_notunique);
BIND(&if_index);
{
@@ -2112,6 +2112,22 @@ void AccessorAssembler::KeyedLoadICGeneric(const LoadICParameters* p) {
var_unique.value(), p, &slow);
}
BIND(&if_notunique);
{
if (FLAG_internalize_on_the_fly) {
Label not_in_string_table(this);
TryInternalizeString(p->name, &if_index, &var_index, &if_unique_name,
&var_unique, &not_in_string_table, &slow);
BIND(&not_in_string_table);
// If the string was not found in the string table, then no object can
// have a property with that name.
Return(UndefinedConstant());
} else {
Goto(&slow);
}
}
BIND(&slow);
{
Comment("KeyedLoadGeneric_slow");


@@ -833,6 +833,11 @@ bool String::HasOnlyOneByteChars() {
IsOneByteRepresentation();
}
bool StringShape::HasOnlyOneByteChars() {
return (type_ & kStringEncodingMask) == kOneByteStringTag ||
(type_ & kOneByteDataHintMask) == kOneByteDataHintTag;
}
bool StringShape::IsCons() {
return (type_ & kStringRepresentationMask) == kConsStringTag;
}


@@ -17510,10 +17510,9 @@ void StringTable::EnsureCapacityForDeserialization(Isolate* isolate,
namespace {
template <class StringClass>
void MigrateExternalStringResource(Isolate* isolate, Handle<String> from,
Handle<String> to) {
Handle<StringClass> cast_from = Handle<StringClass>::cast(from);
Handle<StringClass> cast_to = Handle<StringClass>::cast(to);
void MigrateExternalStringResource(Isolate* isolate, String* from, String* to) {
StringClass* cast_from = StringClass::cast(from);
StringClass* cast_to = StringClass::cast(to);
const typename StringClass::Resource* to_resource = cast_to->resource();
if (to_resource == nullptr) {
// |to| is a just-created internalized copy of |from|. Migrate the resource.
@@ -17523,7 +17522,43 @@ void MigrateExternalStringResource(Isolate* isolate, Handle<String> from,
cast_from->set_resource(nullptr);
} else if (to_resource != cast_from->resource()) {
// |to| already existed and has its own resource. Finalize |from|.
isolate->heap()->FinalizeExternalString(*from);
isolate->heap()->FinalizeExternalString(from);
}
}
void MakeStringThin(String* string, String* internalized, Isolate* isolate) {
if (string->IsExternalString()) {
if (internalized->IsExternalOneByteString()) {
MigrateExternalStringResource<ExternalOneByteString>(isolate, string,
internalized);
} else if (internalized->IsExternalTwoByteString()) {
MigrateExternalStringResource<ExternalTwoByteString>(isolate, string,
internalized);
} else {
// If the external string is duped into an existing non-external
// internalized string, free its resource (it's about to be rewritten
// into a ThinString below).
isolate->heap()->FinalizeExternalString(string);
}
}
if (!string->IsInternalizedString()) {
DisallowHeapAllocation no_gc;
bool one_byte = internalized->IsOneByteRepresentation();
Handle<Map> map = one_byte ? isolate->factory()->thin_one_byte_string_map()
: isolate->factory()->thin_string_map();
int old_size = string->Size();
DCHECK(old_size >= ThinString::kSize);
string->synchronized_set_map(*map);
ThinString* thin = ThinString::cast(string);
thin->set_actual(internalized);
Address thin_end = thin->address() + ThinString::kSize;
int size_delta = old_size - ThinString::kSize;
if (size_delta != 0) {
Heap* heap = isolate->heap();
heap->CreateFillerObjectAt(thin_end, size_delta, ClearRecordedSlots::kNo);
heap->AdjustLiveBytes(thin, -size_delta);
}
}
}
@@ -17544,44 +17579,7 @@ Handle<String> StringTable::LookupString(Isolate* isolate,
Handle<String> result = LookupKey(isolate, &key);
if (FLAG_thin_strings) {
if (string->IsExternalString()) {
if (result->IsExternalOneByteString()) {
MigrateExternalStringResource<ExternalOneByteString>(isolate, string,
result);
} else if (result->IsExternalTwoByteString()) {
MigrateExternalStringResource<ExternalTwoByteString>(isolate, string,
result);
} else {
// If the external string is duped into an existing non-external
// internalized string, free its resource (it's about to be rewritten
// into a ThinString below).
isolate->heap()->FinalizeExternalString(*string);
}
}
// The LookupKey() call above tries to internalize the string in-place.
// In cases where that wasn't possible (e.g. new-space strings), turn them
// into ThinStrings referring to their internalized versions now.
if (!string->IsInternalizedString()) {
DisallowHeapAllocation no_gc;
bool one_byte = result->IsOneByteRepresentation();
Handle<Map> map = one_byte
? isolate->factory()->thin_one_byte_string_map()
: isolate->factory()->thin_string_map();
int old_size = string->Size();
DCHECK(old_size >= ThinString::kSize);
string->synchronized_set_map(*map);
Handle<ThinString> thin = Handle<ThinString>::cast(string);
thin->set_actual(*result);
Address thin_end = thin->address() + ThinString::kSize;
int size_delta = old_size - ThinString::kSize;
if (size_delta != 0) {
Heap* heap = isolate->heap();
heap->CreateFillerObjectAt(thin_end, size_delta,
ClearRecordedSlots::kNo);
heap->AdjustLiveBytes(*thin, -size_delta);
}
}
MakeStringThin(*string, *result, isolate);
} else { // !FLAG_thin_strings
if (string->IsConsString()) {
Handle<ConsString> cons = Handle<ConsString>::cast(string);
@@ -17631,10 +17629,172 @@ Handle<String> StringTable::LookupKey(Isolate* isolate, HashTableKey* key) {
return Handle<String>::cast(string);
}
namespace {
class StringTableNoAllocateKey : public HashTableKey {
public:
StringTableNoAllocateKey(String* string, uint32_t seed)
: string_(string), length_(string->length()) {
StringShape shape(string);
one_byte_ = shape.HasOnlyOneByteChars();
DCHECK(!shape.IsInternalized());
DCHECK(!shape.IsThin());
if (shape.IsCons() && length_ <= String::kMaxHashCalcLength) {
special_flattening_ = true;
uint32_t hash_field = 0;
if (one_byte_) {
one_byte_content_ = new uint8_t[length_];
String::WriteToFlat(string, one_byte_content_, 0, length_);
hash_field = StringHasher::HashSequentialString(one_byte_content_,
length_, seed);
} else {
two_byte_content_ = new uint16_t[length_];
String::WriteToFlat(string, two_byte_content_, 0, length_);
hash_field = StringHasher::HashSequentialString(two_byte_content_,
length_, seed);
}
string->set_hash_field(hash_field);
} else {
special_flattening_ = false;
}
hash_ = string->Hash();
}
~StringTableNoAllocateKey() {
if (one_byte_) {
delete[] one_byte_content_;
} else {
delete[] two_byte_content_;
}
}
bool IsMatch(Object* otherstring) override {
String* other = String::cast(otherstring);
DCHECK(other->IsInternalizedString());
DCHECK(other->IsFlat());
if (hash_ != other->Hash()) return false;
int len = length_;
if (len != other->length()) return false;
if (!special_flattening_) {
if (string_->Get(0) != other->Get(0)) return false;
if (string_->IsFlat()) {
StringShape shape1(string_);
StringShape shape2(other);
if (shape1.encoding_tag() == kOneByteStringTag &&
shape2.encoding_tag() == kOneByteStringTag) {
String::FlatContent flat1 = string_->GetFlatContent();
String::FlatContent flat2 = other->GetFlatContent();
return CompareRawStringContents(flat1.ToOneByteVector().start(),
flat2.ToOneByteVector().start(), len);
}
if (shape1.encoding_tag() == kTwoByteStringTag &&
shape2.encoding_tag() == kTwoByteStringTag) {
String::FlatContent flat1 = string_->GetFlatContent();
String::FlatContent flat2 = other->GetFlatContent();
return CompareRawStringContents(flat1.ToUC16Vector().start(),
flat2.ToUC16Vector().start(), len);
}
}
StringComparator comparator;
return comparator.Equals(string_, other);
}
String::FlatContent flat_content = other->GetFlatContent();
if (one_byte_) {
if (flat_content.IsOneByte()) {
return CompareRawStringContents(
one_byte_content_, flat_content.ToOneByteVector().start(), len);
} else {
DCHECK(flat_content.IsTwoByte());
for (int i = 0; i < len; i++) {
if (flat_content.Get(i) != one_byte_content_[i]) return false;
}
return true;
}
} else {
if (flat_content.IsTwoByte()) {
return CompareRawStringContents(
two_byte_content_, flat_content.ToUC16Vector().start(), len);
} else {
DCHECK(flat_content.IsOneByte());
for (int i = 0; i < len; i++) {
if (flat_content.Get(i) != two_byte_content_[i]) return false;
}
return true;
}
}
}
uint32_t Hash() override { return hash_; }
uint32_t HashForObject(Object* key) override {
return String::cast(key)->Hash();
}
MUST_USE_RESULT Handle<Object> AsHandle(Isolate* isolate) override {
UNREACHABLE();
return Handle<String>();
}
private:
String* string_;
int length_;
bool one_byte_;
bool special_flattening_;
uint32_t hash_ = 0;
union {
uint8_t* one_byte_content_ = nullptr;
uint16_t* two_byte_content_;
};
};
} // namespace
// static
Object* StringTable::LookupStringIfExists_NoAllocate(String* string) {
DisallowHeapAllocation no_gc;
Heap* heap = string->GetHeap();
Isolate* isolate = heap->isolate();
StringTable* table = heap->string_table();
StringTableNoAllocateKey key(string, heap->HashSeed());
// String could be an array index.
DCHECK(string->HasHashCode());
uint32_t hash = string->hash_field();
// Valid array indices are >= 0, so they cannot be mixed up with any of
// the result sentinels, which are negative.
STATIC_ASSERT(
!String::ArrayIndexValueBits::is_valid(ResultSentinel::kUnsupported));
STATIC_ASSERT(
!String::ArrayIndexValueBits::is_valid(ResultSentinel::kNotFound));
if ((hash & Name::kContainsCachedArrayIndexMask) == 0) {
return Smi::FromInt(String::ArrayIndexValueBits::decode(hash));
}
if ((hash & Name::kIsNotArrayIndexMask) == 0) {
// It is an index, but it's not cached.
return Smi::FromInt(ResultSentinel::kUnsupported);
}
int entry = table->FindEntry(isolate, &key, key.Hash());
if (entry != kNotFound) {
String* internalized = String::cast(table->KeyAt(entry));
if (FLAG_thin_strings) {
MakeStringThin(string, internalized, isolate);
}
return internalized;
}
// A string that's not an array index, and not in the string table,
// cannot have been used as a property name before.
return Smi::FromInt(ResultSentinel::kNotFound);
}
String* StringTable::LookupKeyIfExists(Isolate* isolate, HashTableKey* key) {
Handle<StringTable> table = isolate->factory()->string_table();
int entry = table->FindEntry(key);
int entry = table->FindEntry(isolate, key);
if (entry != kNotFound) return String::cast(table->KeyAt(entry));
return NULL;
}


@@ -7838,7 +7838,7 @@ class StringShape BASE_EMBEDDED {
inline StringRepresentationTag representation_tag();
inline uint32_t encoding_tag();
inline uint32_t full_representation_tag();
inline uint32_t size_tag();
inline bool HasOnlyOneByteChars();
#ifdef DEBUG
inline uint32_t type() { return type_; }
inline void invalidate() { valid_ = false; }


@@ -58,6 +58,7 @@ class StringTable
Isolate* isolate, Handle<String> str);
MUST_USE_RESULT static MaybeHandle<String> LookupTwoCharsStringIfExists(
Isolate* isolate, uint16_t c1, uint16_t c2);
static Object* LookupStringIfExists_NoAllocate(String* string);
static void EnsureCapacityForDeserialization(Isolate* isolate, int expected);


@@ -313,7 +313,7 @@ class BitFieldBase {
static const T kMax = static_cast<T>((kOne << size) - 1);
// Tells whether the provided value fits into the bit field.
static bool is_valid(T value) {
static constexpr bool is_valid(T value) {
return (static_cast<U>(value) & ~static_cast<U>(kMax)) == 0;
}
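
The constexpr change above is what allows is_valid() to be used inside the STATIC_ASSERTs added in StringTable::LookupStringIfExists_NoAllocate, which verify at compile time that the negative result sentinels can never collide with a valid array index. A simplified standalone sketch of the same idea; BitFieldModel and ArrayIndexValueBitsModel are illustrative, not V8's real BitField, and the 24-bit width is chosen only for the example.

// Standalone sketch, not V8's BitField implementation.
#include <cstdint>

template <class T, int size, class U = uint32_t>
struct BitFieldModel {
  static constexpr U kMax = (static_cast<U>(1) << size) - 1;

  // constexpr makes this usable in static_assert / STATIC_ASSERT.
  static constexpr bool is_valid(T value) {
    return (static_cast<U>(value) & ~kMax) == 0;
  }
};

using ArrayIndexValueBitsModel = BitFieldModel<int, 24>;  // 24-bit field for illustration

static_assert(ArrayIndexValueBitsModel::is_valid(123),
              "indices that fit in the field are valid");
static_assert(!ArrayIndexValueBitsModel::is_valid(-1),
              "negative sentinels never look like valid array indices");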