[objects.h splitting] Move leftover functions out of objects-inl.h

These functions should have been moved to their per-class -inl.h headers earlier but were left behind in objects-inl.h.

BUG=v8:5402,v8:7109

Change-Id: I9736da35f1ef89628c987c33eed40a07aa266bea
Reviewed-on: https://chromium-review.googlesource.com/803375
Commit-Queue: Marja Hölttä <marja@chromium.org>
Reviewed-by: Igor Sheludko <ishell@chromium.org>
Cr-Commit-Position: refs/heads/master@{#49789}
This commit is contained in:
Marja Hölttä 2017-12-01 10:26:39 +01:00 committed by Commit Bot
parent bd839c551b
commit 2b1f79881c
3 changed files with 882 additions and 915 deletions

View File

@ -105,7 +105,6 @@ TYPE_CHECKER(JSStringIterator, JS_STRING_ITERATOR_TYPE)
TYPE_CHECKER(JSValue, JS_VALUE_TYPE)
TYPE_CHECKER(JSWeakMap, JS_WEAK_MAP_TYPE)
TYPE_CHECKER(JSWeakSet, JS_WEAK_SET_TYPE)
TYPE_CHECKER(Map, MAP_TYPE)
TYPE_CHECKER(MutableHeapNumber, MUTABLE_HEAP_NUMBER_TYPE)
TYPE_CHECKER(Oddball, ODDBALL_TYPE)
TYPE_CHECKER(PreParsedScopeData, TUPLE2_TYPE)
@ -1076,11 +1075,6 @@ int HeapNumber::get_sign() {
return READ_INT_FIELD(this, kExponentOffset) & kSignMask;
}
// Returns the value stored in hash-map |entry|. The value slot sits at a
// fixed offset (kValueOffset) after the entry's key slot.
inline Object* OrderedHashMap::ValueAt(int entry) {
DCHECK_LT(entry, this->UsedCapacity());
return get(EntryToIndex(entry) + kValueOffset);
}
ACCESSORS(JSReceiver, raw_properties_or_hash, Object, kPropertiesOrHashOffset)
FixedArrayBase* JSObject::elements() const {
@ -1440,6 +1434,15 @@ inline bool IsSpecialReceiverInstanceType(InstanceType instance_type) {
return instance_type <= LAST_SPECIAL_RECEIVER_TYPE;
}
// This should be in objects/map-inl.h, but can't, because of a cyclic
// dependency.
// True when the instance type needs special receiver handling. The DCHECK
// asserts the converse direction: a non-special map must have neither a
// named interceptor nor an access check.
bool Map::IsSpecialReceiverMap() const {
bool result = IsSpecialReceiverInstanceType(instance_type());
DCHECK_IMPLIES(!result,
!has_named_interceptor() && !is_access_check_needed());
return result;
}
// static
int JSObject::GetEmbedderFieldCount(const Map* map) {
int instance_size = map->instance_size();
@ -1490,13 +1493,6 @@ bool JSObject::IsUnboxedDoubleField(FieldIndex index) {
return map()->IsUnboxedDoubleField(index);
}
// True if the in-object field at |index| holds a raw (unboxed) double.
// Only possible with FLAG_unbox_double_fields, only for real in-object
// fields, and only when the layout descriptor marks the slot as untagged.
bool Map::IsUnboxedDoubleField(FieldIndex index) const {
if (!FLAG_unbox_double_fields) return false;
if (index.is_hidden_field() || !index.is_inobject()) return false;
return !layout_descriptor()->IsTagged(index.property_index());
}
// Access fast-case object properties at index. The use of these routines
// is needed to correctly distinguish between properties stored in-object and
// properties stored in the properties array.
@ -1630,16 +1626,6 @@ void JSObject::InitializeBody(Map* map, int start_offset,
}
}
// Heuristic deciding when an object should leave fast (descriptor-based)
// properties. Never triggers while in-object slack remains or for
// prototype maps; otherwise compares the number of out-of-object fields
// against a limit that is stricter for keyed stores (12) than for named
// stores (128), but never below the in-object property count.
bool Map::TooManyFastProperties(StoreFromKeyed store_mode) const {
if (UnusedPropertyFields() != 0) return false;
if (is_prototype_map()) return false;
int minimum = store_mode == CERTAINLY_NOT_STORE_FROM_KEYED ? 128 : 12;
int limit = Max(minimum, GetInObjectProperties());
int external = NumberOfFields() - GetInObjectProperties();
return external > limit;
}
void Struct::InitializeBody(int object_size) {
Object* value = GetHeap()->undefined_value();
for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
@ -1937,52 +1923,6 @@ int DescriptorArray::SearchWithCache(Isolate* isolate, Name* name, Map* map) {
return number;
}
// Details of the most recently added own descriptor.
PropertyDetails Map::GetLastDescriptorDetails() const {
return instance_descriptors()->GetDetails(LastAdded());
}
// Index of the last own descriptor; requires at least one descriptor.
int Map::LastAdded() const {
int number_of_own_descriptors = NumberOfOwnDescriptors();
DCHECK_GT(number_of_own_descriptors, 0);
return number_of_own_descriptors - 1;
}
// Own-descriptor count, packed into bit_field3.
int Map::NumberOfOwnDescriptors() const {
return NumberOfOwnDescriptorsBits::decode(bit_field3());
}
void Map::SetNumberOfOwnDescriptors(int number) {
DCHECK(number <= instance_descriptors()->number_of_descriptors());
set_bit_field3(NumberOfOwnDescriptorsBits::update(bit_field3(), number));
}
// Enum-cache length, packed into bit_field3; may hold the
// kInvalidEnumCacheSentinel marker.
int Map::EnumLength() const { return EnumLengthBits::decode(bit_field3()); }
void Map::SetEnumLength(int length) {
if (length != kInvalidEnumCacheSentinel) {
DCHECK_GE(length, 0);
DCHECK(length <= NumberOfOwnDescriptors());
}
set_bit_field3(EnumLengthBits::update(bit_field3(), length));
}
// Canonical empty elements backing store for this map's elements kind.
// The result is always an old-space singleton (asserted below).
FixedArrayBase* Map::GetInitialElements() const {
FixedArrayBase* result = nullptr;
if (has_fast_elements() || has_fast_string_wrapper_elements()) {
result = GetHeap()->empty_fixed_array();
} else if (has_fast_sloppy_arguments_elements()) {
result = GetHeap()->empty_sloppy_arguments_elements();
} else if (has_fixed_typed_array_elements()) {
result = GetHeap()->EmptyFixedTypedArrayForMap(this);
} else if (has_dictionary_elements()) {
result = GetHeap()->empty_slow_element_dictionary();
} else {
UNREACHABLE();
}
DCHECK(!GetHeap()->InNewSpace(result));
return result;
}
Object** DescriptorArray::GetKeySlot(int descriptor_number) {
DCHECK(descriptor_number < number_of_descriptors());
@ -2109,101 +2049,6 @@ void DescriptorArray::SwapSortedKeys(int first, int second) {
SetSortedKey(second, first_key);
}
// Element/deleted/capacity counters are stored as Smis in fixed slots of
// the backing fixed array.
int HashTableBase::NumberOfElements() const {
return Smi::ToInt(get(kNumberOfElementsIndex));
}
int HashTableBase::NumberOfDeletedElements() const {
return Smi::ToInt(get(kNumberOfDeletedElementsIndex));
}
int HashTableBase::Capacity() const { return Smi::ToInt(get(kCapacityIndex)); }
void HashTableBase::ElementAdded() {
SetNumberOfElements(NumberOfElements() + 1);
}
// Removal keeps the slot as a deleted-marker, so both counters move.
void HashTableBase::ElementRemoved() {
SetNumberOfElements(NumberOfElements() - 1);
SetNumberOfDeletedElements(NumberOfDeletedElements() + 1);
}
void HashTableBase::ElementsRemoved(int n) {
SetNumberOfElements(NumberOfElements() - n);
SetNumberOfDeletedElements(NumberOfDeletedElements() + n);
}
// static
// Capacity for |at_least_space_for| elements: +50% slack, rounded up to a
// power of two, clamped to kMinCapacity.
int HashTableBase::ComputeCapacity(int at_least_space_for) {
// Add 50% slack to make slot collisions sufficiently unlikely.
// See matching computation in HashTable::HasSufficientCapacityToAdd().
// Must be kept in sync with CodeStubAssembler::HashTableComputeCapacity().
int raw_cap = at_least_space_for + (at_least_space_for >> 1);
int capacity = base::bits::RoundUpToPowerOfTwo32(raw_cap);
return Max(capacity, kMinCapacity);
}
void HashTableBase::SetNumberOfElements(int nof) {
set(kNumberOfElementsIndex, Smi::FromInt(nof));
}
void HashTableBase::SetNumberOfDeletedElements(int nod) {
set(kNumberOfDeletedElementsIndex, Smi::FromInt(nod));
}
// Default map root for hash tables; shapes may override.
template <typename Key>
int BaseShape<Key>::GetMapRootIndex() {
return Heap::kHashTableMapRootIndex;
}
// FindEntry overloads funnel into the (isolate, key, hash) variant below.
template <typename Derived, typename Shape>
int HashTable<Derived, Shape>::FindEntry(Key key) {
return FindEntry(GetIsolate(), key);
}
template <typename Derived, typename Shape>
int HashTable<Derived, Shape>::FindEntry(Isolate* isolate, Key key) {
return FindEntry(isolate, key, Shape::Hash(isolate, key));
}
// Find entry for key otherwise return kNotFound.
// Open-addressed probe: walk the probe sequence until the key matches or
// an undefined (never-used) slot terminates the search; hole markers are
// skipped when the shape requires a hole check.
template <typename Derived, typename Shape>
int HashTable<Derived, Shape>::FindEntry(Isolate* isolate, Key key,
int32_t hash) {
uint32_t capacity = Capacity();
uint32_t entry = FirstProbe(hash, capacity);
uint32_t count = 1;
// EnsureCapacity will guarantee the hash table is never full.
Object* undefined = isolate->heap()->undefined_value();
Object* the_hole = isolate->heap()->the_hole_value();
USE(the_hole);
while (true) {
Object* element = KeyAt(entry);
// Empty entry. Uses raw unchecked accessors because it is called by the
// string table during bootstrapping.
if (element == undefined) break;
if (!(Shape::kNeedsHoleCheck && the_hole == element)) {
if (Shape::IsMatch(key, element)) return entry;
}
entry = NextProbe(entry, count++, capacity);
}
return kNotFound;
}
// Membership tests; the hash-less overload bails out if the key has no
// cached Smi hash (an object without a hash cannot be in the set).
bool ObjectHashSet::Has(Isolate* isolate, Handle<Object> key, int32_t hash) {
return FindEntry(isolate, key, hash) != kNotFound;
}
bool ObjectHashSet::Has(Isolate* isolate, Handle<Object> key) {
Object* hash = key->GetHash();
if (!hash->IsSmi()) return false;
return FindEntry(isolate, key, Smi::ToInt(hash)) != kNotFound;
}
bool StringSetShape::IsMatch(String* key, Object* value) {
DCHECK(value->IsString());
return key->Equals(String::cast(value));
@ -2269,19 +2114,6 @@ DEFINE_DEOPT_ENTRY_ACCESSORS(BytecodeOffsetRaw, Smi)
DEFINE_DEOPT_ENTRY_ACCESSORS(TranslationIndex, Smi)
DEFINE_DEOPT_ENTRY_ACCESSORS(Pc, Smi)
// Checked casts (mutable and const) from Object* to the concrete hash
// table instantiation; SLOW_DCHECK-guarded reinterpret_cast.
template <typename Derived, typename Shape>
HashTable<Derived, Shape>* HashTable<Derived, Shape>::cast(Object* obj) {
SLOW_DCHECK(obj->IsHashTable());
return reinterpret_cast<HashTable*>(obj);
}
template <typename Derived, typename Shape>
const HashTable<Derived, Shape>* HashTable<Derived, Shape>::cast(
const Object* obj) {
SLOW_DCHECK(obj->IsHashTable());
return reinterpret_cast<const HashTable*>(obj);
}
int PropertyArray::length() const {
Object* value_obj = READ_FIELD(this, kLengthAndHashOffset);
int value = Smi::ToInt(value_obj);
@ -2344,88 +2176,6 @@ FreeSpace* FreeSpace::cast(HeapObject* o) {
return reinterpret_cast<FreeSpace*>(o);
}
// Visitor id is a single byte in the map header.
VisitorId Map::visitor_id() const {
return static_cast<VisitorId>(READ_BYTE_FIELD(this, kVisitorIdOffset));
}
void Map::set_visitor_id(VisitorId id) {
DCHECK_LE(0, id);
DCHECK_LT(id, 256);
WRITE_BYTE_FIELD(this, kVisitorIdOffset, static_cast<byte>(id));
}
// Instance size is stored as a byte count of pointer-size words
// (relaxed atomics: read concurrently by the GC).
int Map::instance_size_in_words() const {
return RELAXED_READ_BYTE_FIELD(this, kInstanceSizeInWordsOffset);
}
void Map::set_instance_size_in_words(int value) {
RELAXED_WRITE_BYTE_FIELD(this, kInstanceSizeInWordsOffset,
static_cast<byte>(value));
}
// Instance size in bytes; must be pointer-size aligned and fit in a byte
// once converted to words.
int Map::instance_size() const {
return instance_size_in_words() << kPointerSizeLog2;
}
void Map::set_instance_size(int value) {
DCHECK_EQ(0, value & (kPointerSize - 1));
value >>= kPointerSizeLog2;
DCHECK(0 <= value && value < 256);
set_instance_size_in_words(value);
}
// This byte is overloaded: for JSObject maps it is the word offset where
// in-object properties start; for primitive maps it is the constructor
// function index. The typed getters below assert which case applies.
int Map::inobject_properties_start_or_constructor_function_index() const {
return RELAXED_READ_BYTE_FIELD(
this, kInObjectPropertiesStartOrConstructorFunctionIndexOffset);
}
void Map::set_inobject_properties_start_or_constructor_function_index(
int value) {
DCHECK_LE(0, value);
DCHECK_LT(value, 256);
RELAXED_WRITE_BYTE_FIELD(
this, kInObjectPropertiesStartOrConstructorFunctionIndexOffset,
static_cast<byte>(value));
}
int Map::GetInObjectPropertiesStartInWords() const {
DCHECK(IsJSObjectMap());
return inobject_properties_start_or_constructor_function_index();
}
void Map::SetInObjectPropertiesStartInWords(int value) {
DCHECK(IsJSObjectMap());
set_inobject_properties_start_or_constructor_function_index(value);
}
// Number of in-object property slots: everything between the start-of-
// properties word and the end of the instance.
int Map::GetInObjectProperties() const {
DCHECK(IsJSObjectMap());
return instance_size_in_words() - GetInObjectPropertiesStartInWords();
}
int Map::GetConstructorFunctionIndex() const {
DCHECK(IsPrimitiveMap());
return inobject_properties_start_or_constructor_function_index();
}
void Map::SetConstructorFunctionIndex(int value) {
DCHECK(IsPrimitiveMap());
set_inobject_properties_start_or_constructor_function_index(value);
}
// Byte offset of the |index|-th in-object property.
int Map::GetInObjectPropertyOffset(int index) const {
return (GetInObjectPropertiesStartInWords() + index) * kPointerSize;
}
// Test-only thin wrapper around AddMissingTransitions.
Handle<Map> Map::AddMissingTransitionsForTesting(
Handle<Map> split_map, Handle<DescriptorArray> descriptors,
Handle<LayoutDescriptor> full_layout_descriptor) {
return AddMissingTransitions(split_map, descriptors, full_layout_descriptor);
}
int HeapObject::SizeFromMap(Map* map) const {
int instance_size = map->instance_size();
if (instance_size != kVariableSizeSentinel) return instance_size;
@ -2491,635 +2241,6 @@ int HeapObject::SizeFromMap(Map* map) const {
return reinterpret_cast<const Code*>(this)->CodeSize();
}
// Instance type is a 16-bit field in the map header.
InstanceType Map::instance_type() const {
return static_cast<InstanceType>(
READ_UINT16_FIELD(this, kInstanceTypeOffset));
}
void Map::set_instance_type(InstanceType value) {
WRITE_UINT16_FIELD(this, kInstanceTypeOffset, value);
}
// The used_or_unused_instance_size_in_words byte is double-encoded:
//  - value >= JSObject::kFieldsAdded: number of words of the instance that
//    are in use (so unused in-object fields = instance size - value);
//  - value <  JSObject::kFieldsAdded: the slack in the out-of-object
//    property array.
// All helpers below decode/maintain this encoding.
int Map::UnusedPropertyFields() const {
int value = used_or_unused_instance_size_in_words();
DCHECK_IMPLIES(!IsJSObjectMap(), value == 0);
int unused;
if (value >= JSObject::kFieldsAdded) {
unused = instance_size_in_words() - value;
} else {
// For out of object properties "used_or_unused_instance_size_in_words"
// byte encodes the slack in the property array.
unused = value;
}
return unused;
}
int Map::used_or_unused_instance_size_in_words() const {
return RELAXED_READ_BYTE_FIELD(this, kUsedOrUnusedInstanceSizeInWordsOffset);
}
void Map::set_used_or_unused_instance_size_in_words(int value) {
DCHECK_LE(0, value);
DCHECK_LE(value, 255);
RELAXED_WRITE_BYTE_FIELD(this, kUsedOrUnusedInstanceSizeInWordsOffset,
static_cast<byte>(value));
}
// Bytes of the instance that are in use; the whole instance when the byte
// is tracking property-array slack instead.
int Map::UsedInstanceSize() const {
int words = used_or_unused_instance_size_in_words();
if (words < JSObject::kFieldsAdded) {
// All in-object properties are used and the words is tracking the slack
// in the property array.
return instance_size();
}
return words * kPointerSize;
}
// Record |value| unused in-object property fields (non-JSObject maps must
// pass 0). Stores the used-words encoding and cross-checks the decode.
void Map::SetInObjectUnusedPropertyFields(int value) {
STATIC_ASSERT(JSObject::kFieldsAdded == JSObject::kHeaderSize / kPointerSize);
if (!IsJSObjectMap()) {
DCHECK_EQ(0, value);
set_used_or_unused_instance_size_in_words(0);
DCHECK_EQ(0, UnusedPropertyFields());
return;
}
DCHECK_LE(0, value);
DCHECK_LE(value, GetInObjectProperties());
int used_inobject_properties = GetInObjectProperties() - value;
set_used_or_unused_instance_size_in_words(
GetInObjectPropertyOffset(used_inobject_properties) / kPointerSize);
DCHECK_EQ(value, UnusedPropertyFields());
}
// Record slack of |value| (< kFieldsAdded) in the property array.
void Map::SetOutOfObjectUnusedPropertyFields(int value) {
STATIC_ASSERT(JSObject::kFieldsAdded == JSObject::kHeaderSize / kPointerSize);
DCHECK_LE(0, value);
DCHECK_LT(value, JSObject::kFieldsAdded);
// For out of object properties "used_instance_size_in_words" byte encodes
// the slack in the property array.
set_used_or_unused_instance_size_in_words(value);
DCHECK_EQ(value, UnusedPropertyFields());
}
// Copy the raw encoded byte from |map|; valid because both encodings are
// byte-for-byte transferable between maps of equal layout.
void Map::CopyUnusedPropertyFields(Map* map) {
set_used_or_unused_instance_size_in_words(
map->used_or_unused_instance_size_in_words());
DCHECK_EQ(UnusedPropertyFields(), map->UnusedPropertyFields());
}
// Consume one unused field for a newly added property, switching from the
// in-object encoding to the out-of-object encoding when in-object space
// runs out.
void Map::AccountAddedPropertyField() {
// Update used instance size and unused property fields number.
STATIC_ASSERT(JSObject::kFieldsAdded == JSObject::kHeaderSize / kPointerSize);
#ifdef DEBUG
int new_unused = UnusedPropertyFields() - 1;
if (new_unused < 0) new_unused += JSObject::kFieldsAdded;
#endif
int value = used_or_unused_instance_size_in_words();
if (value >= JSObject::kFieldsAdded) {
if (value == instance_size_in_words()) {
AccountAddedOutOfObjectPropertyField(0);
} else {
// The property is added in-object, so simply increment the counter.
set_used_or_unused_instance_size_in_words(value + 1);
}
} else {
AccountAddedOutOfObjectPropertyField(value);
}
DCHECK_EQ(new_unused, UnusedPropertyFields());
}
// Decrement property-array slack, wrapping by kFieldsAdded when a new
// chunk of fields is (conceptually) appended to the property array.
void Map::AccountAddedOutOfObjectPropertyField(int unused_in_property_array) {
unused_in_property_array--;
if (unused_in_property_array < 0) {
unused_in_property_array += JSObject::kFieldsAdded;
}
DCHECK_GE(unused_in_property_array, 0);
DCHECK_LT(unused_in_property_array, JSObject::kFieldsAdded);
set_used_or_unused_instance_size_in_words(unused_in_property_array);
DCHECK_EQ(unused_in_property_array, UnusedPropertyFields());
}
// Raw accessors for the two one-byte flag fields, followed by per-flag
// getters/setters. bit_field flags use explicit shift masks; bit_field2
// mixes shift masks with BitField helpers.
byte Map::bit_field() const { return READ_BYTE_FIELD(this, kBitFieldOffset); }
void Map::set_bit_field(byte value) {
WRITE_BYTE_FIELD(this, kBitFieldOffset, value);
}
byte Map::bit_field2() const { return READ_BYTE_FIELD(this, kBitField2Offset); }
void Map::set_bit_field2(byte value) {
WRITE_BYTE_FIELD(this, kBitField2Offset, value);
}
void Map::set_non_instance_prototype(bool value) {
if (value) {
set_bit_field(bit_field() | (1 << kHasNonInstancePrototype));
} else {
set_bit_field(bit_field() & ~(1 << kHasNonInstancePrototype));
}
}
// Only meaningful for maps that actually have a prototype slot.
bool Map::has_non_instance_prototype() const {
if (!has_prototype_slot()) return false;
return ((1 << kHasNonInstancePrototype) & bit_field()) != 0;
}
void Map::set_is_constructor(bool value) {
if (value) {
set_bit_field(bit_field() | (1 << kIsConstructor));
} else {
set_bit_field(bit_field() & ~(1 << kIsConstructor));
}
}
bool Map::is_constructor() const {
return ((1 << kIsConstructor) & bit_field()) != 0;
}
BOOL_ACCESSORS(Map, bit_field, has_prototype_slot, kHasPrototypeSlot)
void Map::set_has_hidden_prototype(bool value) {
set_bit_field3(HasHiddenPrototype::update(bit_field3(), value));
}
bool Map::has_hidden_prototype() const {
return HasHiddenPrototype::decode(bit_field3());
}
// The next three flags are set-only here (no clearing setter).
void Map::set_has_indexed_interceptor() {
set_bit_field(bit_field() | (1 << kHasIndexedInterceptor));
}
bool Map::has_indexed_interceptor() const {
return ((1 << kHasIndexedInterceptor) & bit_field()) != 0;
}
void Map::set_is_undetectable() {
set_bit_field(bit_field() | (1 << kIsUndetectable));
}
bool Map::is_undetectable() const {
return ((1 << kIsUndetectable) & bit_field()) != 0;
}
void Map::set_has_named_interceptor() {
set_bit_field(bit_field() | (1 << kHasNamedInterceptor));
}
bool Map::has_named_interceptor() const {
return ((1 << kHasNamedInterceptor) & bit_field()) != 0;
}
void Map::set_is_access_check_needed(bool access_check_needed) {
if (access_check_needed) {
set_bit_field(bit_field() | (1 << kIsAccessCheckNeeded));
} else {
set_bit_field(bit_field() & ~(1 << kIsAccessCheckNeeded));
}
}
bool Map::is_access_check_needed() const {
return ((1 << kIsAccessCheckNeeded) & bit_field()) != 0;
}
void Map::set_is_extensible(bool value) {
if (value) {
set_bit_field2(bit_field2() | (1 << kIsExtensible));
} else {
set_bit_field2(bit_field2() & ~(1 << kIsExtensible));
}
}
bool Map::is_extensible() const {
return ((1 << kIsExtensible) & bit_field2()) != 0;
}
void Map::set_is_prototype_map(bool value) {
set_bit_field2(IsPrototypeMapBits::update(bit_field2(), value));
}
bool Map::is_prototype_map() const {
return IsPrototypeMapBits::decode(bit_field2());
}
// A prototype map that no longer owns its descriptors.
bool Map::is_abandoned_prototype_map() const {
return is_prototype_map() && !owns_descriptors();
}
// Reads the flag off the PrototypeInfo, if one has been allocated.
bool Map::should_be_fast_prototype_map() const {
if (!prototype_info()->IsPrototypeInfo()) return false;
return PrototypeInfo::cast(prototype_info())->should_be_fast_map();
}
// Elements kind is packed into bit_field2; verified by re-reading.
void Map::set_elements_kind(ElementsKind elements_kind) {
DCHECK_LT(static_cast<int>(elements_kind), kElementsKindCount);
DCHECK_LE(kElementsKindCount, 1 << Map::ElementsKindBits::kSize);
set_bit_field2(Map::ElementsKindBits::update(bit_field2(), elements_kind));
DCHECK(this->elements_kind() == elements_kind);
}
ElementsKind Map::elements_kind() const {
return Map::ElementsKindBits::decode(bit_field2());
}
// Convenience predicates over elements_kind(); each forwards to the
// corresponding ElementsKind classifier or compares against a specific
// kind constant.
bool Map::has_fast_smi_elements() const {
return IsSmiElementsKind(elements_kind());
}
bool Map::has_fast_object_elements() const {
return IsObjectElementsKind(elements_kind());
}
bool Map::has_fast_smi_or_object_elements() const {
return IsSmiOrObjectElementsKind(elements_kind());
}
bool Map::has_fast_double_elements() const {
return IsDoubleElementsKind(elements_kind());
}
bool Map::has_fast_elements() const {
return IsFastElementsKind(elements_kind());
}
bool Map::has_sloppy_arguments_elements() const {
return IsSloppyArgumentsElementsKind(elements_kind());
}
bool Map::has_fast_sloppy_arguments_elements() const {
return elements_kind() == FAST_SLOPPY_ARGUMENTS_ELEMENTS;
}
bool Map::has_fast_string_wrapper_elements() const {
return elements_kind() == FAST_STRING_WRAPPER_ELEMENTS;
}
bool Map::has_fixed_typed_array_elements() const {
return IsFixedTypedArrayElementsKind(elements_kind());
}
bool Map::has_dictionary_elements() const {
return IsDictionaryElementsKind(elements_kind());
}
// Marking a map as dictionary-mode also marks it unstable (both bits are
// updated in one bit_field3 write).
void Map::set_dictionary_map(bool value) {
uint32_t new_bit_field3 = DictionaryMap::update(bit_field3(), value);
new_bit_field3 = IsUnstable::update(new_bit_field3, value);
set_bit_field3(new_bit_field3);
}
bool Map::is_dictionary_map() const {
return DictionaryMap::decode(bit_field3());
}
void Map::set_owns_descriptors(bool owns_descriptors) {
set_bit_field3(OwnsDescriptors::update(bit_field3(), owns_descriptors));
}
bool Map::owns_descriptors() const {
return OwnsDescriptors::decode(bit_field3());
}
void Map::set_is_callable() { set_bit_field(bit_field() | (1 << kIsCallable)); }
bool Map::is_callable() const {
return ((1 << kIsCallable) & bit_field()) != 0;
}
// Deprecation is one-way; optionally logged for --trace-maps.
void Map::deprecate() {
set_bit_field3(Deprecated::update(bit_field3(), true));
if (FLAG_trace_maps) {
LOG(GetIsolate(), MapEvent("Deprecate", this, nullptr));
}
}
bool Map::is_deprecated() const { return Deprecated::decode(bit_field3()); }
void Map::set_migration_target(bool value) {
set_bit_field3(IsMigrationTarget::update(bit_field3(), value));
}
bool Map::is_migration_target() const {
return IsMigrationTarget::decode(bit_field3());
}
void Map::set_immutable_proto(bool value) {
set_bit_field3(ImmutablePrototype::update(bit_field3(), value));
}
bool Map::is_immutable_proto() const {
return ImmutablePrototype::decode(bit_field3());
}
void Map::set_new_target_is_base(bool value) {
set_bit_field3(NewTargetIsBase::update(bit_field3(), value));
}
bool Map::new_target_is_base() const {
return NewTargetIsBase::decode(bit_field3());
}
void Map::set_may_have_interesting_symbols(bool value) {
set_bit_field3(MayHaveInterestingSymbols::update(bit_field3(), value));
}
bool Map::may_have_interesting_symbols() const {
return MayHaveInterestingSymbols::decode(bit_field3());
}
// Slack-tracking counter, packed into bit_field3.
void Map::set_construction_counter(int value) {
set_bit_field3(ConstructionCounter::update(bit_field3(), value));
}
int Map::construction_counter() const {
return ConstructionCounter::decode(bit_field3());
}
// Stability is stored inverted: the bit records "unstable", and marking
// is one-way.
void Map::mark_unstable() {
set_bit_field3(IsUnstable::update(bit_field3(), true));
}
bool Map::is_stable() const { return !IsUnstable::decode(bit_field3()); }
// A map can be deprecated if any own descriptor uses a representation or
// kind/location combination that a later generalization could widen.
bool Map::CanBeDeprecated() const {
int descriptor = LastAdded();
for (int i = 0; i <= descriptor; i++) {
PropertyDetails details = instance_descriptors()->GetDetails(i);
if (details.representation().IsNone()) return true;
if (details.representation().IsSmi()) return true;
if (details.representation().IsDouble()) return true;
if (details.representation().IsHeapObject()) return true;
if (details.kind() == kData && details.location() == kDescriptor) {
return true;
}
}
return false;
}
// On a layout change of a stable leaf map: mark it unstable and
// deoptimize code that depended on prototype checks against it.
void Map::NotifyLeafMapLayoutChange() {
if (is_stable()) {
mark_unstable();
dependent_code()->DeoptimizeDependentCodeGroup(
GetIsolate(),
DependentCode::kPrototypeCheckGroup);
}
}
bool Map::CanTransition() const {
// Only JSObject and subtypes have map transitions and back pointers.
STATIC_ASSERT(LAST_TYPE == LAST_JS_OBJECT_TYPE);
return instance_type() >= FIRST_JS_OBJECT_TYPE;
}
// Identity comparison against the heap's singleton boolean map.
bool Map::IsBooleanMap() const { return this == GetHeap()->boolean_map(); }
// Instance-type range/equality predicates; STATIC_ASSERTs pin the
// instance-type enum ordering they rely on.
bool Map::IsPrimitiveMap() const {
STATIC_ASSERT(FIRST_PRIMITIVE_TYPE == FIRST_TYPE);
return instance_type() <= LAST_PRIMITIVE_TYPE;
}
bool Map::IsJSReceiverMap() const {
STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
return instance_type() >= FIRST_JS_RECEIVER_TYPE;
}
bool Map::IsJSObjectMap() const {
STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
return instance_type() >= FIRST_JS_OBJECT_TYPE;
}
bool Map::IsJSArrayMap() const { return instance_type() == JS_ARRAY_TYPE; }
bool Map::IsJSFunctionMap() const {
return instance_type() == JS_FUNCTION_TYPE;
}
bool Map::IsStringMap() const { return instance_type() < FIRST_NONSTRING_TYPE; }
bool Map::IsJSProxyMap() const { return instance_type() == JS_PROXY_TYPE; }
bool Map::IsJSGlobalProxyMap() const {
return instance_type() == JS_GLOBAL_PROXY_TYPE;
}
bool Map::IsJSGlobalObjectMap() const {
return instance_type() == JS_GLOBAL_OBJECT_TYPE;
}
bool Map::IsJSTypedArrayMap() const {
return instance_type() == JS_TYPED_ARRAY_TYPE;
}
bool Map::IsJSDataViewMap() const {
return instance_type() == JS_DATA_VIEW_TYPE;
}
// True for instance types needing special receiver handling; the DCHECK
// asserts non-special maps have no interceptor and no access check.
bool Map::IsSpecialReceiverMap() const {
bool result = IsSpecialReceiverInstanceType(instance_type());
DCHECK_IMPLIES(!result,
!has_named_interceptor() && !is_access_check_needed());
return result;
}
Object* Map::prototype() const {
return READ_FIELD(this, kPrototypeOffset);
}
// Prototype must be null or a JSReceiver; write-barriered store.
void Map::set_prototype(Object* value, WriteBarrierMode mode) {
DCHECK(value->IsNull(GetIsolate()) || value->IsJSReceiver());
WRITE_FIELD(this, kPrototypeOffset, value);
CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kPrototypeOffset, value, mode);
}
// GC-safe read of the layout descriptor (relaxed load + tolerant cast);
// only meaningful with FLAG_unbox_double_fields.
LayoutDescriptor* Map::layout_descriptor_gc_safe() const {
DCHECK(FLAG_unbox_double_fields);
Object* layout_desc = RELAXED_READ_FIELD(this, kLayoutDescriptorOffset);
return LayoutDescriptor::cast_gc_safe(layout_desc);
}
// True when every field is tagged (the fast all-pointers layout).
bool Map::HasFastPointerLayout() const {
DCHECK(FLAG_unbox_double_fields);
Object* layout_desc = RELAXED_READ_FIELD(this, kLayoutDescriptorOffset);
return LayoutDescriptor::IsFastPointerLayout(layout_desc);
}
// Replace the descriptor array; the layout descriptor is only swapped
// when the current one is in slow layout, then consistency is verified.
void Map::UpdateDescriptors(DescriptorArray* descriptors,
LayoutDescriptor* layout_desc) {
set_instance_descriptors(descriptors);
if (FLAG_unbox_double_fields) {
if (layout_descriptor()->IsSlowLayout()) {
set_layout_descriptor(layout_desc);
}
#ifdef VERIFY_HEAP
// TODO(ishell): remove these checks from VERIFY_HEAP mode.
if (FLAG_verify_heap) {
CHECK(layout_descriptor()->IsConsistentWithMap(this));
CHECK_EQ(Map::GetVisitorId(this), visitor_id());
}
#else
SLOW_DCHECK(layout_descriptor()->IsConsistentWithMap(this));
DCHECK(visitor_id() == Map::GetVisitorId(this));
#endif
}
}
// Install descriptors from scratch: sets the array, adopts its full
// descriptor count as own, installs the layout descriptor and refreshes
// the visitor id.
void Map::InitializeDescriptors(DescriptorArray* descriptors,
LayoutDescriptor* layout_desc) {
int len = descriptors->number_of_descriptors();
set_instance_descriptors(descriptors);
SetNumberOfOwnDescriptors(len);
if (FLAG_unbox_double_fields) {
set_layout_descriptor(layout_desc);
#ifdef VERIFY_HEAP
// TODO(ishell): remove these checks from VERIFY_HEAP mode.
if (FLAG_verify_heap) {
CHECK(layout_descriptor()->IsConsistentWithMap(this));
}
#else
SLOW_DCHECK(layout_descriptor()->IsConsistentWithMap(this));
#endif
set_visitor_id(Map::GetVisitorId(this));
}
}
ACCESSORS(Map, instance_descriptors, DescriptorArray, kDescriptorsOffset)
ACCESSORS_CHECKED(Map, layout_descriptor, LayoutDescriptor,
kLayoutDescriptorOffset, FLAG_unbox_double_fields)
// bit_field3 is a raw uint32; on 64-bit the upper half of the slot is
// zeroed first so the full pointer-sized word is deterministic.
void Map::set_bit_field3(uint32_t bits) {
if (kInt32Size != kPointerSize) {
WRITE_UINT32_FIELD(this, kBitField3Offset + kInt32Size, 0);
}
WRITE_UINT32_FIELD(this, kBitField3Offset, bits);
}
uint32_t Map::bit_field3() const {
return READ_UINT32_FIELD(this, kBitField3Offset);
}
// Layout descriptor, or the shared fast-pointer layout when unboxing is
// disabled.
LayoutDescriptor* Map::GetLayoutDescriptor() const {
return FLAG_unbox_double_fields ? layout_descriptor()
: LayoutDescriptor::FastPointerLayout();
}
// Append one descriptor and bump the own-descriptor count, keeping the
// interesting-symbols flag and unused-field accounting in sync. Double
// field descriptors are not supported here (layout descriptor would need
// updating too).
void Map::AppendDescriptor(Descriptor* desc) {
DescriptorArray* descriptors = instance_descriptors();
int number_of_own_descriptors = NumberOfOwnDescriptors();
DCHECK(descriptors->number_of_descriptors() == number_of_own_descriptors);
descriptors->Append(desc);
SetNumberOfOwnDescriptors(number_of_own_descriptors + 1);
// Properly mark the map if the {desc} is an "interesting symbol".
if (desc->GetKey()->IsInterestingSymbol()) {
set_may_have_interesting_symbols(true);
}
PropertyDetails details = desc->GetDetails();
if (details.location() == kField) {
DCHECK_GT(UnusedPropertyFields(), 0);
AccountAddedPropertyField();
}
// This function does not support appending double field descriptors and
// it should never try to (otherwise, layout descriptor must be updated too).
#ifdef DEBUG
DCHECK(details.location() != kField || !details.representation().IsDouble());
#endif
}
// The constructor_or_backpointer slot holds either a back pointer (a Map)
// or the constructor; a non-Map value means there is no back pointer.
Object* Map::GetBackPointer() const {
Object* object = constructor_or_backpointer();
if (object->IsMap()) {
return object;
}
return GetIsolate()->heap()->undefined_value();
}
// Looks up the elements-kind transition via the special transition symbol.
Map* Map::ElementsTransitionMap() {
DisallowHeapAllocation no_gc;
return TransitionsAccessor(this, &no_gc)
.SearchSpecial(GetHeap()->elements_transition_symbol());
}
ACCESSORS(Map, raw_transitions, Object, kTransitionsOrPrototypeInfoOffset)
// For prototype maps the transitions slot is reused for PrototypeInfo.
Object* Map::prototype_info() const {
DCHECK(is_prototype_map());
return READ_FIELD(this, Map::kTransitionsOrPrototypeInfoOffset);
}
void Map::set_prototype_info(Object* value, WriteBarrierMode mode) {
DCHECK(is_prototype_map());
WRITE_FIELD(this, Map::kTransitionsOrPrototypeInfoOffset, value);
CONDITIONAL_WRITE_BARRIER(
GetHeap(), this, Map::kTransitionsOrPrototypeInfoOffset, value, mode);
}
// Install a back pointer; only legal once (current value must be
// undefined) and the new parent must share this map's constructor.
void Map::SetBackPointer(Object* value, WriteBarrierMode mode) {
DCHECK(instance_type() >= FIRST_JS_RECEIVER_TYPE);
DCHECK(value->IsMap());
DCHECK(GetBackPointer()->IsUndefined(GetIsolate()));
DCHECK(!value->IsMap() ||
Map::cast(value)->GetConstructor() == constructor_or_backpointer());
set_constructor_or_backpointer(value, mode);
}
ACCESSORS(Map, dependent_code, DependentCode, kDependentCodeOffset)
ACCESSORS(Map, weak_cell_cache, Object, kWeakCellCacheOffset)
ACCESSORS(Map, constructor_or_backpointer, Object,
kConstructorOrBackPointerOffset)
// Walks the back-pointer chain until a non-Map (the constructor) is hit.
Object* Map::GetConstructor() const {
Object* maybe_constructor = constructor_or_backpointer();
// Follow any back pointers.
while (maybe_constructor->IsMap()) {
maybe_constructor =
Map::cast(maybe_constructor)->constructor_or_backpointer();
}
return maybe_constructor;
}
// The constructor is either an API JSFunction (whose shared info carries
// the template) or a FunctionTemplateInfo directly.
FunctionTemplateInfo* Map::GetFunctionTemplateInfo() const {
Object* constructor = GetConstructor();
if (constructor->IsJSFunction()) {
DCHECK(JSFunction::cast(constructor)->shared()->IsApiFunction());
return JSFunction::cast(constructor)->shared()->get_api_func_data();
}
DCHECK(constructor->IsFunctionTemplateInfo());
return FunctionTemplateInfo::cast(constructor);
}
void Map::SetConstructor(Object* constructor, WriteBarrierMode mode) {
// Never overwrite a back pointer with a constructor.
DCHECK(!constructor_or_backpointer()->IsMap());
set_constructor_or_backpointer(constructor, mode);
}
// Convenience overload: copy an initial map preserving its size and
// property accounting.
Handle<Map> Map::CopyInitialMap(Handle<Map> map) {
return CopyInitialMap(map, map->instance_size(), map->GetInObjectProperties(),
map->UnusedPropertyFields());
}
// Raw (unchecked) read of the bound target function slot.
Object* JSBoundFunction::raw_bound_target_function() const {
return READ_FIELD(this, kBoundTargetFunctionOffset);
}
@ -3449,22 +2570,6 @@ void JSFunction::CompleteInobjectSlackTrackingIfActive() {
}
}
// Slack tracking is active while the construction counter has not reached
// the kNoSlackTracking sentinel.
bool Map::IsInobjectSlackTrackingInProgress() const {
return construction_counter() != Map::kNoSlackTracking;
}
// One construction step: decrement the counter and finalize slack
// tracking when the end value is reached. Must be called on initial maps
// only (no back pointer).
void Map::InobjectSlackTrackingStep() {
// Slack tracking should only be performed on an initial map.
DCHECK(GetBackPointer()->IsUndefined(GetIsolate()));
if (!IsInobjectSlackTrackingInProgress()) return;
int counter = construction_counter();
set_construction_counter(counter - 1);
if (counter == kSlackTrackingCounterEnd) {
CompleteInobjectSlackTracking();
}
}
AbstractCode* JSFunction::abstract_code() {
if (IsInterpreted()) {
return AbstractCode::cast(shared()->bytecode_array());
@ -4321,16 +3426,6 @@ int WeakHashTableShape::GetMapRootIndex() {
return Heap::kWeakHashTableMapRootIndex;
}
// Extra capacity to reserve when growing from |old_size| toward
// |size_limit|: at least 1 for tiny arrays, otherwise 25% of the old
// size, never exceeding the remaining headroom.
int Map::SlackForArraySize(int old_size, int size_limit) {
const int max_slack = size_limit - old_size;
CHECK_LE(0, max_slack);
if (old_size < 4) {
DCHECK_LE(1, max_slack);
return 1;
}
return Min(max_slack, old_size / 4);
}
int TypeFeedbackInfo::ic_total_count() {
int current = Smi::ToInt(READ_FIELD(this, kStorage1Offset));
return ICTotalCountField::decode(current);

View File

@ -11,12 +11,116 @@
namespace v8 {
namespace internal {
int HashTableBase::NumberOfElements() const {
return Smi::ToInt(get(kNumberOfElementsIndex));
}
int HashTableBase::NumberOfDeletedElements() const {
return Smi::ToInt(get(kNumberOfDeletedElementsIndex));
}
int HashTableBase::Capacity() const { return Smi::ToInt(get(kCapacityIndex)); }
void HashTableBase::ElementAdded() {
SetNumberOfElements(NumberOfElements() + 1);
}
void HashTableBase::ElementRemoved() {
SetNumberOfElements(NumberOfElements() - 1);
SetNumberOfDeletedElements(NumberOfDeletedElements() + 1);
}
void HashTableBase::ElementsRemoved(int n) {
SetNumberOfElements(NumberOfElements() - n);
SetNumberOfDeletedElements(NumberOfDeletedElements() + n);
}
// static
int HashTableBase::ComputeCapacity(int at_least_space_for) {
// Add 50% slack to make slot collisions sufficiently unlikely.
// See matching computation in HashTable::HasSufficientCapacityToAdd().
// Must be kept in sync with CodeStubAssembler::HashTableComputeCapacity().
int raw_cap = at_least_space_for + (at_least_space_for >> 1);
int capacity = base::bits::RoundUpToPowerOfTwo32(raw_cap);
return Max(capacity, kMinCapacity);
}
void HashTableBase::SetNumberOfElements(int nof) {
set(kNumberOfElementsIndex, Smi::FromInt(nof));
}
void HashTableBase::SetNumberOfDeletedElements(int nod) {
set(kNumberOfDeletedElementsIndex, Smi::FromInt(nod));
}
// Default map for hash tables; shapes with a dedicated map override this.
template <typename Key>
int BaseShape<Key>::GetMapRootIndex() {
  return Heap::kHashTableMapRootIndex;
}

// Convenience overload: uses the table's own isolate.
template <typename Derived, typename Shape>
int HashTable<Derived, Shape>::FindEntry(Key key) {
  return FindEntry(GetIsolate(), key);
}

// Convenience overload: computes the hash via the shape.
template <typename Derived, typename Shape>
int HashTable<Derived, Shape>::FindEntry(Isolate* isolate, Key key) {
  return FindEntry(isolate, key, Shape::Hash(isolate, key));
}
// Find entry for key otherwise return kNotFound.
// Open-addressing probe: walks the probe sequence until the key is found or
// an empty (undefined) slot terminates the search.
template <typename Derived, typename Shape>
int HashTable<Derived, Shape>::FindEntry(Isolate* isolate, Key key,
                                         int32_t hash) {
  uint32_t capacity = Capacity();
  uint32_t entry = FirstProbe(hash, capacity);
  uint32_t count = 1;
  // EnsureCapacity will guarantee the hash table is never full.
  Object* undefined = isolate->heap()->undefined_value();
  Object* the_hole = isolate->heap()->the_hole_value();
  USE(the_hole);
  while (true) {
    Object* element = KeyAt(entry);
    // Empty entry. Uses raw unchecked accessors because it is called by the
    // string table during bootstrapping.
    if (element == undefined) break;
    // Deleted entries (the-hole) are skipped but do not end the probe
    // sequence, since a match may live further along it.
    if (!(Shape::kNeedsHoleCheck && the_hole == element)) {
      if (Shape::IsMatch(key, element)) return entry;
    }
    entry = NextProbe(entry, count++, capacity);
  }
  return kNotFound;
}
// A key slot is "live" when it holds neither the empty (undefined) nor the
// deleted (the-hole) sentinel.
template <typename KeyT>
bool BaseShape<KeyT>::IsLive(Isolate* isolate, Object* k) {
  Heap* heap = isolate->heap();
  return k != heap->the_hole_value() && k != heap->undefined_value();
}

// Checked downcast (mutable). SLOW_DCHECK only fires in slow-DCHECK builds.
template <typename Derived, typename Shape>
HashTable<Derived, Shape>* HashTable<Derived, Shape>::cast(Object* obj) {
  SLOW_DCHECK(obj->IsHashTable());
  return reinterpret_cast<HashTable*>(obj);
}

// Checked downcast (const overload).
template <typename Derived, typename Shape>
const HashTable<Derived, Shape>* HashTable<Derived, Shape>::cast(
    const Object* obj) {
  SLOW_DCHECK(obj->IsHashTable());
  return reinterpret_cast<const HashTable*>(obj);
}
// Membership test with a precomputed hash.
bool ObjectHashSet::Has(Isolate* isolate, Handle<Object> key, int32_t hash) {
  const int entry = FindEntry(isolate, key, hash);
  return entry != kNotFound;
}

// Membership test that derives the hash from the key itself.
bool ObjectHashSet::Has(Isolate* isolate, Handle<Object> key) {
  Object* hash = key->GetHash();
  // A key that was never assigned an identity hash cannot be in the set.
  return hash->IsSmi() &&
         FindEntry(isolate, key, Smi::ToInt(hash)) != kNotFound;
}
// Root-list index of the dedicated map for OrderedHashSet instances.
int OrderedHashSet::GetMapRootIndex() {
  return Heap::kOrderedHashSetMapRootIndex;
}
@ -25,6 +129,11 @@ int OrderedHashMap::GetMapRootIndex() {
return Heap::kOrderedHashMapMapRootIndex;
}
// Returns the value of |entry|; it is stored kValueOffset slots after the
// entry's key (located at EntryToIndex(entry)).
inline Object* OrderedHashMap::ValueAt(int entry) {
  DCHECK_LT(entry, this->UsedCapacity());
  return get(EntryToIndex(entry) + kValueOffset);
}
} // namespace internal
} // namespace v8

View File

@ -5,9 +5,19 @@
#ifndef V8_OBJECTS_MAP_INL_H_
#define V8_OBJECTS_MAP_INL_H_
#include "src/field-type.h"
#include "src/objects/map.h"
#include "src/field-type.h"
#include "src/objects-inl.h"
#include "src/objects/descriptor-array.h"
#include "src/objects/shared-function-info.h"
#include "src/property.h"
#include "src/transitions.h"
// For pulling in heap/incremental-marking.h which is needed by
// ACCESSORS_CHECKED.
#include "src/heap/heap-inl.h"
// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"
@ -16,6 +26,14 @@ namespace internal {
// Macro-generated cast, field accessors, and the MAP_TYPE checker for Map.
CAST_ACCESSOR(Map)
ACCESSORS(Map, instance_descriptors, DescriptorArray, kDescriptorsOffset)
// layout_descriptor only exists when double fields are unboxed.
ACCESSORS_CHECKED(Map, layout_descriptor, LayoutDescriptor,
                  kLayoutDescriptorOffset, FLAG_unbox_double_fields)
// Shared slot: transitions for regular maps, PrototypeInfo for prototype maps.
ACCESSORS(Map, raw_transitions, Object, kTransitionsOrPrototypeInfoOffset)
BOOL_ACCESSORS(Map, bit_field, has_prototype_slot, kHasPrototypeSlot)
TYPE_CHECKER(Map, MAP_TYPE)
InterceptorInfo* Map::GetNamedInterceptor() {
DCHECK(has_named_interceptor());
FunctionTemplateInfo* info = GetFunctionTemplateInfo();
@ -75,6 +93,751 @@ void Map::GeneralizeIfCanHaveTransitionableFastElementsKind(
}
}
// True when the field at |index| stores a raw (unboxed) double rather than a
// tagged pointer; only possible for in-object fields with unboxing enabled.
bool Map::IsUnboxedDoubleField(FieldIndex index) const {
  if (!FLAG_unbox_double_fields) return false;
  if (index.is_hidden_field() || !index.is_inobject()) return false;
  return !layout_descriptor()->IsTagged(index.property_index());
}

// Heuristic deciding when a fast-mode object has accumulated too many
// out-of-object properties and should go to dictionary mode.
bool Map::TooManyFastProperties(StoreFromKeyed store_mode) const {
  if (UnusedPropertyFields() != 0) return false;
  if (is_prototype_map()) return false;
  // Keyed stores get a much lower threshold than named stores.
  int minimum = store_mode == CERTAINLY_NOT_STORE_FROM_KEYED ? 128 : 12;
  int limit = Max(minimum, GetInObjectProperties());
  int external = NumberOfFields() - GetInObjectProperties();
  return external > limit;
}

// Details of the most recently added own descriptor.
PropertyDetails Map::GetLastDescriptorDetails() const {
  return instance_descriptors()->GetDetails(LastAdded());
}

// Index of the last own descriptor; requires at least one descriptor.
int Map::LastAdded() const {
  int number_of_own_descriptors = NumberOfOwnDescriptors();
  DCHECK_GT(number_of_own_descriptors, 0);
  return number_of_own_descriptors - 1;
}

// Number of descriptors this map owns (the descriptor array may be shared
// with other maps and hold more).
int Map::NumberOfOwnDescriptors() const {
  return NumberOfOwnDescriptorsBits::decode(bit_field3());
}

void Map::SetNumberOfOwnDescriptors(int number) {
  DCHECK(number <= instance_descriptors()->number_of_descriptors());
  set_bit_field3(NumberOfOwnDescriptorsBits::update(bit_field3(), number));
}

// Length of the cached enumeration order, or kInvalidEnumCacheSentinel.
int Map::EnumLength() const { return EnumLengthBits::decode(bit_field3()); }

void Map::SetEnumLength(int length) {
  if (length != kInvalidEnumCacheSentinel) {
    DCHECK_GE(length, 0);
    DCHECK(length <= NumberOfOwnDescriptors());
  }
  set_bit_field3(EnumLengthBits::update(bit_field3(), length));
}
// Returns the canonical empty elements backing store matching this map's
// elements kind. The result lives in old space (asserted below) so it can be
// shared without write barriers for new-space objects.
FixedArrayBase* Map::GetInitialElements() const {
  FixedArrayBase* result = nullptr;
  if (has_fast_elements() || has_fast_string_wrapper_elements()) {
    result = GetHeap()->empty_fixed_array();
  } else if (has_fast_sloppy_arguments_elements()) {
    result = GetHeap()->empty_sloppy_arguments_elements();
  } else if (has_fixed_typed_array_elements()) {
    result = GetHeap()->EmptyFixedTypedArrayForMap(this);
  } else if (has_dictionary_elements()) {
    result = GetHeap()->empty_slow_element_dictionary();
  } else {
    UNREACHABLE();
  }
  DCHECK(!GetHeap()->InNewSpace(result));
  return result;
}
// GC visitor id selecting the object-visiting routine for instances.
VisitorId Map::visitor_id() const {
  return static_cast<VisitorId>(READ_BYTE_FIELD(this, kVisitorIdOffset));
}

void Map::set_visitor_id(VisitorId id) {
  // Must fit in the single byte backing this field.
  DCHECK_LE(0, id);
  DCHECK_LT(id, 256);
  WRITE_BYTE_FIELD(this, kVisitorIdOffset, static_cast<byte>(id));
}

// Instance size is stored compressed as a word count in one byte.
int Map::instance_size_in_words() const {
  return RELAXED_READ_BYTE_FIELD(this, kInstanceSizeInWordsOffset);
}

void Map::set_instance_size_in_words(int value) {
  RELAXED_WRITE_BYTE_FIELD(this, kInstanceSizeInWordsOffset,
                           static_cast<byte>(value));
}

// Instance size in bytes (word count scaled up).
int Map::instance_size() const {
  return instance_size_in_words() << kPointerSizeLog2;
}

void Map::set_instance_size(int value) {
  // Size must be pointer-aligned and its word count must fit in a byte.
  DCHECK_EQ(0, value & (kPointerSize - 1));
  value >>= kPointerSizeLog2;
  DCHECK(0 <= value && value < 256);
  set_instance_size_in_words(value);
}
// This byte is overloaded: for JSObject maps it is the word offset where
// in-object properties start; for primitive maps it is the index of the
// constructor function in the native context.
int Map::inobject_properties_start_or_constructor_function_index() const {
  return RELAXED_READ_BYTE_FIELD(
      this, kInObjectPropertiesStartOrConstructorFunctionIndexOffset);
}

void Map::set_inobject_properties_start_or_constructor_function_index(
    int value) {
  DCHECK_LE(0, value);
  DCHECK_LT(value, 256);
  RELAXED_WRITE_BYTE_FIELD(
      this, kInObjectPropertiesStartOrConstructorFunctionIndexOffset,
      static_cast<byte>(value));
}

// JSObject-map view of the overloaded byte (see above).
int Map::GetInObjectPropertiesStartInWords() const {
  DCHECK(IsJSObjectMap());
  return inobject_properties_start_or_constructor_function_index();
}

void Map::SetInObjectPropertiesStartInWords(int value) {
  DCHECK(IsJSObjectMap());
  set_inobject_properties_start_or_constructor_function_index(value);
}

// Number of in-object property slots: everything between the start of the
// in-object area and the end of the instance.
int Map::GetInObjectProperties() const {
  DCHECK(IsJSObjectMap());
  return instance_size_in_words() - GetInObjectPropertiesStartInWords();
}

// Primitive-map view of the overloaded byte (see above).
int Map::GetConstructorFunctionIndex() const {
  DCHECK(IsPrimitiveMap());
  return inobject_properties_start_or_constructor_function_index();
}

void Map::SetConstructorFunctionIndex(int value) {
  DCHECK(IsPrimitiveMap());
  set_inobject_properties_start_or_constructor_function_index(value);
}

// Byte offset of the index-th in-object property slot.
int Map::GetInObjectPropertyOffset(int index) const {
  return (GetInObjectPropertiesStartInWords() + index) * kPointerSize;
}

// Test-only wrapper around the private AddMissingTransitions.
Handle<Map> Map::AddMissingTransitionsForTesting(
    Handle<Map> split_map, Handle<DescriptorArray> descriptors,
    Handle<LayoutDescriptor> full_layout_descriptor) {
  return AddMissingTransitions(split_map, descriptors, full_layout_descriptor);
}
// InstanceType of objects using this map, stored as a 16-bit field.
InstanceType Map::instance_type() const {
  return static_cast<InstanceType>(
      READ_UINT16_FIELD(this, kInstanceTypeOffset));
}

void Map::set_instance_type(InstanceType value) {
  WRITE_UINT16_FIELD(this, kInstanceTypeOffset, value);
}
// The used_or_unused_instance_size_in_words byte has two meanings:
//  - value >= JSObject::kFieldsAdded: the used instance size in words
//    (slack is in-object, at the end of the instance);
//  - value <  JSObject::kFieldsAdded: the slack in the out-of-object
//    property array (all in-object fields are used).
int Map::UnusedPropertyFields() const {
  int value = used_or_unused_instance_size_in_words();
  DCHECK_IMPLIES(!IsJSObjectMap(), value == 0);
  int unused;
  if (value >= JSObject::kFieldsAdded) {
    unused = instance_size_in_words() - value;
  } else {
    // For out of object properties "used_or_unused_instance_size_in_words"
    // byte encodes the slack in the property array.
    unused = value;
  }
  return unused;
}

int Map::used_or_unused_instance_size_in_words() const {
  return RELAXED_READ_BYTE_FIELD(this, kUsedOrUnusedInstanceSizeInWordsOffset);
}

void Map::set_used_or_unused_instance_size_in_words(int value) {
  DCHECK_LE(0, value);
  DCHECK_LE(value, 255);
  RELAXED_WRITE_BYTE_FIELD(this, kUsedOrUnusedInstanceSizeInWordsOffset,
                           static_cast<byte>(value));
}

// Bytes of the instance actually in use (full size when slack tracking has
// moved on to the property array).
int Map::UsedInstanceSize() const {
  int words = used_or_unused_instance_size_in_words();
  if (words < JSObject::kFieldsAdded) {
    // All in-object properties are used and the words is tracking the slack
    // in the property array.
    return instance_size();
  }
  return words * kPointerSize;
}
// Records |value| unused in-object property slots (see the encoding comment
// on UnusedPropertyFields).
void Map::SetInObjectUnusedPropertyFields(int value) {
  STATIC_ASSERT(JSObject::kFieldsAdded == JSObject::kHeaderSize / kPointerSize);
  if (!IsJSObjectMap()) {
    // Non-JSObject maps have no property fields at all.
    DCHECK_EQ(0, value);
    set_used_or_unused_instance_size_in_words(0);
    DCHECK_EQ(0, UnusedPropertyFields());
    return;
  }
  DCHECK_LE(0, value);
  DCHECK_LE(value, GetInObjectProperties());
  int used_inobject_properties = GetInObjectProperties() - value;
  set_used_or_unused_instance_size_in_words(
      GetInObjectPropertyOffset(used_inobject_properties) / kPointerSize);
  DCHECK_EQ(value, UnusedPropertyFields());
}

// Records |value| unused slots in the out-of-object property array.
void Map::SetOutOfObjectUnusedPropertyFields(int value) {
  STATIC_ASSERT(JSObject::kFieldsAdded == JSObject::kHeaderSize / kPointerSize);
  DCHECK_LE(0, value);
  DCHECK_LT(value, JSObject::kFieldsAdded);
  // For out of object properties "used_instance_size_in_words" byte encodes
  // the slack in the property array.
  set_used_or_unused_instance_size_in_words(value);
  DCHECK_EQ(value, UnusedPropertyFields());
}

// Copies the slack bookkeeping byte verbatim from |map|.
void Map::CopyUnusedPropertyFields(Map* map) {
  set_used_or_unused_instance_size_in_words(
      map->used_or_unused_instance_size_in_words());
  DCHECK_EQ(UnusedPropertyFields(), map->UnusedPropertyFields());
}

// Consumes one unused property field for a newly added property, switching
// from in-object to out-of-object accounting when the instance fills up.
void Map::AccountAddedPropertyField() {
  // Update used instance size and unused property fields number.
  STATIC_ASSERT(JSObject::kFieldsAdded == JSObject::kHeaderSize / kPointerSize);
#ifdef DEBUG
  int new_unused = UnusedPropertyFields() - 1;
  if (new_unused < 0) new_unused += JSObject::kFieldsAdded;
#endif
  int value = used_or_unused_instance_size_in_words();
  if (value >= JSObject::kFieldsAdded) {
    if (value == instance_size_in_words()) {
      AccountAddedOutOfObjectPropertyField(0);
    } else {
      // The property is added in-object, so simply increment the counter.
      set_used_or_unused_instance_size_in_words(value + 1);
    }
  } else {
    AccountAddedOutOfObjectPropertyField(value);
  }
  DCHECK_EQ(new_unused, UnusedPropertyFields());
}

// Consumes one slot of property-array slack; when it runs out, a batch of
// JSObject::kFieldsAdded fresh slots is assumed to have been allocated.
void Map::AccountAddedOutOfObjectPropertyField(int unused_in_property_array) {
  unused_in_property_array--;
  if (unused_in_property_array < 0) {
    unused_in_property_array += JSObject::kFieldsAdded;
  }
  DCHECK_GE(unused_in_property_array, 0);
  DCHECK_LT(unused_in_property_array, JSObject::kFieldsAdded);
  set_used_or_unused_instance_size_in_words(unused_in_property_array);
  DCHECK_EQ(unused_in_property_array, UnusedPropertyFields());
}
// Raw access to the first byte of packed boolean flags.
byte Map::bit_field() const { return READ_BYTE_FIELD(this, kBitFieldOffset); }

void Map::set_bit_field(byte value) {
  WRITE_BYTE_FIELD(this, kBitFieldOffset, value);
}

// Raw access to the second byte of packed flags (incl. elements kind).
byte Map::bit_field2() const { return READ_BYTE_FIELD(this, kBitField2Offset); }

void Map::set_bit_field2(byte value) {
  WRITE_BYTE_FIELD(this, kBitField2Offset, value);
}

// "Non-instance prototype": the function's prototype is not used as the
// [[Prototype]] of instances it constructs.
void Map::set_non_instance_prototype(bool value) {
  if (value) {
    set_bit_field(bit_field() | (1 << kHasNonInstancePrototype));
  } else {
    set_bit_field(bit_field() & ~(1 << kHasNonInstancePrototype));
  }
}

bool Map::has_non_instance_prototype() const {
  // Only meaningful on maps that have a prototype slot at all.
  if (!has_prototype_slot()) return false;
  return ((1 << kHasNonInstancePrototype) & bit_field()) != 0;
}

void Map::set_is_constructor(bool value) {
  if (value) {
    set_bit_field(bit_field() | (1 << kIsConstructor));
  } else {
    set_bit_field(bit_field() & ~(1 << kIsConstructor));
  }
}

bool Map::is_constructor() const {
  return ((1 << kIsConstructor) & bit_field()) != 0;
}

// Hidden-prototype flag lives in bit_field3, unlike its neighbors here.
void Map::set_has_hidden_prototype(bool value) {
  set_bit_field3(HasHiddenPrototype::update(bit_field3(), value));
}

bool Map::has_hidden_prototype() const {
  return HasHiddenPrototype::decode(bit_field3());
}

// Note: the interceptor / undetectable / access-check setters below are
// one-way; there are no corresponding clear operations.
void Map::set_has_indexed_interceptor() {
  set_bit_field(bit_field() | (1 << kHasIndexedInterceptor));
}

bool Map::has_indexed_interceptor() const {
  return ((1 << kHasIndexedInterceptor) & bit_field()) != 0;
}

void Map::set_is_undetectable() {
  set_bit_field(bit_field() | (1 << kIsUndetectable));
}

bool Map::is_undetectable() const {
  return ((1 << kIsUndetectable) & bit_field()) != 0;
}

void Map::set_has_named_interceptor() {
  set_bit_field(bit_field() | (1 << kHasNamedInterceptor));
}

bool Map::has_named_interceptor() const {
  return ((1 << kHasNamedInterceptor) & bit_field()) != 0;
}

void Map::set_is_access_check_needed(bool access_check_needed) {
  if (access_check_needed) {
    set_bit_field(bit_field() | (1 << kIsAccessCheckNeeded));
  } else {
    set_bit_field(bit_field() & ~(1 << kIsAccessCheckNeeded));
  }
}

bool Map::is_access_check_needed() const {
  return ((1 << kIsAccessCheckNeeded) & bit_field()) != 0;
}

void Map::set_is_extensible(bool value) {
  if (value) {
    set_bit_field2(bit_field2() | (1 << kIsExtensible));
  } else {
    set_bit_field2(bit_field2() & ~(1 << kIsExtensible));
  }
}

bool Map::is_extensible() const {
  return ((1 << kIsExtensible) & bit_field2()) != 0;
}

void Map::set_is_prototype_map(bool value) {
  set_bit_field2(IsPrototypeMapBits::update(bit_field2(), value));
}

bool Map::is_prototype_map() const {
  return IsPrototypeMapBits::decode(bit_field2());
}
// A prototype map that no longer owns its descriptors is "abandoned".
bool Map::is_abandoned_prototype_map() const {
  return is_prototype_map() && !owns_descriptors();
}

// Reads the should_be_fast hint from PrototypeInfo, if one is attached.
bool Map::should_be_fast_prototype_map() const {
  if (!prototype_info()->IsPrototypeInfo()) return false;
  return PrototypeInfo::cast(prototype_info())->should_be_fast_map();
}
// Elements kind is packed into a bitfield inside bit_field2.
void Map::set_elements_kind(ElementsKind elements_kind) {
  DCHECK_LT(static_cast<int>(elements_kind), kElementsKindCount);
  DCHECK_LE(kElementsKindCount, 1 << Map::ElementsKindBits::kSize);
  set_bit_field2(Map::ElementsKindBits::update(bit_field2(), elements_kind));
  DCHECK(this->elements_kind() == elements_kind);
}

ElementsKind Map::elements_kind() const {
  return Map::ElementsKindBits::decode(bit_field2());
}

// Convenience predicates over elements_kind(); each forwards to the
// corresponding ElementsKind classifier.
bool Map::has_fast_smi_elements() const {
  return IsSmiElementsKind(elements_kind());
}

bool Map::has_fast_object_elements() const {
  return IsObjectElementsKind(elements_kind());
}

bool Map::has_fast_smi_or_object_elements() const {
  return IsSmiOrObjectElementsKind(elements_kind());
}

bool Map::has_fast_double_elements() const {
  return IsDoubleElementsKind(elements_kind());
}

bool Map::has_fast_elements() const {
  return IsFastElementsKind(elements_kind());
}

bool Map::has_sloppy_arguments_elements() const {
  return IsSloppyArgumentsElementsKind(elements_kind());
}

bool Map::has_fast_sloppy_arguments_elements() const {
  return elements_kind() == FAST_SLOPPY_ARGUMENTS_ELEMENTS;
}

bool Map::has_fast_string_wrapper_elements() const {
  return elements_kind() == FAST_STRING_WRAPPER_ELEMENTS;
}

bool Map::has_fixed_typed_array_elements() const {
  return IsFixedTypedArrayElementsKind(elements_kind());
}

bool Map::has_dictionary_elements() const {
  return IsDictionaryElementsKind(elements_kind());
}
// Marking a map as dictionary-mode also marks it unstable, since dictionary
// maps can change shape without a map transition.
void Map::set_dictionary_map(bool value) {
  uint32_t new_bit_field3 = DictionaryMap::update(bit_field3(), value);
  new_bit_field3 = IsUnstable::update(new_bit_field3, value);
  set_bit_field3(new_bit_field3);
}

bool Map::is_dictionary_map() const {
  return DictionaryMap::decode(bit_field3());
}

// Whether this map owns its descriptor array (vs. sharing a transition
// sibling's array).
void Map::set_owns_descriptors(bool owns_descriptors) {
  set_bit_field3(OwnsDescriptors::update(bit_field3(), owns_descriptors));
}

bool Map::owns_descriptors() const {
  return OwnsDescriptors::decode(bit_field3());
}

// One-way flag: there is no way to clear callable-ness.
void Map::set_is_callable() { set_bit_field(bit_field() | (1 << kIsCallable)); }

bool Map::is_callable() const {
  return ((1 << kIsCallable) & bit_field()) != 0;
}

// Marks this map as deprecated (superseded by an updated map); logs a map
// event when map tracing is on.
void Map::deprecate() {
  set_bit_field3(Deprecated::update(bit_field3(), true));
  if (FLAG_trace_maps) {
    LOG(GetIsolate(), MapEvent("Deprecate", this, nullptr));
  }
}

bool Map::is_deprecated() const { return Deprecated::decode(bit_field3()); }

void Map::set_migration_target(bool value) {
  set_bit_field3(IsMigrationTarget::update(bit_field3(), value));
}

bool Map::is_migration_target() const {
  return IsMigrationTarget::decode(bit_field3());
}

void Map::set_immutable_proto(bool value) {
  set_bit_field3(ImmutablePrototype::update(bit_field3(), value));
}

bool Map::is_immutable_proto() const {
  return ImmutablePrototype::decode(bit_field3());
}

void Map::set_new_target_is_base(bool value) {
  set_bit_field3(NewTargetIsBase::update(bit_field3(), value));
}

bool Map::new_target_is_base() const {
  return NewTargetIsBase::decode(bit_field3());
}

void Map::set_may_have_interesting_symbols(bool value) {
  set_bit_field3(MayHaveInterestingSymbols::update(bit_field3(), value));
}

bool Map::may_have_interesting_symbols() const {
  return MayHaveInterestingSymbols::decode(bit_field3());
}

// Counter used by in-object slack tracking (see InobjectSlackTrackingStep).
void Map::set_construction_counter(int value) {
  set_bit_field3(ConstructionCounter::update(bit_field3(), value));
}

int Map::construction_counter() const {
  return ConstructionCounter::decode(bit_field3());
}

// One-way: a map can be marked unstable but never re-stabilized.
void Map::mark_unstable() {
  set_bit_field3(IsUnstable::update(bit_field3(), true));
}

bool Map::is_stable() const { return !IsUnstable::decode(bit_field3()); }
// True when any own descriptor could be generalized by a future store,
// which would deprecate this map in favor of an updated one.
bool Map::CanBeDeprecated() const {
  int descriptor = LastAdded();
  for (int i = 0; i <= descriptor; i++) {
    PropertyDetails details = instance_descriptors()->GetDetails(i);
    // Any non-terminal representation can still be widened.
    if (details.representation().IsNone()) return true;
    if (details.representation().IsSmi()) return true;
    if (details.representation().IsDouble()) return true;
    if (details.representation().IsHeapObject()) return true;
    // Data properties stored in the descriptor itself can migrate to fields.
    if (details.kind() == kData && details.location() == kDescriptor) {
      return true;
    }
  }
  return false;
}
// Called when the layout of a leaf map changes: mark it unstable and
// deoptimize code that depended on its prototype-check stability.
void Map::NotifyLeafMapLayoutChange() {
  if (is_stable()) {
    mark_unstable();
    dependent_code()->DeoptimizeDependentCodeGroup(
        GetIsolate(), DependentCode::kPrototypeCheckGroup);
  }
}

bool Map::CanTransition() const {
  // Only JSObject and subtypes have map transitions and back pointers.
  STATIC_ASSERT(LAST_TYPE == LAST_JS_OBJECT_TYPE);
  return instance_type() >= FIRST_JS_OBJECT_TYPE;
}
// Instance-type classification predicates. Range checks rely on the
// STATIC_ASSERTed ordering of the InstanceType enum.
bool Map::IsBooleanMap() const { return this == GetHeap()->boolean_map(); }

bool Map::IsPrimitiveMap() const {
  STATIC_ASSERT(FIRST_PRIMITIVE_TYPE == FIRST_TYPE);
  return instance_type() <= LAST_PRIMITIVE_TYPE;
}

bool Map::IsJSReceiverMap() const {
  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
  return instance_type() >= FIRST_JS_RECEIVER_TYPE;
}

bool Map::IsJSObjectMap() const {
  STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
  return instance_type() >= FIRST_JS_OBJECT_TYPE;
}

bool Map::IsJSArrayMap() const { return instance_type() == JS_ARRAY_TYPE; }

bool Map::IsJSFunctionMap() const {
  return instance_type() == JS_FUNCTION_TYPE;
}

bool Map::IsStringMap() const { return instance_type() < FIRST_NONSTRING_TYPE; }

bool Map::IsJSProxyMap() const { return instance_type() == JS_PROXY_TYPE; }

bool Map::IsJSGlobalProxyMap() const {
  return instance_type() == JS_GLOBAL_PROXY_TYPE;
}

bool Map::IsJSGlobalObjectMap() const {
  return instance_type() == JS_GLOBAL_OBJECT_TYPE;
}

bool Map::IsJSTypedArrayMap() const {
  return instance_type() == JS_TYPED_ARRAY_TYPE;
}

bool Map::IsJSDataViewMap() const {
  return instance_type() == JS_DATA_VIEW_TYPE;
}
// The [[Prototype]] installed on instances created from this map.
Object* Map::prototype() const { return READ_FIELD(this, kPrototypeOffset); }

void Map::set_prototype(Object* value, WriteBarrierMode mode) {
  DCHECK(value->IsNull(GetIsolate()) || value->IsJSReceiver());
  WRITE_FIELD(this, kPrototypeOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kPrototypeOffset, value, mode);
}

// Relaxed read of the layout descriptor, safe to call concurrently with the
// GC (hence cast_gc_safe instead of a checked cast).
LayoutDescriptor* Map::layout_descriptor_gc_safe() const {
  DCHECK(FLAG_unbox_double_fields);
  Object* layout_desc = RELAXED_READ_FIELD(this, kLayoutDescriptorOffset);
  return LayoutDescriptor::cast_gc_safe(layout_desc);
}

// True when every field is tagged (the fast, all-pointer layout).
bool Map::HasFastPointerLayout() const {
  DCHECK(FLAG_unbox_double_fields);
  Object* layout_desc = RELAXED_READ_FIELD(this, kLayoutDescriptorOffset);
  return LayoutDescriptor::IsFastPointerLayout(layout_desc);
}
// Replaces the descriptor array (and, with unboxed doubles, the layout
// descriptor) without changing the number of own descriptors.
void Map::UpdateDescriptors(DescriptorArray* descriptors,
                            LayoutDescriptor* layout_desc) {
  set_instance_descriptors(descriptors);
  if (FLAG_unbox_double_fields) {
    // Fast layouts are canonical singletons; only slow layouts need storing.
    if (layout_descriptor()->IsSlowLayout()) {
      set_layout_descriptor(layout_desc);
    }
#ifdef VERIFY_HEAP
    // TODO(ishell): remove these checks from VERIFY_HEAP mode.
    if (FLAG_verify_heap) {
      CHECK(layout_descriptor()->IsConsistentWithMap(this));
      CHECK_EQ(Map::GetVisitorId(this), visitor_id());
    }
#else
    SLOW_DCHECK(layout_descriptor()->IsConsistentWithMap(this));
    DCHECK(visitor_id() == Map::GetVisitorId(this));
#endif
  }
}

// Installs a descriptor array on a fresh map, claiming ownership of all of
// its descriptors and recomputing the GC visitor id.
void Map::InitializeDescriptors(DescriptorArray* descriptors,
                                LayoutDescriptor* layout_desc) {
  int len = descriptors->number_of_descriptors();
  set_instance_descriptors(descriptors);
  SetNumberOfOwnDescriptors(len);

  if (FLAG_unbox_double_fields) {
    set_layout_descriptor(layout_desc);
#ifdef VERIFY_HEAP
    // TODO(ishell): remove these checks from VERIFY_HEAP mode.
    if (FLAG_verify_heap) {
      CHECK(layout_descriptor()->IsConsistentWithMap(this));
    }
#else
    SLOW_DCHECK(layout_descriptor()->IsConsistentWithMap(this));
#endif
    set_visitor_id(Map::GetVisitorId(this));
  }
}
void Map::set_bit_field3(uint32_t bits) {
  // On 64-bit targets the field occupies a full pointer slot; zero the upper
  // half so the whole word has a deterministic value.
  if (kInt32Size != kPointerSize) {
    WRITE_UINT32_FIELD(this, kBitField3Offset + kInt32Size, 0);
  }
  WRITE_UINT32_FIELD(this, kBitField3Offset, bits);
}

uint32_t Map::bit_field3() const {
  return READ_UINT32_FIELD(this, kBitField3Offset);
}

// Returns the stored layout descriptor, or the canonical fast-pointer layout
// when double unboxing is disabled.
LayoutDescriptor* Map::GetLayoutDescriptor() const {
  return FLAG_unbox_double_fields ? layout_descriptor()
                                  : LayoutDescriptor::FastPointerLayout();
}
// Appends |desc| to this map's (fully owned) descriptor array and updates
// the map's own bookkeeping: own-descriptor count, the interesting-symbol
// hint, and — for field-backed properties — the unused-field accounting.
void Map::AppendDescriptor(Descriptor* desc) {
  DescriptorArray* descriptors = instance_descriptors();
  int number_of_own_descriptors = NumberOfOwnDescriptors();
  // This map must own every descriptor in the array; otherwise appending
  // would corrupt a sibling map sharing the array.
  DCHECK_EQ(descriptors->number_of_descriptors(), number_of_own_descriptors);
  descriptors->Append(desc);
  SetNumberOfOwnDescriptors(number_of_own_descriptors + 1);

  // Properly mark the map if the {desc} is an "interesting symbol".
  if (desc->GetKey()->IsInterestingSymbol()) {
    set_may_have_interesting_symbols(true);
  }
  PropertyDetails details = desc->GetDetails();
  if (details.location() == kField) {
    DCHECK_GT(UnusedPropertyFields(), 0);
    AccountAddedPropertyField();
  }
  // This function does not support appending double field descriptors and
  // it should never try to (otherwise, layout descriptor must be updated
  // too). Note: DCHECK already compiles to nothing in release builds, so no
  // extra #ifdef DEBUG guard is needed around it.
  DCHECK(details.location() != kField || !details.representation().IsDouble());
}
// The transition-tree parent of this map, or undefined for a root map. The
// slot is shared with the constructor (see constructor_or_backpointer).
Object* Map::GetBackPointer() const {
  Object* object = constructor_or_backpointer();
  if (object->IsMap()) {
    return object;
  }
  return GetIsolate()->heap()->undefined_value();
}

// Looks up the special elements-kind transition, or null if there is none.
Map* Map::ElementsTransitionMap() {
  DisallowHeapAllocation no_gc;
  return TransitionsAccessor(this, &no_gc)
      .SearchSpecial(GetHeap()->elements_transition_symbol());
}
// For prototype maps, the transitions slot is reused to hold PrototypeInfo.
Object* Map::prototype_info() const {
  DCHECK(is_prototype_map());
  return READ_FIELD(this, Map::kTransitionsOrPrototypeInfoOffset);
}

void Map::set_prototype_info(Object* value, WriteBarrierMode mode) {
  DCHECK(is_prototype_map());
  WRITE_FIELD(this, Map::kTransitionsOrPrototypeInfoOffset, value);
  CONDITIONAL_WRITE_BARRIER(
      GetHeap(), this, Map::kTransitionsOrPrototypeInfoOffset, value, mode);
}

// Installs the transition-tree parent. May only be done once (the slot must
// still hold the constructor), and the parent must share that constructor.
void Map::SetBackPointer(Object* value, WriteBarrierMode mode) {
  DCHECK(instance_type() >= FIRST_JS_RECEIVER_TYPE);
  DCHECK(value->IsMap());
  DCHECK(GetBackPointer()->IsUndefined(GetIsolate()));
  DCHECK(!value->IsMap() ||
         Map::cast(value)->GetConstructor() == constructor_or_backpointer());
  set_constructor_or_backpointer(value, mode);
}

// Macro-generated accessors for the remaining pointer fields.
ACCESSORS(Map, dependent_code, DependentCode, kDependentCodeOffset)
ACCESSORS(Map, weak_cell_cache, Object, kWeakCellCacheOffset)
ACCESSORS(Map, constructor_or_backpointer, Object,
          kConstructorOrBackPointerOffset)
// Returns the constructor, chasing back pointers up the transition tree
// until a non-map value (the actual constructor) is reached.
Object* Map::GetConstructor() const {
  Object* maybe_constructor = constructor_or_backpointer();
  // Follow any back pointers.
  while (maybe_constructor->IsMap()) {
    maybe_constructor =
        Map::cast(maybe_constructor)->constructor_or_backpointer();
  }
  return maybe_constructor;
}

// Returns the FunctionTemplateInfo backing this map's (API) constructor,
// whether stored directly or via an API JSFunction.
FunctionTemplateInfo* Map::GetFunctionTemplateInfo() const {
  Object* constructor = GetConstructor();
  if (constructor->IsJSFunction()) {
    DCHECK(JSFunction::cast(constructor)->shared()->IsApiFunction());
    return JSFunction::cast(constructor)->shared()->get_api_func_data();
  }
  DCHECK(constructor->IsFunctionTemplateInfo());
  return FunctionTemplateInfo::cast(constructor);
}

void Map::SetConstructor(Object* constructor, WriteBarrierMode mode) {
  // Never overwrite a back pointer with a constructor.
  DCHECK(!constructor_or_backpointer()->IsMap());
  set_constructor_or_backpointer(constructor, mode);
}
// Copies an initial map keeping its size and property accounting intact.
Handle<Map> Map::CopyInitialMap(Handle<Map> map) {
  return CopyInitialMap(map, map->instance_size(), map->GetInObjectProperties(),
                        map->UnusedPropertyFields());
}

bool Map::IsInobjectSlackTrackingInProgress() const {
  return construction_counter() != Map::kNoSlackTracking;
}

// Decrements the construction counter on each instantiation; when it hits
// the end sentinel, unused in-object slack is trimmed for good.
void Map::InobjectSlackTrackingStep() {
  // Slack tracking should only be performed on an initial map.
  DCHECK(GetBackPointer()->IsUndefined(GetIsolate()));
  if (!IsInobjectSlackTrackingInProgress()) return;
  int counter = construction_counter();
  set_construction_counter(counter - 1);
  if (counter == kSlackTrackingCounterEnd) {
    CompleteInobjectSlackTracking();
  }
}
// Amount of growth slack to reserve when enlarging an array of |old_size|
// elements, never exceeding |size_limit| in total.
int Map::SlackForArraySize(int old_size, int size_limit) {
  const int available = size_limit - old_size;
  CHECK_LE(0, available);
  // Larger arrays get 25% slack, clamped to the remaining headroom below
  // the limit; tiny arrays grow a single slot at a time.
  if (old_size >= 4) return Min(available, old_size / 4);
  DCHECK_LE(1, available);
  return 1;
}
// Cache slot for |map|: its hash reduced modulo the fixed cache size.
int NormalizedMapCache::GetIndex(Handle<Map> map) {
  return map->Hash() % NormalizedMapCache::kEntries;
}