[cleanup] Clean up SYNCHRONIZED_ACCESSORS macro naming and its uses

We can use tag dispatching to distinguish between the synchronized and
non-synchronized accessors. This also eliminates the need to add an
explicit "synchronized" prefix to the name when using the macros.

As a note, we currently have one case of using both relaxed and
synchronized accessors (Map::instance_descriptors).

Cleaned up:
 * BytecodeArray::source_position_table
 * Code::code_data_container
 * Code::source_position_table
 * FunctionTemplateInfo::call_code
 * Map::instance_descriptors
 * Map::layout_descriptor
 * SharedFunctionInfo::function_data

Bug: v8:7790
Change-Id: I5a502f4b2df6addb6c45056e77061271012c7d90
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2424130
Commit-Queue: Santiago Aboy Solanes <solanes@chromium.org>
Reviewed-by: Georg Neis <neis@chromium.org>
Reviewed-by: Dominik Inführ <dinfuehr@chromium.org>
Reviewed-by: Leszek Swirski <leszeks@chromium.org>
Reviewed-by: Ross McIlroy <rmcilroy@chromium.org>
Cr-Commit-Position: refs/heads/master@{#70306}
This commit is contained in:
Santiago Aboy Solanes 2020-10-05 10:22:13 +01:00 committed by Commit Bot
parent 215238d389
commit c7c8472ddc
70 changed files with 640 additions and 473 deletions

View File

@ -361,7 +361,8 @@ bool IsSimpleInstantiation(Isolate* isolate, ObjectTemplateInfo info,
if (!new_target.IsJSFunction()) return false;
JSFunction fun = JSFunction::cast(new_target);
if (fun.shared().function_data() != info.constructor()) return false;
if (fun.shared().function_data(kAcquireLoad) != info.constructor())
return false;
if (info.immutable_proto()) return false;
return fun.context().native_context() == isolate->raw_native_context();
}

View File

@ -1560,7 +1560,7 @@ void FunctionTemplate::SetCallHandler(FunctionCallback callback,
isolate, info,
i::handle(*FromCData(isolate, c_function->GetTypeInfo()), isolate));
}
info->set_synchronized_call_code(*obj);
info->set_call_code(*obj, kReleaseStore);
}
namespace {
@ -4441,7 +4441,8 @@ MaybeLocal<Array> v8::Object::GetPropertyNames(
accumulator.GetKeys(static_cast<i::GetKeysConversion>(key_conversion));
DCHECK(self->map().EnumLength() == i::kInvalidEnumCacheSentinel ||
self->map().EnumLength() == 0 ||
self->map().instance_descriptors().enum_cache().keys() != *value);
self->map().instance_descriptors(kRelaxedLoad).enum_cache().keys() !=
*value);
auto result = isolate->factory()->NewJSArrayWithElements(value);
RETURN_ESCAPED(Utils::ToLocal(result));
}
@ -4946,7 +4947,8 @@ MaybeLocal<Object> Function::NewInstanceWithSideEffectType(
CHECK(self->IsJSFunction() &&
i::JSFunction::cast(*self).shared().IsApiFunction());
i::Object obj =
i::JSFunction::cast(*self).shared().get_api_func_data().call_code();
i::JSFunction::cast(*self).shared().get_api_func_data().call_code(
kAcquireLoad);
if (obj.IsCallHandlerInfo()) {
i::CallHandlerInfo handler_info = i::CallHandlerInfo::cast(obj);
if (!handler_info.IsSideEffectFreeCallHandlerInfo()) {
@ -4960,7 +4962,8 @@ MaybeLocal<Object> Function::NewInstanceWithSideEffectType(
i::Execution::New(isolate, self, self, argc, args), &result);
if (should_set_has_no_side_effect) {
i::Object obj =
i::JSFunction::cast(*self).shared().get_api_func_data().call_code();
i::JSFunction::cast(*self).shared().get_api_func_data().call_code(
kAcquireLoad);
if (obj.IsCallHandlerInfo()) {
i::CallHandlerInfo handler_info = i::CallHandlerInfo::cast(obj);
if (has_pending_exception) {

View File

@ -99,7 +99,7 @@ V8_WARN_UNUSED_RESULT MaybeHandle<Object> HandleApiCallHelper(
}
}
Object raw_call_data = fun_data->call_code();
Object raw_call_data = fun_data->call_code(kAcquireLoad);
if (!raw_call_data.IsUndefined(isolate)) {
DCHECK(raw_call_data.IsCallHandlerInfo());
CallHandlerInfo call_data = CallHandlerInfo::cast(raw_call_data);

View File

@ -269,7 +269,7 @@ void AsyncBuiltinsAssembler::InitializeNativeClosure(
StoreObjectFieldNoWriteBarrier(function, JSFunction::kContextOffset, context);
// For the native closures that are initialized here (for `await`)
// we know that their SharedFunctionInfo::function_data() slot
// we know that their SharedFunctionInfo::function_data(kAcquireLoad) slot
// contains a builtin index (as Smi), so there's no need to use
// CodeStubAssembler::GetSharedFunctionInfoCode() helper here,
// which almost doubles the size of `await` builtins (unnecessarily).

View File

@ -480,7 +480,7 @@ bool UseAsmWasm(FunctionLiteral* literal, bool asm_wasm_broken) {
void InstallInterpreterTrampolineCopy(Isolate* isolate,
Handle<SharedFunctionInfo> shared_info) {
DCHECK(FLAG_interpreted_frames_native_stack);
if (!shared_info->function_data().IsBytecodeArray()) {
if (!shared_info->function_data(kAcquireLoad).IsBytecodeArray()) {
DCHECK(!shared_info->HasBytecodeArray());
return;
}
@ -1700,8 +1700,8 @@ bool Compiler::CollectSourcePositions(Isolate* isolate,
shared_info->GetDebugInfo().HasInstrumentedBytecodeArray()) {
ByteArray source_position_table =
job->compilation_info()->bytecode_array()->SourcePositionTable();
shared_info->GetDebugBytecodeArray().set_synchronized_source_position_table(
source_position_table);
shared_info->GetDebugBytecodeArray().set_source_position_table(
source_position_table, kReleaseStore);
}
DCHECK(!isolate->has_pending_exception());

View File

@ -1682,6 +1682,17 @@ enum class DynamicMapChecksStatus : uint8_t {
};
} // namespace internal
// Tag dispatching support for acquire loads and release stores.
struct AcquireLoadTag {};
struct RelaxedLoadTag {};
struct ReleaseStoreTag {};
struct RelaxedStoreTag {};
static constexpr AcquireLoadTag kAcquireLoad;
static constexpr RelaxedLoadTag kRelaxedLoad;
static constexpr ReleaseStoreTag kReleaseStore;
static constexpr RelaxedStoreTag kRelaxedStore;
} // namespace v8
namespace i = v8::internal;

View File

@ -364,7 +364,8 @@ PropertyAccessInfo AccessInfoFactory::ComputeDataFieldAccessInfo(
Handle<Map> receiver_map, Handle<Map> map, MaybeHandle<JSObject> holder,
InternalIndex descriptor, AccessMode access_mode) const {
DCHECK(descriptor.is_found());
Handle<DescriptorArray> descriptors(map->instance_descriptors(), isolate());
Handle<DescriptorArray> descriptors(map->instance_descriptors(kRelaxedLoad),
isolate());
PropertyDetails const details = descriptors->GetDetails(descriptor);
int index = descriptors->GetFieldIndex(descriptor);
Representation details_representation = details.representation();
@ -459,7 +460,8 @@ PropertyAccessInfo AccessInfoFactory::ComputeAccessorDescriptorAccessInfo(
MaybeHandle<JSObject> holder, InternalIndex descriptor,
AccessMode access_mode) const {
DCHECK(descriptor.is_found());
Handle<DescriptorArray> descriptors(map->instance_descriptors(), isolate());
Handle<DescriptorArray> descriptors(map->instance_descriptors(kRelaxedLoad),
isolate());
SLOW_DCHECK(descriptor == descriptors->Search(*name, *map));
if (map->instance_type() == JS_MODULE_NAMESPACE_TYPE) {
DCHECK(map->is_prototype_map());
@ -557,8 +559,8 @@ PropertyAccessInfo AccessInfoFactory::ComputePropertyAccessInfo(
MaybeHandle<JSObject> holder;
while (true) {
// Lookup the named property on the {map}.
Handle<DescriptorArray> descriptors(
map->synchronized_instance_descriptors(), isolate());
Handle<DescriptorArray> descriptors(map->instance_descriptors(kAcquireLoad),
isolate());
InternalIndex const number =
descriptors->Search(*name, *map, broker()->is_concurrent_inlining());
if (number.is_found()) {
@ -830,7 +832,7 @@ PropertyAccessInfo AccessInfoFactory::LookupTransition(
Handle<Map> transition_map(transition, isolate());
InternalIndex const number = transition_map->LastAdded();
Handle<DescriptorArray> descriptors(
transition_map->synchronized_instance_descriptors(), isolate());
transition_map->instance_descriptors(kAcquireLoad), isolate());
PropertyDetails const details = descriptors->GetDetails(number);
// Don't bother optimizing stores to read-only properties.
if (details.IsReadOnly()) {

View File

@ -170,8 +170,9 @@ class FieldRepresentationDependency final : public CompilationDependency {
bool IsValid() const override {
DisallowHeapAllocation no_heap_allocation;
Handle<Map> owner = owner_.object();
return representation_.Equals(
owner->instance_descriptors().GetDetails(descriptor_).representation());
return representation_.Equals(owner->instance_descriptors(kRelaxedLoad)
.GetDetails(descriptor_)
.representation());
}
void Install(const MaybeObjectHandle& code) const override {
@ -208,7 +209,8 @@ class FieldTypeDependency final : public CompilationDependency {
DisallowHeapAllocation no_heap_allocation;
Handle<Map> owner = owner_.object();
Handle<Object> type = type_.object();
return *type == owner->instance_descriptors().GetFieldType(descriptor_);
return *type ==
owner->instance_descriptors(kRelaxedLoad).GetFieldType(descriptor_);
}
void Install(const MaybeObjectHandle& code) const override {
@ -236,7 +238,9 @@ class FieldConstnessDependency final : public CompilationDependency {
DisallowHeapAllocation no_heap_allocation;
Handle<Map> owner = owner_.object();
return PropertyConstness::kConst ==
owner->instance_descriptors().GetDetails(descriptor_).constness();
owner->instance_descriptors(kRelaxedLoad)
.GetDetails(descriptor_)
.constness();
}
void Install(const MaybeObjectHandle& code) const override {

View File

@ -329,7 +329,8 @@ void FunctionTemplateInfoData::SerializeCallCode(JSHeapBroker* broker) {
TraceScope tracer(broker, this,
"FunctionTemplateInfoData::SerializeCallCode");
auto function_template_info = Handle<FunctionTemplateInfo>::cast(object());
call_code_ = broker->GetOrCreateData(function_template_info->call_code());
call_code_ =
broker->GetOrCreateData(function_template_info->call_code(kAcquireLoad));
if (call_code_->should_access_heap()) {
// TODO(mvstanton): When ObjectRef is in the never serialized list, this
// code can be removed.
@ -950,8 +951,8 @@ bool IsFastLiteralHelper(Handle<JSObject> boilerplate, int max_depth,
}
// Check the in-object properties.
Handle<DescriptorArray> descriptors(boilerplate->map().instance_descriptors(),
isolate);
Handle<DescriptorArray> descriptors(
boilerplate->map().instance_descriptors(kRelaxedLoad), isolate);
for (InternalIndex i : boilerplate->map().IterateOwnDescriptors()) {
PropertyDetails details = descriptors->GetDetails(i);
if (details.location() != kField) continue;
@ -1241,7 +1242,7 @@ namespace {
bool IsReadOnlyLengthDescriptor(Isolate* isolate, Handle<Map> jsarray_map) {
DCHECK(!jsarray_map->is_dictionary_map());
Handle<Name> length_string = isolate->factory()->length_string();
DescriptorArray descriptors = jsarray_map->instance_descriptors();
DescriptorArray descriptors = jsarray_map->instance_descriptors(kRelaxedLoad);
// TODO(jkummerow): We could skip the search and hardcode number == 0.
InternalIndex number = descriptors.Search(*length_string, *jsarray_map);
DCHECK(number.is_found());
@ -1807,7 +1808,7 @@ void SharedFunctionInfoData::SerializeFunctionTemplateInfo(
JSHeapBroker* broker) {
if (function_template_info_) return;
function_template_info_ = broker->GetOrCreateData(
Handle<SharedFunctionInfo>::cast(object())->function_data());
Handle<SharedFunctionInfo>::cast(object())->function_data(kAcquireLoad));
}
void SharedFunctionInfoData::SerializeScopeInfoChain(JSHeapBroker* broker) {
@ -2135,8 +2136,9 @@ void MapData::SerializeOwnDescriptor(JSHeapBroker* broker,
Handle<Map> map = Handle<Map>::cast(object());
if (instance_descriptors_ == nullptr) {
instance_descriptors_ = broker->GetOrCreateData(map->instance_descriptors())
->AsDescriptorArray();
instance_descriptors_ =
broker->GetOrCreateData(map->instance_descriptors(kRelaxedLoad))
->AsDescriptorArray();
}
ZoneMap<int, PropertyDescriptor>& contents =
@ -2147,7 +2149,7 @@ void MapData::SerializeOwnDescriptor(JSHeapBroker* broker,
Isolate* const isolate = broker->isolate();
auto descriptors =
Handle<DescriptorArray>::cast(instance_descriptors_->object());
CHECK_EQ(*descriptors, map->instance_descriptors());
CHECK_EQ(*descriptors, map->instance_descriptors(kRelaxedLoad));
PropertyDescriptor d;
d.key = broker->GetOrCreateData(descriptors->GetKey(descriptor_index));
@ -2261,8 +2263,8 @@ void JSObjectData::SerializeRecursiveAsBoilerplate(JSHeapBroker* broker,
CHECK_EQ(inobject_fields_.size(), 0u);
// Check the in-object properties.
Handle<DescriptorArray> descriptors(boilerplate->map().instance_descriptors(),
isolate);
Handle<DescriptorArray> descriptors(
boilerplate->map().instance_descriptors(kRelaxedLoad), isolate);
for (InternalIndex i : boilerplate->map().IterateOwnDescriptors()) {
PropertyDetails details = descriptors->GetDetails(i);
if (details.location() != kField) continue;
@ -3078,7 +3080,9 @@ PropertyDetails MapRef::GetPropertyDetails(
if (data_->should_access_heap()) {
AllowHandleDereferenceIfNeeded allow_handle_dereference(data()->kind(),
broker()->mode());
return object()->instance_descriptors().GetDetails(descriptor_index);
return object()
->instance_descriptors(kRelaxedLoad)
.GetDetails(descriptor_index);
}
DescriptorArrayData* descriptors = data()->AsMap()->instance_descriptors();
return descriptors->contents().at(descriptor_index.as_int()).details;
@ -3090,10 +3094,10 @@ NameRef MapRef::GetPropertyKey(InternalIndex descriptor_index) const {
broker()->mode());
AllowHandleDereferenceIfNeeded allow_handle_dereference(data()->kind(),
broker()->mode());
return NameRef(
broker(),
broker()->CanonicalPersistentHandle(
object()->instance_descriptors().GetKey(descriptor_index)));
return NameRef(broker(), broker()->CanonicalPersistentHandle(
object()
->instance_descriptors(kRelaxedLoad)
.GetKey(descriptor_index)));
}
DescriptorArrayData* descriptors = data()->AsMap()->instance_descriptors();
return NameRef(broker(),
@ -3133,9 +3137,10 @@ ObjectRef MapRef::GetFieldType(InternalIndex descriptor_index) const {
broker()->mode());
AllowHandleDereferenceIfNeeded allow_handle_dereference(data()->kind(),
broker()->mode());
Handle<FieldType> field_type(
object()->instance_descriptors().GetFieldType(descriptor_index),
broker()->isolate());
Handle<FieldType> field_type(object()
->instance_descriptors(kRelaxedLoad)
.GetFieldType(descriptor_index),
broker()->isolate());
return ObjectRef(broker(), field_type);
}
DescriptorArrayData* descriptors = data()->AsMap()->instance_descriptors();
@ -3420,9 +3425,8 @@ BIMODAL_ACCESSOR_C(PropertyCell, PropertyDetails, property_details)
base::Optional<CallHandlerInfoRef> FunctionTemplateInfoRef::call_code() const {
if (data_->should_access_heap()) {
return CallHandlerInfoRef(broker(),
broker()->CanonicalPersistentHandle(
object()->synchronized_call_code()));
return CallHandlerInfoRef(broker(), broker()->CanonicalPersistentHandle(
object()->call_code(kAcquireLoad)));
}
ObjectData* call_code = data()->AsFunctionTemplateInfo()->call_code();
if (!call_code) return base::nullopt;
@ -3544,7 +3548,7 @@ base::Optional<ObjectRef> MapRef::GetStrongValue(
AllowHandleDereferenceIfNeeded allow_handle_dereference(data()->kind(),
broker()->mode());
MaybeObject value =
object()->instance_descriptors().GetValue(descriptor_index);
object()->instance_descriptors(kRelaxedLoad).GetValue(descriptor_index);
HeapObject object;
if (value.GetHeapObjectIfStrong(&object)) {
return ObjectRef(broker(), broker()->CanonicalPersistentHandle((object)));
@ -4274,8 +4278,8 @@ SharedFunctionInfoRef::function_template_info() const {
if (data_->should_access_heap()) {
if (object()->IsApiFunction()) {
return FunctionTemplateInfoRef(
broker(),
broker()->CanonicalPersistentHandle(object()->function_data()));
broker(), broker()->CanonicalPersistentHandle(
object()->function_data(kAcquireLoad)));
}
return base::nullopt;
}

View File

@ -2246,7 +2246,8 @@ void SerializerForBackgroundCompilation::ProcessApiCall(
Builtins::kCallFunctionTemplate_CheckAccessAndCompatibleReceiver));
FunctionTemplateInfoRef target_template_info(
broker(), handle(target->function_data(), broker()->isolate()));
broker(),
handle(target->function_data(kAcquireLoad), broker()->isolate()));
if (!target_template_info.has_call_code()) return;
target_template_info.SerializeCallCode();

View File

@ -1327,7 +1327,8 @@ void Debug::InstallDebugBreakTrampoline() {
}
} else if (obj.IsJSObject()) {
JSObject object = JSObject::cast(obj);
DescriptorArray descriptors = object.map().instance_descriptors();
DescriptorArray descriptors =
object.map().instance_descriptors(kRelaxedLoad);
for (InternalIndex i : object.map().IterateOwnDescriptors()) {
if (descriptors.GetDetails(i).kind() == PropertyKind::kAccessor) {

View File

@ -1025,7 +1025,7 @@ void TranslateSourcePositionTable(Isolate* isolate, Handle<BytecodeArray> code,
Handle<ByteArray> new_source_position_table(
builder.ToSourcePositionTable(isolate));
code->set_synchronized_source_position_table(*new_source_position_table);
code->set_source_position_table(*new_source_position_table, kReleaseStore);
LOG_CODE_EVENT(isolate,
CodeLinePosInfoRecordEvent(code->GetFirstBytecodeAddress(),
*new_source_position_table));

View File

@ -4000,7 +4000,8 @@ void TranslatedState::EnsurePropertiesAllocatedAndMarked(
properties_slot->set_storage(object_storage);
// Set markers for out-of-object properties.
Handle<DescriptorArray> descriptors(map->instance_descriptors(), isolate());
Handle<DescriptorArray> descriptors(map->instance_descriptors(kRelaxedLoad),
isolate());
for (InternalIndex i : map->IterateOwnDescriptors()) {
FieldIndex index = FieldIndex::ForDescriptor(*map, i);
Representation representation = descriptors->GetDetails(i).representation();
@ -4033,7 +4034,8 @@ void TranslatedState::EnsureJSObjectAllocated(TranslatedValue* slot,
Handle<ByteArray> object_storage = AllocateStorageFor(slot);
// Now we handle the interesting (JSObject) case.
Handle<DescriptorArray> descriptors(map->instance_descriptors(), isolate());
Handle<DescriptorArray> descriptors(map->instance_descriptors(kRelaxedLoad),
isolate());
// Set markers for in-object properties.
for (InternalIndex i : map->IterateOwnDescriptors()) {

View File

@ -297,9 +297,11 @@ void BytecodeArray::BytecodeArrayVerify(Isolate* isolate) {
CHECK(IsBytecodeArray(isolate));
CHECK(constant_pool(isolate).IsFixedArray(isolate));
VerifyHeapPointer(isolate, constant_pool(isolate));
CHECK(synchronized_source_position_table(isolate).IsUndefined(isolate) ||
synchronized_source_position_table(isolate).IsException(isolate) ||
synchronized_source_position_table(isolate).IsByteArray(isolate));
{
Object table = source_position_table(isolate, kAcquireLoad);
CHECK(table.IsUndefined(isolate) || table.IsException(isolate) ||
table.IsByteArray(isolate));
}
CHECK(handler_table(isolate).IsByteArray(isolate));
for (int i = 0; i < constant_pool(isolate).length(); ++i) {
// No ThinStrings in the constant pool.
@ -377,7 +379,7 @@ void JSObject::JSObjectVerify(Isolate* isolate) {
int delta = actual_unused_property_fields - map().UnusedPropertyFields();
CHECK_EQ(0, delta % JSObject::kFieldsAdded);
}
DescriptorArray descriptors = map().instance_descriptors();
DescriptorArray descriptors = map().instance_descriptors(kRelaxedLoad);
bool is_transitionable_fast_elements_kind =
IsTransitionableFastElementsKind(map().elements_kind());
@ -451,13 +453,13 @@ void Map::MapVerify(Isolate* isolate) {
// Root maps must not have descriptors in the descriptor array that do not
// belong to the map.
CHECK_EQ(NumberOfOwnDescriptors(),
instance_descriptors().number_of_descriptors());
instance_descriptors(kRelaxedLoad).number_of_descriptors());
} else {
// If there is a parent map it must be non-stable.
Map parent = Map::cast(GetBackPointer());
CHECK(!parent.is_stable());
DescriptorArray descriptors = instance_descriptors();
if (descriptors == parent.instance_descriptors()) {
DescriptorArray descriptors = instance_descriptors(kRelaxedLoad);
if (descriptors == parent.instance_descriptors(kRelaxedLoad)) {
if (NumberOfOwnDescriptors() == parent.NumberOfOwnDescriptors() + 1) {
// Descriptors sharing through property transitions takes over
// ownership from the parent map.
@ -475,14 +477,14 @@ void Map::MapVerify(Isolate* isolate) {
}
}
}
SLOW_DCHECK(instance_descriptors().IsSortedNoDuplicates());
SLOW_DCHECK(instance_descriptors(kRelaxedLoad).IsSortedNoDuplicates());
DisallowHeapAllocation no_gc;
SLOW_DCHECK(
TransitionsAccessor(isolate, *this, &no_gc).IsSortedNoDuplicates());
SLOW_DCHECK(TransitionsAccessor(isolate, *this, &no_gc)
.IsConsistentWithBackPointers());
SLOW_DCHECK(!FLAG_unbox_double_fields ||
layout_descriptor().IsConsistentWithMap(*this));
layout_descriptor(kAcquireLoad).IsConsistentWithMap(*this));
// Only JSFunction maps have has_prototype_slot() bit set and constructible
// JSFunction objects must have prototype slot.
CHECK_IMPLIES(has_prototype_slot(), instance_type() == JS_FUNCTION_TYPE);
@ -490,7 +492,7 @@ void Map::MapVerify(Isolate* isolate) {
CHECK(!has_named_interceptor());
CHECK(!is_dictionary_map());
CHECK(!is_access_check_needed());
DescriptorArray const descriptors = instance_descriptors();
DescriptorArray const descriptors = instance_descriptors(kRelaxedLoad);
for (InternalIndex i : IterateOwnDescriptors()) {
CHECK(!descriptors.GetKey(i).IsInterestingSymbol());
}
@ -514,7 +516,7 @@ void Map::DictionaryMapVerify(Isolate* isolate) {
CHECK(is_dictionary_map());
CHECK_EQ(kInvalidEnumCacheSentinel, EnumLength());
CHECK_EQ(ReadOnlyRoots(isolate).empty_descriptor_array(),
instance_descriptors());
instance_descriptors(kRelaxedLoad));
CHECK_EQ(0, UnusedPropertyFields());
CHECK_EQ(Map::GetVisitorId(*this), visitor_id());
}

View File

@ -288,7 +288,7 @@ void FreeSpace::FreeSpacePrint(std::ostream& os) { // NOLINT
bool JSObject::PrintProperties(std::ostream& os) { // NOLINT
if (HasFastProperties()) {
DescriptorArray descs = map().instance_descriptors();
DescriptorArray descs = map().instance_descriptors(kRelaxedLoad);
int nof_inobject_properties = map().GetInObjectProperties();
for (InternalIndex i : map().IterateOwnDescriptors()) {
os << "\n ";
@ -1373,7 +1373,7 @@ void SharedFunctionInfo::SharedFunctionInfoPrint(std::ostream& os) { // NOLINT
}
os << "\n - expected_nof_properties: " << expected_nof_properties();
os << "\n - language_mode: " << language_mode();
os << "\n - data: " << Brief(function_data());
os << "\n - data: " << Brief(function_data(kAcquireLoad));
os << "\n - code (from data): ";
os << Brief(GetCode());
PrintSourceCode(os);
@ -1917,7 +1917,7 @@ void FunctionTemplateInfo::FunctionTemplateInfoPrint(
os << "\n - tag: " << tag();
os << "\n - serial_number: " << serial_number();
os << "\n - property_list: " << Brief(property_list());
os << "\n - call_code: " << Brief(call_code());
os << "\n - call_code: " << Brief(call_code(kAcquireLoad));
os << "\n - property_accessors: " << Brief(property_accessors());
os << "\n - signature: " << Brief(signature());
os << "\n - cached_property_name: " << Brief(cached_property_name());
@ -2397,7 +2397,7 @@ int Name::NameShortPrint(Vector<char> str) {
void Map::PrintMapDetails(std::ostream& os) {
DisallowHeapAllocation no_gc;
this->MapPrint(os);
instance_descriptors().PrintDescriptors(os);
instance_descriptors(kRelaxedLoad).PrintDescriptors(os);
}
void Map::MapPrint(std::ostream& os) { // NOLINT
@ -2451,10 +2451,10 @@ void Map::MapPrint(std::ostream& os) { // NOLINT
os << "\n - prototype_validity cell: " << Brief(prototype_validity_cell());
os << "\n - instance descriptors " << (owns_descriptors() ? "(own) " : "")
<< "#" << NumberOfOwnDescriptors() << ": "
<< Brief(instance_descriptors());
<< Brief(instance_descriptors(kRelaxedLoad));
if (FLAG_unbox_double_fields) {
os << "\n - layout descriptor: ";
layout_descriptor().ShortPrint(os);
layout_descriptor(kAcquireLoad).ShortPrint(os);
}
// Read-only maps can't have transitions, which is fortunate because we need
@ -2567,7 +2567,7 @@ void TransitionsAccessor::PrintOneTransition(std::ostream& os, Name key,
DCHECK(!IsSpecialTransition(roots, key));
os << "(transition to ";
InternalIndex descriptor = target.LastAdded();
DescriptorArray descriptors = target.instance_descriptors();
DescriptorArray descriptors = target.instance_descriptors(kRelaxedLoad);
descriptors.PrintDescriptorDetails(os, descriptor,
PropertyDetails::kForTransitions);
os << ")";
@ -2645,7 +2645,7 @@ void TransitionsAccessor::PrintTransitionTree(std::ostream& os, int level,
DCHECK(!IsSpecialTransition(ReadOnlyRoots(isolate_), key));
os << "to ";
InternalIndex descriptor = target.LastAdded();
DescriptorArray descriptors = target.instance_descriptors();
DescriptorArray descriptors = target.instance_descriptors(kRelaxedLoad);
descriptors.PrintDescriptorDetails(os, descriptor,
PropertyDetails::kForTransitions);
}

View File

@ -195,8 +195,8 @@ Handle<BytecodeArray> FactoryBase<Impl>::NewBytecodeArray(
instance->set_bytecode_age(BytecodeArray::kNoAgeBytecodeAge);
instance->set_constant_pool(*constant_pool);
instance->set_handler_table(read_only_roots().empty_byte_array());
instance->set_synchronized_source_position_table(
read_only_roots().undefined_value());
instance->set_source_position_table(read_only_roots().undefined_value(),
kReleaseStore);
CopyBytes(reinterpret_cast<byte*>(instance->GetFirstBytecodeAddress()),
raw_bytecodes, length);
instance->clear_padding();
@ -325,7 +325,7 @@ Handle<SharedFunctionInfo> FactoryBase<Impl>::NewSharedFunctionInfo(
DCHECK(!Builtins::IsBuiltinId(maybe_builtin_index));
DCHECK_IMPLIES(function_data->IsCode(),
!Code::cast(*function_data).is_builtin());
shared->set_function_data(*function_data);
shared->set_function_data(*function_data, kReleaseStore);
} else if (Builtins::IsBuiltinId(maybe_builtin_index)) {
shared->set_builtin_id(maybe_builtin_index);
} else {

View File

@ -184,7 +184,7 @@ MaybeHandle<Code> Factory::CodeBuilder::BuildInternal(
stack_slots_, kIsNotOffHeapTrampoline);
code->set_builtin_index(builtin_index_);
code->set_inlined_bytecode_size(inlined_bytecode_size_);
code->set_code_data_container(*data_container);
code->set_code_data_container(*data_container, kReleaseStore);
code->set_deoptimization_data(*deoptimization_data_);
code->set_source_position_table(*source_position_table_);
code->set_safepoint_table_offset(code_desc_.safepoint_table_offset);
@ -1470,7 +1470,8 @@ Map Factory::InitializeMap(Map map, InstanceType type, int instance_size,
map.SetInObjectUnusedPropertyFields(inobject_properties);
map.SetInstanceDescriptors(isolate(), *empty_descriptor_array(), 0);
if (FLAG_unbox_double_fields) {
map.set_layout_descriptor(LayoutDescriptor::FastPointerLayout());
map.set_layout_descriptor(LayoutDescriptor::FastPointerLayout(),
kReleaseStore);
}
// Must be called only after |instance_type|, |instance_size| and
// |layout_descriptor| are set.
@ -2070,7 +2071,7 @@ Handle<Code> Factory::NewOffHeapTrampolineFor(Handle<Code> code,
Builtins::CodeObjectIsExecutable(code->builtin_index());
Handle<Code> result = Builtins::GenerateOffHeapTrampolineFor(
isolate(), off_heap_entry,
code->code_data_container().kind_specific_flags(),
code->code_data_container(kAcquireLoad).kind_specific_flags(),
generate_jump_to_instruction_stream);
// The CodeDataContainer should not be modified beyond this point since it's
// now possibly canonicalized.
@ -2115,7 +2116,8 @@ Handle<Code> Factory::NewOffHeapTrampolineFor(Handle<Code> code,
Handle<Code> Factory::CopyCode(Handle<Code> code) {
Handle<CodeDataContainer> data_container = NewCodeDataContainer(
code->code_data_container().kind_specific_flags(), AllocationType::kOld);
code->code_data_container(kAcquireLoad).kind_specific_flags(),
AllocationType::kOld);
Heap* heap = isolate()->heap();
Handle<Code> new_code;
@ -2132,7 +2134,7 @@ Handle<Code> Factory::CopyCode(Handle<Code> code) {
new_code = handle(Code::cast(result), isolate());
// Set the {CodeDataContainer}, it cannot be shared.
new_code->set_code_data_container(*data_container);
new_code->set_code_data_container(*data_container, kReleaseStore);
new_code->Relocate(new_addr - old_addr);
// We have to iterate over the object and process its pointers when black
@ -2168,8 +2170,8 @@ Handle<BytecodeArray> Factory::CopyBytecodeArray(
bytecode_array->incoming_new_target_or_generator_register());
copy->set_constant_pool(bytecode_array->constant_pool());
copy->set_handler_table(bytecode_array->handler_table());
copy->set_synchronized_source_position_table(
bytecode_array->synchronized_source_position_table());
copy->set_source_position_table(
bytecode_array->source_position_table(kAcquireLoad), kReleaseStore);
copy->set_osr_loop_nesting_level(bytecode_array->osr_loop_nesting_level());
copy->set_bytecode_age(bytecode_array->bytecode_age());
bytecode_array->CopyBytecodesTo(*copy);
@ -2220,7 +2222,8 @@ Handle<JSGlobalObject> Factory::NewJSGlobalObject(
// The global object might be created from an object template with accessors.
// Fill these accessors into the dictionary.
Handle<DescriptorArray> descs(map->instance_descriptors(), isolate());
Handle<DescriptorArray> descs(map->instance_descriptors(kRelaxedLoad),
isolate());
for (InternalIndex i : map->IterateOwnDescriptors()) {
PropertyDetails details = descs->GetDetails(i);
// Only accessors are expected.
@ -3471,7 +3474,8 @@ Handle<Map> Factory::CreateSloppyFunctionMap(
map->AppendDescriptor(isolate(), &d);
}
DCHECK_EQ(inobject_properties_count, field_index);
DCHECK_EQ(0, map->instance_descriptors().number_of_slack_descriptors());
DCHECK_EQ(
0, map->instance_descriptors(kRelaxedLoad).number_of_slack_descriptors());
LOG(isolate(), MapDetails(*map));
return map;
}
@ -3554,7 +3558,8 @@ Handle<Map> Factory::CreateStrictFunctionMap(
map->AppendDescriptor(isolate(), &d);
}
DCHECK_EQ(inobject_properties_count, field_index);
DCHECK_EQ(0, map->instance_descriptors().number_of_slack_descriptors());
DCHECK_EQ(
0, map->instance_descriptors(kRelaxedLoad).number_of_slack_descriptors());
LOG(isolate(), MapDetails(*map));
return map;
}

View File

@ -2147,8 +2147,8 @@ void MarkCompactCollector::ClearPotentialSimpleMapTransition(Map map,
DCHECK_EQ(map.raw_transitions(), HeapObjectReference::Weak(dead_target));
// Take ownership of the descriptor array.
int number_of_own_descriptors = map.NumberOfOwnDescriptors();
DescriptorArray descriptors = map.instance_descriptors();
if (descriptors == dead_target.instance_descriptors() &&
DescriptorArray descriptors = map.instance_descriptors(kRelaxedLoad);
if (descriptors == dead_target.instance_descriptors(kRelaxedLoad) &&
number_of_own_descriptors > 0) {
TrimDescriptorArray(map, descriptors);
DCHECK(descriptors.number_of_descriptors() == number_of_own_descriptors);
@ -2218,7 +2218,7 @@ void MarkCompactCollector::FlushBytecodeFromSFI(
// Use the raw function data setter to avoid validity checks, since we're
// performing the unusual task of decompiling.
shared_info.set_function_data(uncompiled_data);
shared_info.set_function_data(uncompiled_data, kReleaseStore);
DCHECK(!shared_info.is_compiled());
}
@ -2277,7 +2277,8 @@ void MarkCompactCollector::ClearFullMapTransitions() {
bool parent_is_alive =
non_atomic_marking_state()->IsBlackOrGrey(parent);
DescriptorArray descriptors =
parent_is_alive ? parent.instance_descriptors() : DescriptorArray();
parent_is_alive ? parent.instance_descriptors(kRelaxedLoad)
: DescriptorArray();
bool descriptors_owner_died =
CompactTransitionArray(parent, array, descriptors);
if (descriptors_owner_died) {
@ -2301,7 +2302,7 @@ bool MarkCompactCollector::CompactTransitionArray(Map map,
DCHECK_EQ(target.constructor_or_backpointer(), map);
if (non_atomic_marking_state()->IsWhite(target)) {
if (!descriptors.is_null() &&
target.instance_descriptors() == descriptors) {
target.instance_descriptors(kRelaxedLoad) == descriptors) {
DCHECK(!target.is_prototype_map());
descriptors_owner_died = true;
}
@ -2375,7 +2376,7 @@ void MarkCompactCollector::TrimDescriptorArray(Map map,
descriptors.Sort();
if (FLAG_unbox_double_fields) {
LayoutDescriptor layout_descriptor = map.layout_descriptor();
LayoutDescriptor layout_descriptor = map.layout_descriptor(kAcquireLoad);
layout_descriptor = layout_descriptor.Trim(heap_, map, descriptors,
number_of_own_descriptors);
SLOW_DCHECK(layout_descriptor.IsConsistentWithMap(map, true));

View File

@ -150,7 +150,7 @@ FieldStatsCollector::GetInobjectFieldStats(Map map) {
JSObjectFieldStats stats;
stats.embedded_fields_count_ = JSObject::GetEmbedderFieldCount(map);
if (!map.is_dictionary_map()) {
DescriptorArray descriptors = map.instance_descriptors();
DescriptorArray descriptors = map.instance_descriptors(kRelaxedLoad);
for (InternalIndex descriptor : map.IterateOwnDescriptors()) {
PropertyDetails details = descriptors.GetDetails(descriptor);
if (details.location() == kField) {
@ -565,9 +565,9 @@ void ObjectStatsCollectorImpl::RecordVirtualFunctionTemplateInfoDetails(
FunctionTemplateInfo fti) {
// named_property_handler and indexed_property_handler are recorded as
// INTERCEPTOR_INFO_TYPE.
if (!fti.call_code().IsUndefined(isolate())) {
if (!fti.call_code(kAcquireLoad).IsUndefined(isolate())) {
RecordSimpleVirtualObjectStats(
fti, CallHandlerInfo::cast(fti.call_code()),
fti, CallHandlerInfo::cast(fti.call_code(kAcquireLoad)),
ObjectStats::FUNCTION_TEMPLATE_INFO_ENTRIES_TYPE);
}
if (!fti.GetInstanceCallHandler().IsUndefined(isolate())) {
@ -883,7 +883,7 @@ void ObjectStatsCollectorImpl::RecordVirtualMapDetails(Map map) {
// This will be logged as MAP_TYPE in Phase2.
}
DescriptorArray array = map.instance_descriptors();
DescriptorArray array = map.instance_descriptors(kRelaxedLoad);
if (map.owns_descriptors() &&
array != ReadOnlyRoots(heap_).empty_descriptor_array()) {
// Generally DescriptorArrays have their own instance type already

View File

@ -87,16 +87,16 @@ static void ClearWeakList(Heap* heap, Object list) {
template <>
struct WeakListVisitor<Code> {
static void SetWeakNext(Code code, Object next) {
code.code_data_container().set_next_code_link(next,
UPDATE_WEAK_WRITE_BARRIER);
code.code_data_container(kAcquireLoad)
.set_next_code_link(next, UPDATE_WEAK_WRITE_BARRIER);
}
static Object WeakNext(Code code) {
return code.code_data_container().next_code_link();
return code.code_data_container(kAcquireLoad).next_code_link();
}
static HeapObject WeakNextHolder(Code code) {
return code.code_data_container();
return code.code_data_container(kAcquireLoad);
}
static int WeakNextOffset() { return CodeDataContainer::kNextCodeLinkOffset; }

View File

@ -123,7 +123,7 @@ bool Scavenger::MigrateObject(Map map, HeapObject source, HeapObject target,
heap()->CopyBlock(target.address() + kTaggedSize,
source.address() + kTaggedSize, size - kTaggedSize);
if (!source.synchronized_compare_and_swap_map_word(
if (!source.release_compare_and_swap_map_word(
MapWord::FromMap(map), MapWord::FromForwardingAddress(target))) {
// Other task migrated the object.
return false;
@ -228,7 +228,7 @@ bool Scavenger::HandleLargeObject(Map map, HeapObject object, int object_size,
BasicMemoryChunk::FromHeapObject(object)->InNewLargeObjectSpace())) {
DCHECK_EQ(NEW_LO_SPACE,
MemoryChunk::FromHeapObject(object)->owner_identity());
if (object.synchronized_compare_and_swap_map_word(
if (object.release_compare_and_swap_map_word(
MapWord::FromMap(map), MapWord::FromForwardingAddress(object))) {
surviving_new_large_objects_.insert({object, map});
promoted_size_ += object_size;

View File

@ -167,7 +167,8 @@ AllocationResult Heap::AllocatePartialMap(InstanceType instance_type,
map.set_instance_size(instance_size);
// Initialize to only containing tagged fields.
if (FLAG_unbox_double_fields) {
map.set_layout_descriptor(LayoutDescriptor::FastPointerLayout());
map.set_layout_descriptor(LayoutDescriptor::FastPointerLayout(),
kReleaseStore);
}
// GetVisitorId requires a properly initialized LayoutDescriptor.
map.set_visitor_id(Map::GetVisitorId(map));
@ -194,7 +195,8 @@ void Heap::FinalizePartialMap(Map map) {
map.set_raw_transitions(MaybeObject::FromSmi(Smi::zero()));
map.SetInstanceDescriptors(isolate(), roots.empty_descriptor_array(), 0);
if (FLAG_unbox_double_fields) {
map.set_layout_descriptor(LayoutDescriptor::FastPointerLayout());
map.set_layout_descriptor(LayoutDescriptor::FastPointerLayout(),
kReleaseStore);
}
map.set_prototype(roots.null_value());
map.set_constructor_or_backpointer(roots.null_value());

View File

@ -97,9 +97,11 @@ bool CallOptimization::IsCompatibleReceiverMap(Handle<Map> map,
void CallOptimization::Initialize(
Isolate* isolate, Handle<FunctionTemplateInfo> function_template_info) {
if (function_template_info->call_code().IsUndefined(isolate)) return;
if (function_template_info->call_code(kAcquireLoad).IsUndefined(isolate))
return;
api_call_info_ = handle(
CallHandlerInfo::cast(function_template_info->call_code()), isolate);
CallHandlerInfo::cast(function_template_info->call_code(kAcquireLoad)),
isolate);
if (!function_template_info->signature().IsUndefined(isolate)) {
expected_receiver_type_ =
@ -124,8 +126,9 @@ void CallOptimization::AnalyzePossibleApiFunction(Isolate* isolate,
isolate);
// Require a C++ callback.
if (info->call_code().IsUndefined(isolate)) return;
api_call_info_ = handle(CallHandlerInfo::cast(info->call_code()), isolate);
if (info->call_code(kAcquireLoad).IsUndefined(isolate)) return;
api_call_info_ =
handle(CallHandlerInfo::cast(info->call_code(kAcquireLoad)), isolate);
if (!info->signature().IsUndefined(isolate)) {
expected_receiver_type_ =

View File

@ -225,8 +225,8 @@ MaybeObjectHandle StoreHandler::StoreTransition(Isolate* isolate,
#ifdef DEBUG
if (!is_dictionary_map) {
InternalIndex descriptor = transition_map->LastAdded();
Handle<DescriptorArray> descriptors(transition_map->instance_descriptors(),
isolate);
Handle<DescriptorArray> descriptors(
transition_map->instance_descriptors(kRelaxedLoad), isolate);
PropertyDetails details = descriptors->GetDetails(descriptor);
if (descriptors->GetKey(descriptor).IsPrivate()) {
DCHECK_EQ(DONT_ENUM, details.attributes());

View File

@ -2711,7 +2711,7 @@ static bool CanFastCloneObject(Handle<Map> map) {
return false;
}
DescriptorArray descriptors = map->instance_descriptors();
DescriptorArray descriptors = map->instance_descriptors(kRelaxedLoad);
for (InternalIndex i : map->IterateOwnDescriptors()) {
PropertyDetails details = descriptors.GetDetails(i);
Name key = descriptors.GetKey(i);
@ -2760,8 +2760,8 @@ static Handle<Map> FastCloneObjectMap(Isolate* isolate, Handle<Map> source_map,
map = Map::Copy(isolate, map, "InitializeClonedDescriptors");
}
Handle<DescriptorArray> source_descriptors(source_map->instance_descriptors(),
isolate);
Handle<DescriptorArray> source_descriptors(
source_map->instance_descriptors(kRelaxedLoad), isolate);
int size = source_map->NumberOfOwnDescriptors();
int slack = 0;
Handle<DescriptorArray> descriptors = DescriptorArray::CopyForFastObjectClone(

View File

@ -1107,7 +1107,7 @@ namespace {
void ReplaceAccessors(Isolate* isolate, Handle<Map> map, Handle<String> name,
PropertyAttributes attributes,
Handle<AccessorPair> accessor_pair) {
DescriptorArray descriptors = map->instance_descriptors();
DescriptorArray descriptors = map->instance_descriptors(kRelaxedLoad);
InternalIndex entry = descriptors.SearchWithCache(isolate, *name, *map);
Descriptor d = Descriptor::AccessorConstant(name, accessor_pair, attributes);
descriptors.Replace(entry, &d);
@ -5028,8 +5028,8 @@ void Genesis::TransferNamedProperties(Handle<JSObject> from,
// The global template must not create properties that already exist
// in the snapshotted global object.
if (from->HasFastProperties()) {
Handle<DescriptorArray> descs =
Handle<DescriptorArray>(from->map().instance_descriptors(), isolate());
Handle<DescriptorArray> descs = Handle<DescriptorArray>(
from->map().instance_descriptors(kRelaxedLoad), isolate());
for (InternalIndex i : from->map().IterateOwnDescriptors()) {
PropertyDetails details = descs->GetDetails(i);
if (details.location() == kField) {
@ -5158,7 +5158,8 @@ Handle<Map> Genesis::CreateInitialMapForArraySubclass(int size,
{
JSFunction array_function = native_context()->array_function();
Handle<DescriptorArray> array_descriptors(
array_function.initial_map().instance_descriptors(), isolate());
array_function.initial_map().instance_descriptors(kRelaxedLoad),
isolate());
Handle<String> length = factory()->length_string();
InternalIndex old = array_descriptors->SearchWithCache(
isolate(), *length, array_function.initial_map());

View File

@ -250,7 +250,7 @@ InterpreterCompilationJob::Status InterpreterCompilationJob::DoFinalizeJobImpl(
SourcePositionTableBuilder::RecordingMode::RECORD_SOURCE_POSITIONS) {
Handle<ByteArray> source_position_table =
generator()->FinalizeSourcePositionTable(isolate);
bytecodes->set_synchronized_source_position_table(*source_position_table);
bytecodes->set_source_position_table(*source_position_table, kReleaseStore);
}
if (ShouldPrintBytecode(shared_info)) {

View File

@ -464,9 +464,10 @@ Handle<Object> JsonParser<Char>::BuildJsonObject(
Handle<Map> target;
InternalIndex descriptor_index(descriptor);
if (descriptor < feedback_descriptors) {
expected = handle(String::cast(feedback->instance_descriptors().GetKey(
descriptor_index)),
isolate_);
expected =
handle(String::cast(feedback->instance_descriptors(kRelaxedLoad)
.GetKey(descriptor_index)),
isolate_);
} else {
DisallowHeapAllocation no_gc;
TransitionsAccessor transitions(isolate(), *map, &no_gc);
@ -497,7 +498,7 @@ Handle<Object> JsonParser<Char>::BuildJsonObject(
Handle<Object> value = property.value;
PropertyDetails details =
target->instance_descriptors().GetDetails(descriptor_index);
target->instance_descriptors(kRelaxedLoad).GetDetails(descriptor_index);
Representation expected_representation = details.representation();
if (!value->FitsRepresentation(expected_representation)) {
@ -512,7 +513,7 @@ Handle<Object> JsonParser<Char>::BuildJsonObject(
Map::GeneralizeField(isolate(), target, descriptor_index,
details.constness(), representation, value_type);
} else if (expected_representation.IsHeapObject() &&
!target->instance_descriptors()
!target->instance_descriptors(kRelaxedLoad)
.GetFieldType(descriptor_index)
.NowContains(value)) {
Handle<FieldType> value_type =
@ -525,7 +526,7 @@ Handle<Object> JsonParser<Char>::BuildJsonObject(
new_mutable_double++;
}
DCHECK(target->instance_descriptors()
DCHECK(target->instance_descriptors(kRelaxedLoad)
.GetFieldType(descriptor_index)
.NowContains(value));
map = target;
@ -575,7 +576,7 @@ Handle<Object> JsonParser<Char>::BuildJsonObject(
if (property.string.is_index()) continue;
InternalIndex descriptor_index(descriptor);
PropertyDetails details =
map->instance_descriptors().GetDetails(descriptor_index);
map->instance_descriptors(kRelaxedLoad).GetDetails(descriptor_index);
Object value = *property.value;
FieldIndex index = FieldIndex::ForDescriptor(*map, descriptor_index);
descriptor++;

View File

@ -772,11 +772,13 @@ JsonStringifier::Result JsonStringifier::SerializeJSObject(
Indent();
bool comma = false;
for (InternalIndex i : map->IterateOwnDescriptors()) {
Handle<Name> name(map->instance_descriptors().GetKey(i), isolate_);
Handle<Name> name(map->instance_descriptors(kRelaxedLoad).GetKey(i),
isolate_);
// TODO(rossberg): Should this throw?
if (!name->IsString()) continue;
Handle<String> key = Handle<String>::cast(name);
PropertyDetails details = map->instance_descriptors().GetDetails(i);
PropertyDetails details =
map->instance_descriptors(kRelaxedLoad).GetDetails(i);
if (details.IsDontEnum()) continue;
Handle<Object> property;
if (details.location() == kField && *map == object->map()) {

View File

@ -2240,7 +2240,7 @@ void ExistingCodeLogger::LogCompiledFunctions() {
// GetScriptLineNumber call.
for (int i = 0; i < compiled_funcs_count; ++i) {
SharedFunctionInfo::EnsureSourcePositionsAvailable(isolate_, sfis[i]);
if (sfis[i]->function_data().IsInterpreterData()) {
if (sfis[i]->function_data(kAcquireLoad).IsInterpreterData()) {
LogExistingFunction(
sfis[i],
Handle<AbstractCode>(
@ -2290,7 +2290,7 @@ void ExistingCodeLogger::LogExistingFunction(
} else if (shared->IsApiFunction()) {
// API function.
FunctionTemplateInfo fun_data = shared->get_api_func_data();
Object raw_call_data = fun_data.call_code();
Object raw_call_data = fun_data.call_code(kAcquireLoad);
if (!raw_call_data.IsUndefined(isolate_)) {
CallHandlerInfo call_data = CallHandlerInfo::cast(raw_call_data);
Object callback_obj = call_data.callback();

View File

@ -217,11 +217,11 @@ ByteArray Code::SourcePositionTable() const {
}
Object Code::next_code_link() const {
return code_data_container().next_code_link();
return code_data_container(kAcquireLoad).next_code_link();
}
void Code::set_next_code_link(Object value) {
code_data_container().set_next_code_link(value);
code_data_container(kAcquireLoad).set_next_code_link(value);
}
int Code::InstructionSize() const {
@ -388,41 +388,41 @@ inline bool Code::is_turbofanned() const {
inline bool Code::can_have_weak_objects() const {
DCHECK(CodeKindIsOptimizedJSFunction(kind()));
int32_t flags = code_data_container().kind_specific_flags();
int32_t flags = code_data_container(kAcquireLoad).kind_specific_flags();
return CanHaveWeakObjectsField::decode(flags);
}
inline void Code::set_can_have_weak_objects(bool value) {
DCHECK(CodeKindIsOptimizedJSFunction(kind()));
int32_t previous = code_data_container().kind_specific_flags();
int32_t previous = code_data_container(kAcquireLoad).kind_specific_flags();
int32_t updated = CanHaveWeakObjectsField::update(previous, value);
code_data_container().set_kind_specific_flags(updated);
code_data_container(kAcquireLoad).set_kind_specific_flags(updated);
}
inline bool Code::is_promise_rejection() const {
DCHECK(kind() == CodeKind::BUILTIN);
int32_t flags = code_data_container().kind_specific_flags();
int32_t flags = code_data_container(kAcquireLoad).kind_specific_flags();
return IsPromiseRejectionField::decode(flags);
}
inline void Code::set_is_promise_rejection(bool value) {
DCHECK(kind() == CodeKind::BUILTIN);
int32_t previous = code_data_container().kind_specific_flags();
int32_t previous = code_data_container(kAcquireLoad).kind_specific_flags();
int32_t updated = IsPromiseRejectionField::update(previous, value);
code_data_container().set_kind_specific_flags(updated);
code_data_container(kAcquireLoad).set_kind_specific_flags(updated);
}
inline bool Code::is_exception_caught() const {
DCHECK(kind() == CodeKind::BUILTIN);
int32_t flags = code_data_container().kind_specific_flags();
int32_t flags = code_data_container(kAcquireLoad).kind_specific_flags();
return IsExceptionCaughtField::decode(flags);
}
inline void Code::set_is_exception_caught(bool value) {
DCHECK(kind() == CodeKind::BUILTIN);
int32_t previous = code_data_container().kind_specific_flags();
int32_t previous = code_data_container(kAcquireLoad).kind_specific_flags();
int32_t updated = IsExceptionCaughtField::update(previous, value);
code_data_container().set_kind_specific_flags(updated);
code_data_container(kAcquireLoad).set_kind_specific_flags(updated);
}
inline bool Code::is_off_heap_trampoline() const {
@ -470,21 +470,21 @@ int Code::stack_slots() const {
bool Code::marked_for_deoptimization() const {
DCHECK(CodeKindCanDeoptimize(kind()));
int32_t flags = code_data_container().kind_specific_flags();
int32_t flags = code_data_container(kAcquireLoad).kind_specific_flags();
return MarkedForDeoptimizationField::decode(flags);
}
void Code::set_marked_for_deoptimization(bool flag) {
DCHECK(CodeKindCanDeoptimize(kind()));
DCHECK_IMPLIES(flag, AllowDeoptimization::IsAllowed(GetIsolate()));
int32_t previous = code_data_container().kind_specific_flags();
int32_t previous = code_data_container(kAcquireLoad).kind_specific_flags();
int32_t updated = MarkedForDeoptimizationField::update(previous, flag);
code_data_container().set_kind_specific_flags(updated);
code_data_container(kAcquireLoad).set_kind_specific_flags(updated);
}
int Code::deoptimization_count() const {
DCHECK(CodeKindCanDeoptimize(kind()));
int32_t flags = code_data_container().kind_specific_flags();
int32_t flags = code_data_container(kAcquireLoad).kind_specific_flags();
int count = DeoptCountField::decode(flags);
DCHECK_GE(count, 0);
return count;
@ -492,40 +492,40 @@ int Code::deoptimization_count() const {
void Code::increment_deoptimization_count() {
DCHECK(CodeKindCanDeoptimize(kind()));
int32_t flags = code_data_container().kind_specific_flags();
int32_t flags = code_data_container(kAcquireLoad).kind_specific_flags();
int32_t count = DeoptCountField::decode(flags);
DCHECK_GE(count, 0);
CHECK_LE(count + 1, DeoptCountField::kMax);
int32_t updated = DeoptCountField::update(flags, count + 1);
code_data_container().set_kind_specific_flags(updated);
code_data_container(kAcquireLoad).set_kind_specific_flags(updated);
}
bool Code::embedded_objects_cleared() const {
DCHECK(CodeKindIsOptimizedJSFunction(kind()));
int32_t flags = code_data_container().kind_specific_flags();
int32_t flags = code_data_container(kAcquireLoad).kind_specific_flags();
return EmbeddedObjectsClearedField::decode(flags);
}
void Code::set_embedded_objects_cleared(bool flag) {
DCHECK(CodeKindIsOptimizedJSFunction(kind()));
DCHECK_IMPLIES(flag, marked_for_deoptimization());
int32_t previous = code_data_container().kind_specific_flags();
int32_t previous = code_data_container(kAcquireLoad).kind_specific_flags();
int32_t updated = EmbeddedObjectsClearedField::update(previous, flag);
code_data_container().set_kind_specific_flags(updated);
code_data_container(kAcquireLoad).set_kind_specific_flags(updated);
}
bool Code::deopt_already_counted() const {
DCHECK(CodeKindCanDeoptimize(kind()));
int32_t flags = code_data_container().kind_specific_flags();
int32_t flags = code_data_container(kAcquireLoad).kind_specific_flags();
return DeoptAlreadyCountedField::decode(flags);
}
void Code::set_deopt_already_counted(bool flag) {
DCHECK(CodeKindCanDeoptimize(kind()));
DCHECK_IMPLIES(flag, AllowDeoptimization::IsAllowed(GetIsolate()));
int32_t previous = code_data_container().kind_specific_flags();
int32_t previous = code_data_container(kAcquireLoad).kind_specific_flags();
int32_t updated = DeoptAlreadyCountedField::update(previous, flag);
code_data_container().set_kind_specific_flags(updated);
code_data_container(kAcquireLoad).set_kind_specific_flags(updated);
}
bool Code::is_optimized_code() const {
@ -701,8 +701,8 @@ int32_t BytecodeArray::parameter_count() const {
ACCESSORS(BytecodeArray, constant_pool, FixedArray, kConstantPoolOffset)
ACCESSORS(BytecodeArray, handler_table, ByteArray, kHandlerTableOffset)
SYNCHRONIZED_ACCESSORS(BytecodeArray, synchronized_source_position_table,
Object, kSourcePositionTableOffset)
SYNCHRONIZED_ACCESSORS(BytecodeArray, source_position_table, Object,
kSourcePositionTableOffset)
void BytecodeArray::clear_padding() {
int data_size = kHeaderSize + length();
@ -715,22 +715,22 @@ Address BytecodeArray::GetFirstBytecodeAddress() {
}
bool BytecodeArray::HasSourcePositionTable() const {
Object maybe_table = synchronized_source_position_table();
Object maybe_table = source_position_table(kAcquireLoad);
return !(maybe_table.IsUndefined() || DidSourcePositionGenerationFail());
}
bool BytecodeArray::DidSourcePositionGenerationFail() const {
return synchronized_source_position_table().IsException();
return source_position_table(kAcquireLoad).IsException();
}
void BytecodeArray::SetSourcePositionsFailedToCollect() {
set_synchronized_source_position_table(GetReadOnlyRoots().exception());
set_source_position_table(GetReadOnlyRoots().exception(), kReleaseStore);
}
ByteArray BytecodeArray::SourcePositionTable() const {
// WARNING: This function may be called from a background thread, hence
// changes to how it accesses the heap can easily lead to bugs.
Object maybe_table = synchronized_source_position_table();
Object maybe_table = source_position_table(kAcquireLoad);
if (maybe_table.IsByteArray()) return ByteArray::cast(maybe_table);
ReadOnlyRoots roots = GetReadOnlyRoots();
DCHECK(maybe_table.IsUndefined(roots) || maybe_table.IsException(roots));

View File

@ -115,7 +115,7 @@ class Code : public HeapObject {
inline ByteArray SourcePositionTable() const;
// [code_data_container]: A container indirection for all mutable fields.
DECL_ACCESSORS(code_data_container, CodeDataContainer)
DECL_RELEASE_ACQUIRE_ACCESSORS(code_data_container, CodeDataContainer)
// [next_code_link]: Link for lists of optimized or deoptimized code.
// Note that this field is stored in the {CodeDataContainer} to be mutable.
@ -774,7 +774,7 @@ class BytecodeArray : public FixedArrayBase {
// * ByteArray (when source positions have been collected for the bytecode)
// * exception (when an error occurred while explicitly collecting source
// positions for pre-existing bytecode).
DECL_SYNCHRONIZED_ACCESSORS(source_position_table, Object)
DECL_RELEASE_ACQUIRE_ACCESSORS(source_position_table, Object)
inline bool HasSourcePositionTable() const;
inline bool DidSourcePositionGenerationFail() const;

View File

@ -67,8 +67,8 @@ FieldIndex FieldIndex::ForDescriptor(Map map, InternalIndex descriptor_index) {
FieldIndex FieldIndex::ForDescriptor(const Isolate* isolate, Map map,
InternalIndex descriptor_index) {
PropertyDetails details =
map.instance_descriptors(isolate).GetDetails(descriptor_index);
PropertyDetails details = map.instance_descriptors(isolate, kRelaxedLoad)
.GetDetails(descriptor_index);
int field_index = details.field_index();
return ForPropertyIndex(map, field_index, details.representation());
}

View File

@ -45,8 +45,8 @@ class HeapObject : public Object {
// Compare-and-swaps map word using release store, returns true if the map
// word was actually swapped.
inline bool synchronized_compare_and_swap_map_word(MapWord old_map_word,
MapWord new_map_word);
inline bool release_compare_and_swap_map_word(MapWord old_map_word,
MapWord new_map_word);
// Initialize the map immediately after the object is allocated.
// Do not use this outside Heap.

View File

@ -221,7 +221,8 @@ V8_WARN_UNUSED_RESULT Maybe<bool> FastAssign(
return Just(false);
}
Handle<DescriptorArray> descriptors(map->instance_descriptors(), isolate);
Handle<DescriptorArray> descriptors(map->instance_descriptors(kRelaxedLoad),
isolate);
bool stable = true;
@ -233,7 +234,7 @@ V8_WARN_UNUSED_RESULT Maybe<bool> FastAssign(
// Directly decode from the descriptor array if |from| did not change shape.
if (stable) {
DCHECK_EQ(from->map(), *map);
DCHECK_EQ(*descriptors, map->instance_descriptors());
DCHECK_EQ(*descriptors, map->instance_descriptors(kRelaxedLoad));
PropertyDetails details = descriptors->GetDetails(i);
if (!details.IsEnumerable()) continue;
@ -252,7 +253,7 @@ V8_WARN_UNUSED_RESULT Maybe<bool> FastAssign(
ASSIGN_RETURN_ON_EXCEPTION_VALUE(
isolate, prop_value, Object::GetProperty(&it), Nothing<bool>());
stable = from->map() == *map;
descriptors.PatchValue(map->instance_descriptors());
descriptors.PatchValue(map->instance_descriptors(kRelaxedLoad));
}
} else {
// If the map did change, do a slower lookup. We are still guaranteed that
@ -278,7 +279,7 @@ V8_WARN_UNUSED_RESULT Maybe<bool> FastAssign(
if (result.IsNothing()) return result;
if (stable) {
stable = from->map() == *map;
descriptors.PatchValue(map->instance_descriptors());
descriptors.PatchValue(map->instance_descriptors(kRelaxedLoad));
}
} else {
if (excluded_properties != nullptr &&
@ -1851,7 +1852,8 @@ V8_WARN_UNUSED_RESULT Maybe<bool> FastGetOwnValuesOrEntries(
if (!map->OnlyHasSimpleProperties()) return Just(false);
Handle<JSObject> object(JSObject::cast(*receiver), isolate);
Handle<DescriptorArray> descriptors(map->instance_descriptors(), isolate);
Handle<DescriptorArray> descriptors(map->instance_descriptors(kRelaxedLoad),
isolate);
int number_of_own_descriptors = map->NumberOfOwnDescriptors();
size_t number_of_own_elements =
@ -1879,7 +1881,7 @@ V8_WARN_UNUSED_RESULT Maybe<bool> FastGetOwnValuesOrEntries(
// side-effects.
bool stable = *map == object->map();
if (stable) {
descriptors.PatchValue(map->instance_descriptors());
descriptors.PatchValue(map->instance_descriptors(kRelaxedLoad));
}
for (InternalIndex index : InternalIndex::Range(number_of_own_descriptors)) {
@ -1892,7 +1894,7 @@ V8_WARN_UNUSED_RESULT Maybe<bool> FastGetOwnValuesOrEntries(
// Directly decode from the descriptor array if |from| did not change shape.
if (stable) {
DCHECK_EQ(object->map(), *map);
DCHECK_EQ(*descriptors, map->instance_descriptors());
DCHECK_EQ(*descriptors, map->instance_descriptors(kRelaxedLoad));
PropertyDetails details = descriptors->GetDetails(index);
if (!details.IsEnumerable()) continue;
@ -1913,7 +1915,7 @@ V8_WARN_UNUSED_RESULT Maybe<bool> FastGetOwnValuesOrEntries(
ASSIGN_RETURN_ON_EXCEPTION_VALUE(
isolate, prop_value, Object::GetProperty(&it), Nothing<bool>());
stable = object->map() == *map;
descriptors.PatchValue(map->instance_descriptors());
descriptors.PatchValue(map->instance_descriptors(kRelaxedLoad));
}
} else {
// If the map did change, do a slower lookup. We are still guaranteed that
@ -2535,8 +2537,8 @@ void JSObject::PrintInstanceMigration(FILE* file, Map original_map,
return;
}
PrintF(file, "[migrating]");
DescriptorArray o = original_map.instance_descriptors();
DescriptorArray n = new_map.instance_descriptors();
DescriptorArray o = original_map.instance_descriptors(kRelaxedLoad);
DescriptorArray n = new_map.instance_descriptors(kRelaxedLoad);
for (InternalIndex i : original_map.IterateOwnDescriptors()) {
Representation o_r = o.GetDetails(i).representation();
Representation n_r = n.GetDetails(i).representation();
@ -2724,9 +2726,9 @@ void MigrateFastToFast(Isolate* isolate, Handle<JSObject> object,
isolate->factory()->NewFixedArray(inobject);
Handle<DescriptorArray> old_descriptors(
old_map->instance_descriptors(isolate), isolate);
old_map->instance_descriptors(isolate, kRelaxedLoad), isolate);
Handle<DescriptorArray> new_descriptors(
new_map->instance_descriptors(isolate), isolate);
new_map->instance_descriptors(isolate, kRelaxedLoad), isolate);
int old_nof = old_map->NumberOfOwnDescriptors();
int new_nof = new_map->NumberOfOwnDescriptors();
@ -2884,7 +2886,8 @@ void MigrateFastToSlow(Isolate* isolate, Handle<JSObject> object,
Handle<NameDictionary> dictionary =
NameDictionary::New(isolate, property_count);
Handle<DescriptorArray> descs(map->instance_descriptors(isolate), isolate);
Handle<DescriptorArray> descs(
map->instance_descriptors(isolate, kRelaxedLoad), isolate);
for (InternalIndex i : InternalIndex::Range(real_size)) {
PropertyDetails details = descs->GetDetails(i);
Handle<Name> key(descs->GetKey(isolate, i), isolate);
@ -3075,7 +3078,8 @@ void JSObject::AllocateStorageForMap(Handle<JSObject> object, Handle<Map> map) {
if (!FLAG_unbox_double_fields || external > 0) {
Isolate* isolate = object->GetIsolate();
Handle<DescriptorArray> descriptors(map->instance_descriptors(), isolate);
Handle<DescriptorArray> descriptors(map->instance_descriptors(kRelaxedLoad),
isolate);
Handle<FixedArray> storage;
if (!FLAG_unbox_double_fields) {
storage = isolate->factory()->NewFixedArray(inobject);
@ -3642,7 +3646,7 @@ bool TestFastPropertiesIntegrityLevel(Map map, PropertyAttributes level) {
DCHECK(!map.IsCustomElementsReceiverMap());
DCHECK(!map.is_dictionary_map());
DescriptorArray descriptors = map.instance_descriptors();
DescriptorArray descriptors = map.instance_descriptors(kRelaxedLoad);
for (InternalIndex i : map.IterateOwnDescriptors()) {
if (descriptors.GetKey(i).IsPrivate()) continue;
PropertyDetails details = descriptors.GetDetails(i);
@ -4177,7 +4181,7 @@ MaybeHandle<Object> JSObject::SetAccessor(Handle<JSObject> object,
Object JSObject::SlowReverseLookup(Object value) {
if (HasFastProperties()) {
DescriptorArray descs = map().instance_descriptors();
DescriptorArray descs = map().instance_descriptors(kRelaxedLoad);
bool value_is_number = value.IsNumber();
for (InternalIndex i : map().IterateOwnDescriptors()) {
PropertyDetails details = descs.GetDetails(i);

View File

@ -67,7 +67,8 @@ static Handle<FixedArray> CombineKeys(Isolate* isolate,
int nof_descriptors = map.NumberOfOwnDescriptors();
if (nof_descriptors == 0 && !may_have_elements) return prototype_chain_keys;
Handle<DescriptorArray> descs(map.instance_descriptors(), isolate);
Handle<DescriptorArray> descs(map.instance_descriptors(kRelaxedLoad),
isolate);
int own_keys_length = own_keys.is_null() ? 0 : own_keys->length();
Handle<FixedArray> combined_keys = isolate->factory()->NewFixedArray(
own_keys_length + prototype_chain_keys_length);
@ -369,8 +370,8 @@ Handle<FixedArray> ReduceFixedArrayTo(Isolate* isolate,
Handle<FixedArray> GetFastEnumPropertyKeys(Isolate* isolate,
Handle<JSObject> object) {
Handle<Map> map(object->map(), isolate);
Handle<FixedArray> keys(map->instance_descriptors().enum_cache().keys(),
isolate);
Handle<FixedArray> keys(
map->instance_descriptors(kRelaxedLoad).enum_cache().keys(), isolate);
// Check if the {map} has a valid enum length, which implies that it
// must have a valid enum cache as well.
@ -395,7 +396,7 @@ Handle<FixedArray> GetFastEnumPropertyKeys(Isolate* isolate,
}
Handle<DescriptorArray> descriptors =
Handle<DescriptorArray>(map->instance_descriptors(), isolate);
Handle<DescriptorArray>(map->instance_descriptors(kRelaxedLoad), isolate);
isolate->counters()->enum_cache_misses()->Increment();
// Create the keys array.
@ -840,8 +841,8 @@ Maybe<bool> KeyAccumulator::CollectOwnPropertyNames(Handle<JSReceiver> receiver,
if (enum_keys->length() != nof_descriptors) {
if (map.prototype(isolate_) != ReadOnlyRoots(isolate_).null_value()) {
AllowHeapAllocation allow_gc;
Handle<DescriptorArray> descs =
Handle<DescriptorArray>(map.instance_descriptors(), isolate_);
Handle<DescriptorArray> descs = Handle<DescriptorArray>(
map.instance_descriptors(kRelaxedLoad), isolate_);
for (InternalIndex i : InternalIndex::Range(nof_descriptors)) {
PropertyDetails details = descs->GetDetails(i);
if (!details.IsDontEnum()) continue;
@ -873,8 +874,8 @@ Maybe<bool> KeyAccumulator::CollectOwnPropertyNames(Handle<JSReceiver> receiver,
} else {
if (object->HasFastProperties()) {
int limit = object->map().NumberOfOwnDescriptors();
Handle<DescriptorArray> descs(object->map().instance_descriptors(),
isolate_);
Handle<DescriptorArray> descs(
object->map().instance_descriptors(kRelaxedLoad), isolate_);
// First collect the strings,
base::Optional<int> first_symbol =
CollectOwnPropertyNamesInternal<true>(object, this, descs, 0, limit);
@ -902,8 +903,8 @@ ExceptionStatus KeyAccumulator::CollectPrivateNames(Handle<JSReceiver> receiver,
DCHECK_EQ(mode_, KeyCollectionMode::kOwnOnly);
if (object->HasFastProperties()) {
int limit = object->map().NumberOfOwnDescriptors();
Handle<DescriptorArray> descs(object->map().instance_descriptors(),
isolate_);
Handle<DescriptorArray> descs(
object->map().instance_descriptors(kRelaxedLoad), isolate_);
CollectOwnPropertyNamesInternal<false>(object, this, descs, 0, limit);
} else if (object->IsJSGlobalObject()) {
RETURN_FAILURE_IF_NOT_SUCCESSFUL(GlobalDictionary::CollectKeysTo(

View File

@ -65,7 +65,7 @@ Handle<LayoutDescriptor> LayoutDescriptor::AppendIfFastOrUseFull(
Isolate* isolate, Handle<Map> map, PropertyDetails details,
Handle<LayoutDescriptor> full_layout_descriptor) {
DisallowHeapAllocation no_allocation;
LayoutDescriptor layout_descriptor = map->layout_descriptor();
LayoutDescriptor layout_descriptor = map->layout_descriptor(kAcquireLoad);
if (layout_descriptor.IsSlowLayout()) {
return full_layout_descriptor;
}
@ -257,7 +257,7 @@ LayoutDescriptor LayoutDescriptor::Trim(Heap* heap, Map map,
bool LayoutDescriptor::IsConsistentWithMap(Map map, bool check_tail) {
if (FLAG_unbox_double_fields) {
DescriptorArray descriptors = map.instance_descriptors();
DescriptorArray descriptors = map.instance_descriptors(kRelaxedLoad);
int last_field_index = 0;
for (InternalIndex i : map.IterateOwnDescriptors()) {
PropertyDetails details = descriptors.GetDetails(i);

View File

@ -437,8 +437,9 @@ void LookupIterator::PrepareForDataProperty(Handle<Object> value) {
if (old_map.is_identical_to(new_map)) {
// Update the property details if the representation was None.
if (constness() != new_constness || representation().IsNone()) {
property_details_ = new_map->instance_descriptors(isolate_).GetDetails(
descriptor_number());
property_details_ =
new_map->instance_descriptors(isolate_, kRelaxedLoad)
.GetDetails(descriptor_number());
}
return;
}
@ -851,9 +852,9 @@ Handle<Object> LookupIterator::FetchValue(
return JSObject::FastPropertyAt(holder, property_details_.representation(),
field_index);
} else {
result =
holder_->map(isolate_).instance_descriptors(isolate_).GetStrongValue(
isolate_, descriptor_number());
result = holder_->map(isolate_)
.instance_descriptors(isolate_, kRelaxedLoad)
.GetStrongValue(isolate_, descriptor_number());
}
return handle(result, isolate_);
}
@ -941,10 +942,10 @@ Handle<FieldType> LookupIterator::GetFieldType() const {
DCHECK(has_property_);
DCHECK(holder_->HasFastProperties(isolate_));
DCHECK_EQ(kField, property_details_.location());
return handle(
holder_->map(isolate_).instance_descriptors(isolate_).GetFieldType(
isolate_, descriptor_number()),
isolate_);
return handle(holder_->map(isolate_)
.instance_descriptors(isolate_, kRelaxedLoad)
.GetFieldType(isolate_, descriptor_number()),
isolate_);
}
Handle<PropertyCell> LookupIterator::GetPropertyCell() const {
@ -1131,7 +1132,8 @@ LookupIterator::State LookupIterator::LookupInRegularHolder(
property_details_ = property_details_.CopyAddAttributes(SEALED);
}
} else if (!map.is_dictionary_map()) {
DescriptorArray descriptors = map.instance_descriptors(isolate_);
DescriptorArray descriptors =
map.instance_descriptors(isolate_, kRelaxedLoad);
number_ = descriptors.SearchWithCache(isolate_, *name_, map);
if (number_.is_not_found()) return NotFound(holder);
property_details_ = descriptors.GetDetails(number_);

View File

@ -32,12 +32,9 @@ namespace internal {
OBJECT_CONSTRUCTORS_IMPL(Map, HeapObject)
CAST_ACCESSOR(Map)
DEF_GETTER(Map, instance_descriptors, DescriptorArray) {
return TaggedField<DescriptorArray, kInstanceDescriptorsOffset>::load(isolate,
*this);
}
SYNCHRONIZED_ACCESSORS(Map, synchronized_instance_descriptors, DescriptorArray,
RELAXED_ACCESSORS(Map, instance_descriptors, DescriptorArray,
kInstanceDescriptorsOffset)
SYNCHRONIZED_ACCESSORS(Map, instance_descriptors, DescriptorArray,
kInstanceDescriptorsOffset)
// A freshly allocated layout descriptor can be set on an existing map.
@ -165,7 +162,8 @@ bool Map::IsUnboxedDoubleField(FieldIndex index) const {
bool Map::IsUnboxedDoubleField(const Isolate* isolate, FieldIndex index) const {
if (!FLAG_unbox_double_fields) return false;
if (!index.is_inobject()) return false;
return !layout_descriptor(isolate).IsTagged(index.property_index());
return !layout_descriptor(isolate, kAcquireLoad)
.IsTagged(index.property_index());
}
bool Map::TooManyFastProperties(StoreOrigin store_origin) const {
@ -187,7 +185,7 @@ bool Map::TooManyFastProperties(StoreOrigin store_origin) const {
}
PropertyDetails Map::GetLastDescriptorDetails(Isolate* isolate) const {
return instance_descriptors(isolate).GetDetails(LastAdded());
return instance_descriptors(isolate, kRelaxedLoad).GetDetails(LastAdded());
}
InternalIndex Map::LastAdded() const {
@ -201,7 +199,7 @@ int Map::NumberOfOwnDescriptors() const {
}
void Map::SetNumberOfOwnDescriptors(int number) {
DCHECK_LE(number, instance_descriptors().number_of_descriptors());
DCHECK_LE(number, instance_descriptors(kRelaxedLoad).number_of_descriptors());
CHECK_LE(static_cast<unsigned>(number),
static_cast<unsigned>(kMaxNumberOfDescriptors));
set_bit_field3(
@ -564,7 +562,7 @@ bool Map::is_stable() const {
bool Map::CanBeDeprecated() const {
for (InternalIndex i : IterateOwnDescriptors()) {
PropertyDetails details = instance_descriptors().GetDetails(i);
PropertyDetails details = instance_descriptors(kRelaxedLoad).GetDetails(i);
if (details.representation().IsNone()) return true;
if (details.representation().IsSmi()) return true;
if (details.representation().IsDouble() && FLAG_unbox_double_fields)
@ -634,17 +632,17 @@ void Map::UpdateDescriptors(Isolate* isolate, DescriptorArray descriptors,
int number_of_own_descriptors) {
SetInstanceDescriptors(isolate, descriptors, number_of_own_descriptors);
if (FLAG_unbox_double_fields) {
if (layout_descriptor().IsSlowLayout()) {
set_layout_descriptor(layout_desc);
if (layout_descriptor(kAcquireLoad).IsSlowLayout()) {
set_layout_descriptor(layout_desc, kReleaseStore);
}
#ifdef VERIFY_HEAP
// TODO(ishell): remove these checks from VERIFY_HEAP mode.
if (FLAG_verify_heap) {
CHECK(layout_descriptor().IsConsistentWithMap(*this));
CHECK(layout_descriptor(kAcquireLoad).IsConsistentWithMap(*this));
CHECK_EQ(Map::GetVisitorId(*this), visitor_id());
}
#else
SLOW_DCHECK(layout_descriptor().IsConsistentWithMap(*this));
SLOW_DCHECK(layout_descriptor(kAcquireLoad).IsConsistentWithMap(*this));
DCHECK(visitor_id() == Map::GetVisitorId(*this));
#endif
}
@ -656,14 +654,14 @@ void Map::InitializeDescriptors(Isolate* isolate, DescriptorArray descriptors,
descriptors.number_of_descriptors());
if (FLAG_unbox_double_fields) {
set_layout_descriptor(layout_desc);
set_layout_descriptor(layout_desc, kReleaseStore);
#ifdef VERIFY_HEAP
// TODO(ishell): remove these checks from VERIFY_HEAP mode.
if (FLAG_verify_heap) {
CHECK(layout_descriptor().IsConsistentWithMap(*this));
CHECK(layout_descriptor(kAcquireLoad).IsConsistentWithMap(*this));
}
#else
SLOW_DCHECK(layout_descriptor().IsConsistentWithMap(*this));
SLOW_DCHECK(layout_descriptor(kAcquireLoad).IsConsistentWithMap(*this));
#endif
set_visitor_id(Map::GetVisitorId(*this));
}
@ -685,12 +683,12 @@ void Map::clear_padding() {
}
LayoutDescriptor Map::GetLayoutDescriptor() const {
return FLAG_unbox_double_fields ? layout_descriptor()
return FLAG_unbox_double_fields ? layout_descriptor(kAcquireLoad)
: LayoutDescriptor::FastPointerLayout();
}
void Map::AppendDescriptor(Isolate* isolate, Descriptor* desc) {
DescriptorArray descriptors = instance_descriptors();
DescriptorArray descriptors = instance_descriptors(kRelaxedLoad);
int number_of_own_descriptors = NumberOfOwnDescriptors();
DCHECK(descriptors.number_of_descriptors() == number_of_own_descriptors);
{

View File

@ -28,7 +28,7 @@ inline bool EqualImmutableValues(Object obj1, Object obj2) {
MapUpdater::MapUpdater(Isolate* isolate, Handle<Map> old_map)
: isolate_(isolate),
old_map_(old_map),
old_descriptors_(old_map->instance_descriptors(), isolate_),
old_descriptors_(old_map->instance_descriptors(kRelaxedLoad), isolate_),
old_nof_(old_map_->NumberOfOwnDescriptors()),
new_elements_kind_(old_map_->elements_kind()),
is_transitionable_fast_elements_kind_(
@ -197,8 +197,9 @@ void MapUpdater::GeneralizeField(Handle<Map> map, InternalIndex modify_index,
Map::GeneralizeField(isolate_, map, modify_index, new_constness,
new_representation, new_field_type);
DCHECK(*old_descriptors_ == old_map_->instance_descriptors() ||
*old_descriptors_ == integrity_source_map_->instance_descriptors());
DCHECK(*old_descriptors_ == old_map_->instance_descriptors(kRelaxedLoad) ||
*old_descriptors_ ==
integrity_source_map_->instance_descriptors(kRelaxedLoad));
}
MapUpdater::State MapUpdater::Normalize(const char* reason) {
@ -287,8 +288,8 @@ bool MapUpdater::TrySaveIntegrityLevelTransitions() {
integrity_source_map_->NumberOfOwnDescriptors());
has_integrity_level_transition_ = true;
old_descriptors_ =
handle(integrity_source_map_->instance_descriptors(), isolate_);
old_descriptors_ = handle(
integrity_source_map_->instance_descriptors(kRelaxedLoad), isolate_);
return true;
}
@ -383,8 +384,8 @@ MapUpdater::State MapUpdater::FindTargetMap() {
if (transition.is_null()) break;
Handle<Map> tmp_map(transition, isolate_);
Handle<DescriptorArray> tmp_descriptors(tmp_map->instance_descriptors(),
isolate_);
Handle<DescriptorArray> tmp_descriptors(
tmp_map->instance_descriptors(kRelaxedLoad), isolate_);
// Check if target map is incompatible.
PropertyDetails tmp_details = tmp_descriptors->GetDetails(i);
@ -425,7 +426,8 @@ MapUpdater::State MapUpdater::FindTargetMap() {
if (target_nof == old_nof_) {
#ifdef DEBUG
if (modified_descriptor_.is_found()) {
DescriptorArray target_descriptors = target_map_->instance_descriptors();
DescriptorArray target_descriptors =
target_map_->instance_descriptors(kRelaxedLoad);
PropertyDetails details =
target_descriptors.GetDetails(modified_descriptor_);
DCHECK_EQ(new_kind_, details.kind());
@ -473,8 +475,8 @@ MapUpdater::State MapUpdater::FindTargetMap() {
old_details.attributes());
if (transition.is_null()) break;
Handle<Map> tmp_map(transition, isolate_);
Handle<DescriptorArray> tmp_descriptors(tmp_map->instance_descriptors(),
isolate_);
Handle<DescriptorArray> tmp_descriptors(
tmp_map->instance_descriptors(kRelaxedLoad), isolate_);
#ifdef DEBUG
// Check that target map is compatible.
PropertyDetails tmp_details = tmp_descriptors->GetDetails(i);
@ -498,7 +500,7 @@ Handle<DescriptorArray> MapUpdater::BuildDescriptorArray() {
InstanceType instance_type = old_map_->instance_type();
int target_nof = target_map_->NumberOfOwnDescriptors();
Handle<DescriptorArray> target_descriptors(
target_map_->instance_descriptors(), isolate_);
target_map_->instance_descriptors(kRelaxedLoad), isolate_);
// Allocate a new descriptor array large enough to hold the required
// descriptors, with minimally the exact same size as the old descriptor
@ -673,7 +675,7 @@ Handle<Map> MapUpdater::FindSplitMap(Handle<DescriptorArray> descriptors) {
TransitionsAccessor(isolate_, current, &no_allocation)
.SearchTransition(name, details.kind(), details.attributes());
if (next.is_null()) break;
DescriptorArray next_descriptors = next.instance_descriptors();
DescriptorArray next_descriptors = next.instance_descriptors(kRelaxedLoad);
PropertyDetails next_details = next_descriptors.GetDetails(i);
DCHECK_EQ(details.kind(), next_details.kind());

View File

@ -66,7 +66,7 @@ void Map::PrintReconfiguration(Isolate* isolate, FILE* file,
PropertyAttributes attributes) {
OFStream os(file);
os << "[reconfiguring]";
Name name = instance_descriptors().GetKey(modify_index);
Name name = instance_descriptors(kRelaxedLoad).GetKey(modify_index);
if (name.IsString()) {
String::cast(name).PrintOn(file);
} else {
@ -390,7 +390,7 @@ void Map::PrintGeneralization(
MaybeHandle<Object> new_value) {
OFStream os(file);
os << "[generalizing]";
Name name = instance_descriptors().GetKey(modify_index);
Name name = instance_descriptors(kRelaxedLoad).GetKey(modify_index);
if (name.IsString()) {
String::cast(name).PrintOn(file);
} else {
@ -451,7 +451,7 @@ MaybeHandle<Map> Map::CopyWithField(Isolate* isolate, Handle<Map> map,
PropertyConstness constness,
Representation representation,
TransitionFlag flag) {
DCHECK(map->instance_descriptors()
DCHECK(map->instance_descriptors(kRelaxedLoad)
.Search(*name, map->NumberOfOwnDescriptors())
.is_not_found());
@ -545,8 +545,8 @@ bool Map::InstancesNeedRewriting(Map target, int target_number_of_fields,
if (target_number_of_fields != *old_number_of_fields) return true;
// If smi descriptors were replaced by double descriptors, rewrite.
DescriptorArray old_desc = instance_descriptors();
DescriptorArray new_desc = target.instance_descriptors();
DescriptorArray old_desc = instance_descriptors(kRelaxedLoad);
DescriptorArray new_desc = target.instance_descriptors(kRelaxedLoad);
for (InternalIndex i : IterateOwnDescriptors()) {
if (new_desc.GetDetails(i).representation().IsDouble() !=
old_desc.GetDetails(i).representation().IsDouble()) {
@ -570,7 +570,7 @@ bool Map::InstancesNeedRewriting(Map target, int target_number_of_fields,
}
int Map::NumberOfFields() const {
DescriptorArray descriptors = instance_descriptors();
DescriptorArray descriptors = instance_descriptors(kRelaxedLoad);
int result = 0;
for (InternalIndex i : IterateOwnDescriptors()) {
if (descriptors.GetDetails(i).location() == kField) result++;
@ -579,7 +579,7 @@ int Map::NumberOfFields() const {
}
Map::FieldCounts Map::GetFieldCounts() const {
DescriptorArray descriptors = instance_descriptors();
DescriptorArray descriptors = instance_descriptors(kRelaxedLoad);
int mutable_count = 0;
int const_count = 0;
for (InternalIndex i : IterateOwnDescriptors()) {
@ -630,7 +630,7 @@ void Map::ReplaceDescriptors(Isolate* isolate, DescriptorArray new_descriptors,
return;
}
DescriptorArray to_replace = instance_descriptors();
DescriptorArray to_replace = instance_descriptors(kRelaxedLoad);
// Replace descriptors by new_descriptors in all maps that share it. The old
// descriptors will not be trimmed in the mark-compactor, we need to mark
// all its elements.
@ -638,7 +638,7 @@ void Map::ReplaceDescriptors(Isolate* isolate, DescriptorArray new_descriptors,
#ifndef V8_DISABLE_WRITE_BARRIERS
WriteBarrier::Marking(to_replace, to_replace.number_of_descriptors());
#endif
while (current.instance_descriptors(isolate) == to_replace) {
while (current.instance_descriptors(isolate, kRelaxedLoad) == to_replace) {
Object next = current.GetBackPointer(isolate);
if (next.IsUndefined(isolate)) break; // Stop overwriting at initial map.
current.SetEnumLength(kInvalidEnumCacheSentinel);
@ -656,8 +656,9 @@ Map Map::FindRootMap(Isolate* isolate) const {
if (back.IsUndefined(isolate)) {
// Initial map must not contain descriptors in the descriptors array
// that do not belong to the map.
DCHECK_LE(result.NumberOfOwnDescriptors(),
result.instance_descriptors().number_of_descriptors());
DCHECK_LE(
result.NumberOfOwnDescriptors(),
result.instance_descriptors(kRelaxedLoad).number_of_descriptors());
return result;
}
result = Map::cast(back);
@ -666,8 +667,9 @@ Map Map::FindRootMap(Isolate* isolate) const {
Map Map::FindFieldOwner(Isolate* isolate, InternalIndex descriptor) const {
DisallowHeapAllocation no_allocation;
DCHECK_EQ(kField,
instance_descriptors(isolate).GetDetails(descriptor).location());
DCHECK_EQ(kField, instance_descriptors(isolate, kRelaxedLoad)
.GetDetails(descriptor)
.location());
Map result = *this;
while (true) {
Object back = result.GetBackPointer(isolate);
@ -686,7 +688,8 @@ void Map::UpdateFieldType(Isolate* isolate, InternalIndex descriptor,
DCHECK(new_wrapped_type->IsSmi() || new_wrapped_type->IsWeak());
// We store raw pointers in the queue, so no allocations are allowed.
DisallowHeapAllocation no_allocation;
PropertyDetails details = instance_descriptors().GetDetails(descriptor);
PropertyDetails details =
instance_descriptors(kRelaxedLoad).GetDetails(descriptor);
if (details.location() != kField) return;
DCHECK_EQ(kData, details.kind());
@ -708,7 +711,7 @@ void Map::UpdateFieldType(Isolate* isolate, InternalIndex descriptor,
Map target = transitions.GetTarget(i);
backlog.push(target);
}
DescriptorArray descriptors = current.instance_descriptors();
DescriptorArray descriptors = current.instance_descriptors(kRelaxedLoad);
PropertyDetails details = descriptors.GetDetails(descriptor);
// It is allowed to change representation here only from None
@ -756,7 +759,8 @@ void Map::GeneralizeField(Isolate* isolate, Handle<Map> map,
Representation new_representation,
Handle<FieldType> new_field_type) {
// Check if we actually need to generalize the field type at all.
Handle<DescriptorArray> old_descriptors(map->instance_descriptors(), isolate);
Handle<DescriptorArray> old_descriptors(
map->instance_descriptors(kRelaxedLoad), isolate);
PropertyDetails old_details = old_descriptors->GetDetails(modify_index);
PropertyConstness old_constness = old_details.constness();
Representation old_representation = old_details.representation();
@ -779,8 +783,8 @@ void Map::GeneralizeField(Isolate* isolate, Handle<Map> map,
// Determine the field owner.
Handle<Map> field_owner(map->FindFieldOwner(isolate, modify_index), isolate);
Handle<DescriptorArray> descriptors(field_owner->instance_descriptors(),
isolate);
Handle<DescriptorArray> descriptors(
field_owner->instance_descriptors(kRelaxedLoad), isolate);
DCHECK_EQ(*old_field_type, descriptors->GetFieldType(modify_index));
new_field_type =
@ -866,7 +870,7 @@ Map SearchMigrationTarget(Isolate* isolate, Map old_map) {
// types instead of old_map's types.
// Go to slow map updating if the old_map has fast properties with cleared
// field types.
DescriptorArray old_descriptors = old_map.instance_descriptors();
DescriptorArray old_descriptors = old_map.instance_descriptors(kRelaxedLoad);
for (InternalIndex i : old_map.IterateOwnDescriptors()) {
PropertyDetails old_details = old_descriptors.GetDetails(i);
if (old_details.location() == kField && old_details.kind() == kData) {
@ -1029,7 +1033,7 @@ Map Map::TryReplayPropertyTransitions(Isolate* isolate, Map old_map) {
int root_nof = NumberOfOwnDescriptors();
int old_nof = old_map.NumberOfOwnDescriptors();
DescriptorArray old_descriptors = old_map.instance_descriptors();
DescriptorArray old_descriptors = old_map.instance_descriptors(kRelaxedLoad);
Map new_map = *this;
for (InternalIndex i : InternalIndex::Range(root_nof, old_nof)) {
@ -1040,7 +1044,8 @@ Map Map::TryReplayPropertyTransitions(Isolate* isolate, Map old_map) {
old_details.attributes());
if (transition.is_null()) return Map();
new_map = transition;
DescriptorArray new_descriptors = new_map.instance_descriptors();
DescriptorArray new_descriptors =
new_map.instance_descriptors(kRelaxedLoad);
PropertyDetails new_details = new_descriptors.GetDetails(i);
DCHECK_EQ(old_details.kind(), new_details.kind());
@ -1105,7 +1110,8 @@ void Map::EnsureDescriptorSlack(Isolate* isolate, Handle<Map> map, int slack) {
// Only supports adding slack to owned descriptors.
DCHECK(map->owns_descriptors());
Handle<DescriptorArray> descriptors(map->instance_descriptors(), isolate);
Handle<DescriptorArray> descriptors(map->instance_descriptors(kRelaxedLoad),
isolate);
int old_size = map->NumberOfOwnDescriptors();
if (slack <= descriptors->number_of_slack_descriptors()) return;
@ -1137,7 +1143,7 @@ void Map::EnsureDescriptorSlack(Isolate* isolate, Handle<Map> map, int slack) {
#endif
Map current = *map;
while (current.instance_descriptors() == *descriptors) {
while (current.instance_descriptors(kRelaxedLoad) == *descriptors) {
Object next = current.GetBackPointer();
if (next.IsUndefined(isolate)) break; // Stop overwriting at initial map.
current.UpdateDescriptors(isolate, *new_descriptors, layout_descriptor,
@ -1388,7 +1394,7 @@ Handle<Map> Map::AsElementsKind(Isolate* isolate, Handle<Map> map,
int Map::NumberOfEnumerableProperties() const {
int result = 0;
DescriptorArray descs = instance_descriptors();
DescriptorArray descs = instance_descriptors(kRelaxedLoad);
for (InternalIndex i : IterateOwnDescriptors()) {
if ((descs.GetDetails(i).attributes() & ONLY_ENUMERABLE) == 0 &&
!descs.GetKey(i).FilterKey(ENUMERABLE_STRINGS)) {
@ -1400,7 +1406,7 @@ int Map::NumberOfEnumerableProperties() const {
int Map::NextFreePropertyIndex() const {
int number_of_own_descriptors = NumberOfOwnDescriptors();
DescriptorArray descs = instance_descriptors();
DescriptorArray descs = instance_descriptors(kRelaxedLoad);
// Search properties backwards to find the last field.
for (int i = number_of_own_descriptors - 1; i >= 0; --i) {
PropertyDetails details = descs.GetDetails(InternalIndex(i));
@ -1609,7 +1615,7 @@ void EnsureInitialMap(Isolate* isolate, Handle<Map> map) {
// Initial maps must not contain descriptors in the descriptors array
// that do not belong to the map.
DCHECK_EQ(map->NumberOfOwnDescriptors(),
map->instance_descriptors().number_of_descriptors());
map->instance_descriptors(kRelaxedLoad).number_of_descriptors());
}
} // namespace
@ -1635,7 +1641,7 @@ Handle<Map> Map::CopyInitialMap(Isolate* isolate, Handle<Map> map,
int number_of_own_descriptors = map->NumberOfOwnDescriptors();
if (number_of_own_descriptors > 0) {
// The copy will use the same descriptors array without ownership.
DescriptorArray descriptors = map->instance_descriptors();
DescriptorArray descriptors = map->instance_descriptors(kRelaxedLoad);
result->set_owns_descriptors(false);
result->UpdateDescriptors(isolate, descriptors, map->GetLayoutDescriptor(),
number_of_own_descriptors);
@ -1667,7 +1673,7 @@ Handle<Map> Map::ShareDescriptor(Isolate* isolate, Handle<Map> map,
// array, implying that its NumberOfOwnDescriptors equals the number of
// descriptors in the descriptor array.
DCHECK_EQ(map->NumberOfOwnDescriptors(),
map->instance_descriptors().number_of_descriptors());
map->instance_descriptors(kRelaxedLoad).number_of_descriptors());
Handle<Map> result = CopyDropDescriptors(isolate, map);
Handle<Name> name = descriptor->GetKey();
@ -1685,7 +1691,7 @@ Handle<Map> Map::ShareDescriptor(Isolate* isolate, Handle<Map> map,
} else {
int slack = SlackForArraySize(old_size, kMaxNumberOfDescriptors);
EnsureDescriptorSlack(isolate, map, slack);
descriptors = handle(map->instance_descriptors(), isolate);
descriptors = handle(map->instance_descriptors(kRelaxedLoad), isolate);
}
}
@ -1719,8 +1725,9 @@ void Map::ConnectTransition(Isolate* isolate, Handle<Map> parent,
} else if (!parent->IsDetached(isolate)) {
// |parent| is initial map and it must not contain descriptors in the
// descriptors array that do not belong to the map.
DCHECK_EQ(parent->NumberOfOwnDescriptors(),
parent->instance_descriptors().number_of_descriptors());
DCHECK_EQ(
parent->NumberOfOwnDescriptors(),
parent->instance_descriptors(kRelaxedLoad).number_of_descriptors());
}
if (parent->IsDetached(isolate)) {
DCHECK(child->IsDetached(isolate));
@ -1844,14 +1851,15 @@ void Map::InstallDescriptors(Isolate* isolate, Handle<Map> parent,
Handle<LayoutDescriptor> layout_descriptor =
LayoutDescriptor::AppendIfFastOrUseFull(isolate, parent, details,
full_layout_descriptor);
child->set_layout_descriptor(*layout_descriptor);
child->set_layout_descriptor(*layout_descriptor, kReleaseStore);
#ifdef VERIFY_HEAP
// TODO(ishell): remove these checks from VERIFY_HEAP mode.
if (FLAG_verify_heap) {
CHECK(child->layout_descriptor().IsConsistentWithMap(*child));
CHECK(child->layout_descriptor(kAcquireLoad).IsConsistentWithMap(*child));
}
#else
SLOW_DCHECK(child->layout_descriptor().IsConsistentWithMap(*child));
SLOW_DCHECK(
child->layout_descriptor(kAcquireLoad).IsConsistentWithMap(*child));
#endif
child->set_visitor_id(Map::GetVisitorId(*child));
}
@ -1957,12 +1965,14 @@ Handle<Map> Map::CopyForElementsTransition(Isolate* isolate, Handle<Map> map) {
// transfer ownership to the new map.
// The properties did not change, so reuse descriptors.
map->set_owns_descriptors(false);
new_map->InitializeDescriptors(isolate, map->instance_descriptors(),
new_map->InitializeDescriptors(isolate,
map->instance_descriptors(kRelaxedLoad),
map->GetLayoutDescriptor());
} else {
// In case the map did not own its own descriptors, a split is forced by
// copying the map; creating a new descriptor array cell.
Handle<DescriptorArray> descriptors(map->instance_descriptors(), isolate);
Handle<DescriptorArray> descriptors(map->instance_descriptors(kRelaxedLoad),
isolate);
int number_of_own_descriptors = map->NumberOfOwnDescriptors();
Handle<DescriptorArray> new_descriptors = DescriptorArray::CopyUpTo(
isolate, descriptors, number_of_own_descriptors);
@ -1975,7 +1985,8 @@ Handle<Map> Map::CopyForElementsTransition(Isolate* isolate, Handle<Map> map) {
}
Handle<Map> Map::Copy(Isolate* isolate, Handle<Map> map, const char* reason) {
Handle<DescriptorArray> descriptors(map->instance_descriptors(), isolate);
Handle<DescriptorArray> descriptors(map->instance_descriptors(kRelaxedLoad),
isolate);
int number_of_own_descriptors = map->NumberOfOwnDescriptors();
Handle<DescriptorArray> new_descriptors = DescriptorArray::CopyUpTo(
isolate, descriptors, number_of_own_descriptors);
@ -2016,8 +2027,8 @@ Handle<Map> Map::CopyForPreventExtensions(
bool old_map_is_dictionary_elements_kind) {
int num_descriptors = map->NumberOfOwnDescriptors();
Handle<DescriptorArray> new_desc = DescriptorArray::CopyUpToAddAttributes(
isolate, handle(map->instance_descriptors(), isolate), num_descriptors,
attrs_to_add);
isolate, handle(map->instance_descriptors(kRelaxedLoad), isolate),
num_descriptors, attrs_to_add);
Handle<LayoutDescriptor> new_layout_descriptor(map->GetLayoutDescriptor(),
isolate);
// Do not track transitions during bootstrapping.
@ -2113,13 +2124,14 @@ Handle<Map> UpdateDescriptorForValue(Isolate* isolate, Handle<Map> map,
InternalIndex descriptor,
PropertyConstness constness,
Handle<Object> value) {
if (CanHoldValue(map->instance_descriptors(), descriptor, constness,
*value)) {
if (CanHoldValue(map->instance_descriptors(kRelaxedLoad), descriptor,
constness, *value)) {
return map;
}
PropertyAttributes attributes =
map->instance_descriptors().GetDetails(descriptor).attributes();
PropertyAttributes attributes = map->instance_descriptors(kRelaxedLoad)
.GetDetails(descriptor)
.attributes();
Representation representation = value->OptimalRepresentation(isolate);
Handle<FieldType> type = value->OptimalType(isolate, representation);
@ -2166,9 +2178,9 @@ Handle<Map> Map::TransitionToDataProperty(Isolate* isolate, Handle<Map> map,
Handle<Map> transition(maybe_transition, isolate);
InternalIndex descriptor = transition->LastAdded();
DCHECK_EQ(
attributes,
transition->instance_descriptors().GetDetails(descriptor).attributes());
DCHECK_EQ(attributes, transition->instance_descriptors(kRelaxedLoad)
.GetDetails(descriptor)
.attributes());
return UpdateDescriptorForValue(isolate, transition, descriptor, constness,
value);
@ -2286,7 +2298,8 @@ Handle<Map> Map::TransitionToAccessorProperty(Isolate* isolate, Handle<Map> map,
.SearchTransition(*name, kAccessor, attributes);
if (!maybe_transition.is_null()) {
Handle<Map> transition(maybe_transition, isolate);
DescriptorArray descriptors = transition->instance_descriptors();
DescriptorArray descriptors =
transition->instance_descriptors(kRelaxedLoad);
InternalIndex descriptor = transition->LastAdded();
DCHECK(descriptors.GetKey(descriptor).Equals(*name));
@ -2309,7 +2322,7 @@ Handle<Map> Map::TransitionToAccessorProperty(Isolate* isolate, Handle<Map> map,
}
Handle<AccessorPair> pair;
DescriptorArray old_descriptors = map->instance_descriptors();
DescriptorArray old_descriptors = map->instance_descriptors(kRelaxedLoad);
if (descriptor.is_found()) {
if (descriptor != map->LastAdded()) {
return Map::Normalize(isolate, map, mode, "AccessorsOverwritingNonLast");
@ -2370,7 +2383,8 @@ Handle<Map> Map::TransitionToAccessorProperty(Isolate* isolate, Handle<Map> map,
Handle<Map> Map::CopyAddDescriptor(Isolate* isolate, Handle<Map> map,
Descriptor* descriptor,
TransitionFlag flag) {
Handle<DescriptorArray> descriptors(map->instance_descriptors(), isolate);
Handle<DescriptorArray> descriptors(map->instance_descriptors(kRelaxedLoad),
isolate);
// Share descriptors only if map owns descriptors and it not an initial map.
if (flag == INSERT_TRANSITION && map->owns_descriptors() &&
@ -2397,7 +2411,8 @@ Handle<Map> Map::CopyAddDescriptor(Isolate* isolate, Handle<Map> map,
Handle<Map> Map::CopyInsertDescriptor(Isolate* isolate, Handle<Map> map,
Descriptor* descriptor,
TransitionFlag flag) {
Handle<DescriptorArray> old_descriptors(map->instance_descriptors(), isolate);
Handle<DescriptorArray> old_descriptors(
map->instance_descriptors(kRelaxedLoad), isolate);
// We replace the key if it is already present.
InternalIndex index =
@ -2478,8 +2493,8 @@ bool Map::EquivalentToForTransition(const Map other) const {
// JSFunctions require more checks to ensure that sloppy function is
// not equivalent to strict function.
int nof = Min(NumberOfOwnDescriptors(), other.NumberOfOwnDescriptors());
return instance_descriptors().IsEqualUpTo(other.instance_descriptors(),
nof);
return instance_descriptors(kRelaxedLoad)
.IsEqualUpTo(other.instance_descriptors(kRelaxedLoad), nof);
}
return true;
}
@ -2490,7 +2505,7 @@ bool Map::EquivalentToForElementsKindTransition(const Map other) const {
// Ensure that we don't try to generate elements kind transitions from maps
// with fields that may be generalized in-place. This must already be handled
// during addition of a new field.
DescriptorArray descriptors = instance_descriptors();
DescriptorArray descriptors = instance_descriptors(kRelaxedLoad);
for (InternalIndex i : IterateOwnDescriptors()) {
PropertyDetails details = descriptors.GetDetails(i);
if (details.location() == kField) {
@ -2571,7 +2586,7 @@ void Map::CompleteInobjectSlackTracking(Isolate* isolate) {
void Map::SetInstanceDescriptors(Isolate* isolate, DescriptorArray descriptors,
int number_of_own_descriptors) {
set_synchronized_instance_descriptors(descriptors);
set_instance_descriptors(descriptors, kReleaseStore);
SetNumberOfOwnDescriptors(number_of_own_descriptors);
#ifndef V8_DISABLE_WRITE_BARRIERS
WriteBarrier::Marking(descriptors, number_of_own_descriptors);

View File

@ -594,14 +594,14 @@ class Map : public HeapObject {
WriteBarrierMode mode = UPDATE_WRITE_BARRIER);
// [instance descriptors]: describes the object.
DECL_GETTER(synchronized_instance_descriptors, DescriptorArray)
DECL_GETTER(instance_descriptors, DescriptorArray)
DECL_RELAXED_ACCESSORS(instance_descriptors, DescriptorArray)
DECL_RELEASE_ACQUIRE_ACCESSORS(instance_descriptors, DescriptorArray)
V8_EXPORT_PRIVATE void SetInstanceDescriptors(Isolate* isolate,
DescriptorArray descriptors,
int number_of_own_descriptors);
// [layout descriptor]: describes the object layout.
DECL_ACCESSORS(layout_descriptor, LayoutDescriptor)
DECL_RELEASE_ACQUIRE_ACCESSORS(layout_descriptor, LayoutDescriptor)
// |layout descriptor| accessor which can be used from GC.
inline LayoutDescriptor layout_descriptor_gc_safe() const;
inline bool HasFastPointerLayout() const;
@ -976,10 +976,6 @@ class Map : public HeapObject {
MaybeHandle<Object> old_value, MaybeHandle<FieldType> new_field_type,
MaybeHandle<Object> new_value);
// Use the high-level instance_descriptors/SetInstanceDescriptors instead.
inline void set_synchronized_instance_descriptors(
DescriptorArray value, WriteBarrierMode mode = UPDATE_WRITE_BARRIER);
static const int kFastPropertiesSoftLimit = 12;
static const int kMaxFastProperties = 128;

View File

@ -96,11 +96,21 @@
inline void set_##name(type value, \
WriteBarrierMode mode = UPDATE_WRITE_BARRIER);
// TODO(solanes, neis): Unify naming for synchronized accessor uses.
#define DECL_SYNCHRONIZED_ACCESSORS(name, type) \
DECL_GETTER(synchronized_##name, type) \
inline void set_synchronized_##name( \
type value, WriteBarrierMode mode = UPDATE_WRITE_BARRIER);
#define DECL_ACCESSORS_LOAD_TAG(name, type, tag_type) \
inline type name(tag_type tag) const; \
inline type name(const Isolate* isolate, tag_type) const;
#define DECL_ACCESSORS_STORE_TAG(name, type, tag_type) \
inline void set_##name(type value, tag_type, \
WriteBarrierMode mode = UPDATE_WRITE_BARRIER);
#define DECL_RELAXED_ACCESSORS(name, type) \
DECL_ACCESSORS_LOAD_TAG(name, type, RelaxedLoadTag) \
DECL_ACCESSORS_STORE_TAG(name, type, RelaxedStoreTag)
#define DECL_RELEASE_ACQUIRE_ACCESSORS(name, type) \
DECL_ACCESSORS_LOAD_TAG(name, type, AcquireLoadTag) \
DECL_ACCESSORS_STORE_TAG(name, type, ReleaseStoreTag)
#define DECL_CAST(Type) \
V8_INLINE static Type cast(Object object); \
@ -162,14 +172,43 @@
#define ACCESSORS(holder, name, type, offset) \
ACCESSORS_CHECKED(holder, name, type, offset, true)
#define RELAXED_ACCESSORS_CHECKED2(holder, name, type, offset, get_condition, \
set_condition) \
type holder::name(RelaxedLoadTag tag) const { \
const Isolate* isolate = GetIsolateForPtrCompr(*this); \
return holder::name(isolate, tag); \
} \
type holder::name(const Isolate* isolate, RelaxedLoadTag) const { \
type value = TaggedField<type, offset>::load(isolate, *this); \
DCHECK(get_condition); \
return value; \
} \
void holder::set_##name(type value, RelaxedStoreTag, \
WriteBarrierMode mode) { \
DCHECK(set_condition); \
TaggedField<type, offset>::store(*this, value); \
CONDITIONAL_WRITE_BARRIER(*this, offset, value, mode); \
}
#define RELAXED_ACCESSORS_CHECKED(holder, name, type, offset, condition) \
RELAXED_ACCESSORS_CHECKED2(holder, name, type, offset, condition, condition)
#define RELAXED_ACCESSORS(holder, name, type, offset) \
RELAXED_ACCESSORS_CHECKED(holder, name, type, offset, true)
#define SYNCHRONIZED_ACCESSORS_CHECKED2(holder, name, type, offset, \
get_condition, set_condition) \
DEF_GETTER(holder, name, type) { \
type holder::name(AcquireLoadTag tag) const { \
const Isolate* isolate = GetIsolateForPtrCompr(*this); \
return holder::name(isolate, tag); \
} \
type holder::name(const Isolate* isolate, AcquireLoadTag) const { \
type value = TaggedField<type, offset>::Acquire_Load(isolate, *this); \
DCHECK(get_condition); \
return value; \
} \
void holder::set_##name(type value, WriteBarrierMode mode) { \
void holder::set_##name(type value, ReleaseStoreTag, \
WriteBarrierMode mode) { \
DCHECK(set_condition); \
TaggedField<type, offset>::Release_Store(*this, value); \
CONDITIONAL_WRITE_BARRIER(*this, offset, value, mode); \

View File

@ -795,8 +795,8 @@ void HeapObject::synchronized_set_map_word(MapWord map_word) {
MapField::Release_Store(*this, map_word);
}
bool HeapObject::synchronized_compare_and_swap_map_word(MapWord old_map_word,
MapWord new_map_word) {
bool HeapObject::release_compare_and_swap_map_word(MapWord old_map_word,
MapWord new_map_word) {
Tagged_t result =
MapField::Release_CompareAndSwap(*this, old_map_word, new_map_word);
return result == static_cast<Tagged_t>(old_map_word.ptr());

View File

@ -1305,7 +1305,7 @@ bool FunctionTemplateInfo::IsTemplateFor(Map map) {
Object type;
if (cons_obj.IsJSFunction()) {
JSFunction fun = JSFunction::cast(cons_obj);
type = fun.shared().function_data();
type = fun.shared().function_data(kAcquireLoad);
} else if (cons_obj.IsFunctionTemplateInfo()) {
type = FunctionTemplateInfo::cast(cons_obj);
} else {
@ -5118,9 +5118,11 @@ bool JSArray::MayHaveReadOnlyLength(Map js_array_map) {
// dictionary properties. Since it's not configurable, it's guaranteed to be
// the first in the descriptor array.
InternalIndex first(0);
DCHECK(js_array_map.instance_descriptors().GetKey(first) ==
DCHECK(js_array_map.instance_descriptors(kRelaxedLoad).GetKey(first) ==
js_array_map.GetReadOnlyRoots().length_string());
return js_array_map.instance_descriptors().GetDetails(first).IsReadOnly();
return js_array_map.instance_descriptors(kRelaxedLoad)
.GetDetails(first)
.IsReadOnly();
}
bool JSArray::HasReadOnlyLength(Handle<JSArray> array) {

View File

@ -57,7 +57,7 @@ bool ToPropertyDescriptorFastPath(Isolate* isolate, Handle<JSReceiver> obj,
// TODO(jkummerow): support dictionary properties?
if (map->is_dictionary_map()) return false;
Handle<DescriptorArray> descs =
Handle<DescriptorArray>(map->instance_descriptors(), isolate);
Handle<DescriptorArray>(map->instance_descriptors(kRelaxedLoad), isolate);
for (InternalIndex i : map->IterateOwnDescriptors()) {
PropertyDetails details = descs->GetDetails(i);
Handle<Object> value;

View File

@ -415,7 +415,7 @@ void SharedFunctionInfo::set_feedback_metadata(FeedbackMetadata value,
}
bool SharedFunctionInfo::is_compiled() const {
Object data = function_data();
Object data = function_data(kAcquireLoad);
return data != Smi::FromEnum(Builtins::kCompileLazy) &&
!data.IsUncompiledData();
}
@ -450,56 +450,60 @@ bool SharedFunctionInfo::has_simple_parameters() {
}
bool SharedFunctionInfo::IsApiFunction() const {
return function_data().IsFunctionTemplateInfo();
return function_data(kAcquireLoad).IsFunctionTemplateInfo();
}
FunctionTemplateInfo SharedFunctionInfo::get_api_func_data() const {
DCHECK(IsApiFunction());
return FunctionTemplateInfo::cast(function_data());
return FunctionTemplateInfo::cast(function_data(kAcquireLoad));
}
bool SharedFunctionInfo::HasBytecodeArray() const {
return function_data().IsBytecodeArray() ||
function_data().IsInterpreterData();
Object data = function_data(kAcquireLoad);
return data.IsBytecodeArray() || data.IsInterpreterData();
}
BytecodeArray SharedFunctionInfo::GetBytecodeArray() const {
DCHECK(HasBytecodeArray());
if (HasDebugInfo() && GetDebugInfo().HasInstrumentedBytecodeArray()) {
return GetDebugInfo().OriginalBytecodeArray();
} else if (function_data().IsBytecodeArray()) {
return BytecodeArray::cast(function_data());
}
Object data = function_data(kAcquireLoad);
if (data.IsBytecodeArray()) {
return BytecodeArray::cast(data);
} else {
DCHECK(function_data().IsInterpreterData());
return InterpreterData::cast(function_data()).bytecode_array();
DCHECK(data.IsInterpreterData());
return InterpreterData::cast(data).bytecode_array();
}
}
BytecodeArray SharedFunctionInfo::GetDebugBytecodeArray() const {
DCHECK(HasBytecodeArray());
DCHECK(HasDebugInfo() && GetDebugInfo().HasInstrumentedBytecodeArray());
if (function_data().IsBytecodeArray()) {
return BytecodeArray::cast(function_data());
Object data = function_data(kAcquireLoad);
if (data.IsBytecodeArray()) {
return BytecodeArray::cast(data);
} else {
DCHECK(function_data().IsInterpreterData());
return InterpreterData::cast(function_data()).bytecode_array();
DCHECK(data.IsInterpreterData());
return InterpreterData::cast(data).bytecode_array();
}
}
void SharedFunctionInfo::SetDebugBytecodeArray(BytecodeArray bytecode) {
DCHECK(HasBytecodeArray());
if (function_data().IsBytecodeArray()) {
set_function_data(bytecode);
Object data = function_data(kAcquireLoad);
if (data.IsBytecodeArray()) {
set_function_data(bytecode, kReleaseStore);
} else {
DCHECK(function_data().IsInterpreterData());
DCHECK(data.IsInterpreterData());
interpreter_data().set_bytecode_array(bytecode);
}
}
void SharedFunctionInfo::set_bytecode_array(BytecodeArray bytecode) {
DCHECK(function_data() == Smi::FromEnum(Builtins::kCompileLazy) ||
DCHECK(function_data(kAcquireLoad) == Smi::FromEnum(Builtins::kCompileLazy) ||
HasUncompiledData());
set_function_data(bytecode);
set_function_data(bytecode, kReleaseStore);
}
bool SharedFunctionInfo::ShouldFlushBytecode(BytecodeFlushMode mode) {
@ -513,7 +517,7 @@ bool SharedFunctionInfo::ShouldFlushBytecode(BytecodeFlushMode mode) {
// Get a snapshot of the function data field, and if it is a bytecode array,
// check if it is old. Note, this is done this way since this function can be
// called by the concurrent marker.
Object data = function_data();
Object data = function_data(kAcquireLoad);
if (!data.IsBytecodeArray()) return false;
if (mode == BytecodeFlushMode::kStressFlushBytecode) return true;
@ -529,86 +533,87 @@ Code SharedFunctionInfo::InterpreterTrampoline() const {
}
bool SharedFunctionInfo::HasInterpreterData() const {
return function_data().IsInterpreterData();
return function_data(kAcquireLoad).IsInterpreterData();
}
InterpreterData SharedFunctionInfo::interpreter_data() const {
DCHECK(HasInterpreterData());
return InterpreterData::cast(function_data());
return InterpreterData::cast(function_data(kAcquireLoad));
}
void SharedFunctionInfo::set_interpreter_data(
InterpreterData interpreter_data) {
DCHECK(FLAG_interpreted_frames_native_stack);
set_function_data(interpreter_data);
set_function_data(interpreter_data, kReleaseStore);
}
bool SharedFunctionInfo::HasAsmWasmData() const {
return function_data().IsAsmWasmData();
return function_data(kAcquireLoad).IsAsmWasmData();
}
AsmWasmData SharedFunctionInfo::asm_wasm_data() const {
DCHECK(HasAsmWasmData());
return AsmWasmData::cast(function_data());
return AsmWasmData::cast(function_data(kAcquireLoad));
}
void SharedFunctionInfo::set_asm_wasm_data(AsmWasmData data) {
DCHECK(function_data() == Smi::FromEnum(Builtins::kCompileLazy) ||
DCHECK(function_data(kAcquireLoad) == Smi::FromEnum(Builtins::kCompileLazy) ||
HasUncompiledData() || HasAsmWasmData());
set_function_data(data);
set_function_data(data, kReleaseStore);
}
bool SharedFunctionInfo::HasBuiltinId() const {
return function_data().IsSmi();
return function_data(kAcquireLoad).IsSmi();
}
int SharedFunctionInfo::builtin_id() const {
DCHECK(HasBuiltinId());
int id = Smi::ToInt(function_data());
int id = Smi::ToInt(function_data(kAcquireLoad));
DCHECK(Builtins::IsBuiltinId(id));
return id;
}
void SharedFunctionInfo::set_builtin_id(int builtin_id) {
DCHECK(Builtins::IsBuiltinId(builtin_id));
set_function_data(Smi::FromInt(builtin_id), SKIP_WRITE_BARRIER);
set_function_data(Smi::FromInt(builtin_id), kReleaseStore,
SKIP_WRITE_BARRIER);
}
bool SharedFunctionInfo::HasUncompiledData() const {
return function_data().IsUncompiledData();
return function_data(kAcquireLoad).IsUncompiledData();
}
UncompiledData SharedFunctionInfo::uncompiled_data() const {
DCHECK(HasUncompiledData());
return UncompiledData::cast(function_data());
return UncompiledData::cast(function_data(kAcquireLoad));
}
void SharedFunctionInfo::set_uncompiled_data(UncompiledData uncompiled_data) {
DCHECK(function_data() == Smi::FromEnum(Builtins::kCompileLazy) ||
DCHECK(function_data(kAcquireLoad) == Smi::FromEnum(Builtins::kCompileLazy) ||
HasUncompiledData());
DCHECK(uncompiled_data.IsUncompiledData());
set_function_data(uncompiled_data);
set_function_data(uncompiled_data, kReleaseStore);
}
bool SharedFunctionInfo::HasUncompiledDataWithPreparseData() const {
return function_data().IsUncompiledDataWithPreparseData();
return function_data(kAcquireLoad).IsUncompiledDataWithPreparseData();
}
UncompiledDataWithPreparseData
SharedFunctionInfo::uncompiled_data_with_preparse_data() const {
DCHECK(HasUncompiledDataWithPreparseData());
return UncompiledDataWithPreparseData::cast(function_data());
return UncompiledDataWithPreparseData::cast(function_data(kAcquireLoad));
}
void SharedFunctionInfo::set_uncompiled_data_with_preparse_data(
UncompiledDataWithPreparseData uncompiled_data_with_preparse_data) {
DCHECK(function_data() == Smi::FromEnum(Builtins::kCompileLazy));
DCHECK(function_data(kAcquireLoad) == Smi::FromEnum(Builtins::kCompileLazy));
DCHECK(uncompiled_data_with_preparse_data.IsUncompiledDataWithPreparseData());
set_function_data(uncompiled_data_with_preparse_data);
set_function_data(uncompiled_data_with_preparse_data, kReleaseStore);
}
bool SharedFunctionInfo::HasUncompiledDataWithoutPreparseData() const {
return function_data().IsUncompiledDataWithoutPreparseData();
return function_data(kAcquireLoad).IsUncompiledDataWithoutPreparseData();
}
void SharedFunctionInfo::ClearPreparseData() {
@ -670,15 +675,15 @@ void UncompiledDataWithPreparseData::Init(LocalIsolate* isolate,
}
bool SharedFunctionInfo::HasWasmExportedFunctionData() const {
return function_data().IsWasmExportedFunctionData();
return function_data(kAcquireLoad).IsWasmExportedFunctionData();
}
bool SharedFunctionInfo::HasWasmJSFunctionData() const {
return function_data().IsWasmJSFunctionData();
return function_data(kAcquireLoad).IsWasmJSFunctionData();
}
bool SharedFunctionInfo::HasWasmCapiFunctionData() const {
return function_data().IsWasmCapiFunctionData();
return function_data(kAcquireLoad).IsWasmCapiFunctionData();
}
HeapObject SharedFunctionInfo::script() const {

View File

@ -72,7 +72,7 @@ Code SharedFunctionInfo::GetCode() const {
// ======
Isolate* isolate = GetIsolate();
Object data = function_data();
Object data = function_data(kAcquireLoad);
if (data.IsSmi()) {
// Holding a Smi means we are a builtin.
DCHECK(HasBuiltinId());
@ -113,17 +113,17 @@ Code SharedFunctionInfo::GetCode() const {
WasmExportedFunctionData SharedFunctionInfo::wasm_exported_function_data()
const {
DCHECK(HasWasmExportedFunctionData());
return WasmExportedFunctionData::cast(function_data());
return WasmExportedFunctionData::cast(function_data(kAcquireLoad));
}
WasmJSFunctionData SharedFunctionInfo::wasm_js_function_data() const {
DCHECK(HasWasmJSFunctionData());
return WasmJSFunctionData::cast(function_data());
return WasmJSFunctionData::cast(function_data(kAcquireLoad));
}
WasmCapiFunctionData SharedFunctionInfo::wasm_capi_function_data() const {
DCHECK(HasWasmCapiFunctionData());
return WasmCapiFunctionData::cast(function_data());
return WasmCapiFunctionData::cast(function_data(kAcquireLoad));
}
SharedFunctionInfo::ScriptIterator::ScriptIterator(Isolate* isolate,
@ -310,7 +310,7 @@ void SharedFunctionInfo::DiscardCompiled(
Handle<UncompiledData> data =
isolate->factory()->NewUncompiledDataWithoutPreparseData(
inferred_name_val, start_position, end_position);
shared_info->set_function_data(*data);
shared_info->set_function_data(*data, kReleaseStore);
}
}

View File

@ -304,7 +304,7 @@ class SharedFunctionInfo : public HeapObject {
// - a UncompiledDataWithPreparseData for lazy compilation
// [HasUncompiledDataWithPreparseData()]
// - a WasmExportedFunctionData for Wasm [HasWasmExportedFunctionData()]
DECL_ACCESSORS(function_data, Object)
DECL_RELEASE_ACQUIRE_ACCESSORS(function_data, Object)
inline bool IsApiFunction() const;
inline bool is_class_constructor() const;

View File

@ -36,7 +36,7 @@ BOOL_ACCESSORS(FunctionTemplateInfo, flag, do_not_cache, DoNotCacheBit::kShift)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, accept_any_receiver,
AcceptAnyReceiverBit::kShift)
SYNCHRONIZED_ACCESSORS(FunctionTemplateInfo, synchronized_call_code, HeapObject,
SYNCHRONIZED_ACCESSORS(FunctionTemplateInfo, call_code, HeapObject,
kCallCodeOffset)
// static

View File

@ -85,7 +85,7 @@ class FunctionTemplateInfo
DECL_RARE_ACCESSORS(c_signature, CSignature, Object)
#undef DECL_RARE_ACCESSORS
DECL_SYNCHRONIZED_ACCESSORS(call_code, HeapObject)
DECL_RELEASE_ACQUIRE_ACCESSORS(call_code, HeapObject)
// Begin flag bits ---------------------
DECL_BOOLEAN_ACCESSORS(undetectable)

View File

@ -100,7 +100,7 @@ HeapObjectSlot TransitionArray::GetTargetSlot(int transition_number) {
PropertyDetails TransitionsAccessor::GetTargetDetails(Name name, Map target) {
DCHECK(!IsSpecialTransition(name.GetReadOnlyRoots(), name));
InternalIndex descriptor = target.LastAdded();
DescriptorArray descriptors = target.instance_descriptors();
DescriptorArray descriptors = target.instance_descriptors(kRelaxedLoad);
// Transitions are allowed only for the last added property.
DCHECK(descriptors.GetKey(descriptor).Equals(name));
return descriptors.GetDetails(descriptor);
@ -113,7 +113,7 @@ PropertyDetails TransitionsAccessor::GetSimpleTargetDetails(Map transition) {
// static
Name TransitionsAccessor::GetSimpleTransitionKey(Map transition) {
InternalIndex descriptor = transition.LastAdded();
return transition.instance_descriptors().GetKey(descriptor);
return transition.instance_descriptors(kRelaxedLoad).GetKey(descriptor);
}
// static

View File

@ -285,7 +285,7 @@ bool TransitionsAccessor::IsMatchingMap(Map target, Name name,
PropertyKind kind,
PropertyAttributes attributes) {
InternalIndex descriptor = target.LastAdded();
DescriptorArray descriptors = target.instance_descriptors();
DescriptorArray descriptors = target.instance_descriptors(kRelaxedLoad);
Name key = descriptors.GetKey(descriptor);
if (key != name) return false;
return descriptors.GetDetails(descriptor)
@ -530,7 +530,8 @@ void TransitionsAccessor::CheckNewTransitionsAreConsistent(
TransitionArray new_transitions = TransitionArray::cast(transitions);
for (int i = 0; i < old_transitions.number_of_transitions(); i++) {
Map target = old_transitions.GetTarget(i);
if (target.instance_descriptors() == map_.instance_descriptors()) {
if (target.instance_descriptors(kRelaxedLoad) ==
map_.instance_descriptors(kRelaxedLoad)) {
Name key = old_transitions.GetKey(i);
int new_target_index;
if (IsSpecialTransition(ReadOnlyRoots(isolate_), key)) {

View File

@ -588,9 +588,11 @@ Maybe<bool> ValueSerializer::WriteJSObject(Handle<JSObject> object) {
uint32_t properties_written = 0;
bool map_changed = false;
for (InternalIndex i : map->IterateOwnDescriptors()) {
Handle<Name> key(map->instance_descriptors().GetKey(i), isolate_);
Handle<Name> key(map->instance_descriptors(kRelaxedLoad).GetKey(i),
isolate_);
if (!key->IsString()) continue;
PropertyDetails details = map->instance_descriptors().GetDetails(i);
PropertyDetails details =
map->instance_descriptors(kRelaxedLoad).GetDetails(i);
if (details.IsDontEnum()) continue;
Handle<Object> value;
@ -1975,7 +1977,8 @@ static void CommitProperties(Handle<JSObject> object, Handle<Map> map,
DCHECK(!object->map().is_dictionary_map());
DisallowHeapAllocation no_gc;
DescriptorArray descriptors = object->map().instance_descriptors();
DescriptorArray descriptors =
object->map().instance_descriptors(kRelaxedLoad);
for (InternalIndex i : InternalIndex::Range(properties.size())) {
// Initializing store.
object->WriteToField(i, descriptors.GetDetails(i),
@ -1997,7 +2000,8 @@ Maybe<uint32_t> ValueDeserializer::ReadJSObjectProperties(
bool transitioning = true;
Handle<Map> map(object->map(), isolate_);
DCHECK(!map->is_dictionary_map());
DCHECK_EQ(0, map->instance_descriptors().number_of_descriptors());
DCHECK_EQ(0,
map->instance_descriptors(kRelaxedLoad).number_of_descriptors());
std::vector<Handle<Object>> properties;
properties.reserve(8);
@ -2048,11 +2052,11 @@ Maybe<uint32_t> ValueDeserializer::ReadJSObjectProperties(
if (transitioning) {
InternalIndex descriptor(properties.size());
PropertyDetails details =
target->instance_descriptors().GetDetails(descriptor);
target->instance_descriptors(kRelaxedLoad).GetDetails(descriptor);
Representation expected_representation = details.representation();
if (value->FitsRepresentation(expected_representation)) {
if (expected_representation.IsHeapObject() &&
!target->instance_descriptors()
!target->instance_descriptors(kRelaxedLoad)
.GetFieldType(descriptor)
.NowContains(value)) {
Handle<FieldType> value_type =
@ -2061,7 +2065,7 @@ Maybe<uint32_t> ValueDeserializer::ReadJSObjectProperties(
details.constness(), expected_representation,
value_type);
}
DCHECK(target->instance_descriptors()
DCHECK(target->instance_descriptors(kRelaxedLoad)
.GetFieldType(descriptor)
.NowContains(value));
properties.push_back(value);

View File

@ -1066,14 +1066,15 @@ void V8HeapExplorer::ExtractMapReferences(HeapEntry* entry, Map map) {
Map::kTransitionsOrPrototypeInfoOffset);
}
}
DescriptorArray descriptors = map.instance_descriptors();
DescriptorArray descriptors = map.instance_descriptors(kRelaxedLoad);
TagObject(descriptors, "(map descriptors)");
SetInternalReference(entry, "descriptors", descriptors,
Map::kInstanceDescriptorsOffset);
SetInternalReference(entry, "prototype", map.prototype(),
Map::kPrototypeOffset);
if (FLAG_unbox_double_fields) {
SetInternalReference(entry, "layout_descriptor", map.layout_descriptor(),
SetInternalReference(entry, "layout_descriptor",
map.layout_descriptor(kAcquireLoad),
Map::kLayoutDescriptorOffset);
}
if (map.IsContextMap()) {
@ -1123,7 +1124,8 @@ void V8HeapExplorer::ExtractSharedFunctionInfoReferences(
SetInternalReference(entry, "script_or_debug_info",
shared.script_or_debug_info(),
SharedFunctionInfo::kScriptOrDebugInfoOffset);
SetInternalReference(entry, "function_data", shared.function_data(),
SetInternalReference(entry, "function_data",
shared.function_data(kAcquireLoad),
SharedFunctionInfo::kFunctionDataOffset);
SetInternalReference(
entry, "raw_outer_scope_info_or_feedback_metadata",
@ -1324,7 +1326,7 @@ void V8HeapExplorer::ExtractPropertyReferences(JSObject js_obj,
HeapEntry* entry) {
Isolate* isolate = js_obj.GetIsolate();
if (js_obj.HasFastProperties()) {
DescriptorArray descs = js_obj.map().instance_descriptors();
DescriptorArray descs = js_obj.map().instance_descriptors(kRelaxedLoad);
for (InternalIndex i : js_obj.map().IterateOwnDescriptors()) {
PropertyDetails details = descs.GetDetails(i);
switch (details.location()) {

View File

@ -173,9 +173,10 @@ bool RegExpUtils::IsUnmodifiedRegExp(Isolate* isolate, Handle<Object> obj) {
// with the init order in the bootstrapper).
InternalIndex kExecIndex(JSRegExp::kExecFunctionDescriptorIndex);
DCHECK_EQ(*(isolate->factory()->exec_string()),
proto_map.instance_descriptors().GetKey(kExecIndex));
if (proto_map.instance_descriptors().GetDetails(kExecIndex).constness() !=
PropertyConstness::kConst) {
proto_map.instance_descriptors(kRelaxedLoad).GetKey(kExecIndex));
if (proto_map.instance_descriptors(kRelaxedLoad)
.GetDetails(kExecIndex)
.constness() != PropertyConstness::kConst) {
return false;
}

View File

@ -138,8 +138,9 @@ inline void SetHomeObject(Isolate* isolate, JSFunction method,
if (method.shared().needs_home_object()) {
const InternalIndex kPropertyIndex(
JSFunction::kMaybeHomeObjectDescriptorIndex);
CHECK_EQ(method.map().instance_descriptors().GetKey(kPropertyIndex),
ReadOnlyRoots(isolate).home_object_symbol());
CHECK_EQ(
method.map().instance_descriptors(kRelaxedLoad).GetKey(kPropertyIndex),
ReadOnlyRoots(isolate).home_object_symbol());
FieldIndex field_index =
FieldIndex::ForDescriptor(method.map(), kPropertyIndex);

View File

@ -110,7 +110,8 @@ MaybeHandle<JSObject> JSObjectWalkVisitor<ContextObject>::StructureWalk(
if (!copy->IsJSArray(isolate)) {
if (copy->HasFastProperties(isolate)) {
Handle<DescriptorArray> descriptors(
copy->map(isolate).instance_descriptors(isolate), isolate);
copy->map(isolate).instance_descriptors(isolate, kRelaxedLoad),
isolate);
for (InternalIndex i : copy->map(isolate).IterateOwnDescriptors()) {
PropertyDetails details = descriptors->GetDetails(i);
DCHECK_EQ(kField, details.location());

View File

@ -108,8 +108,8 @@ bool DeleteObjectPropertyFast(Isolate* isolate, Handle<JSReceiver> receiver,
int nof = receiver_map->NumberOfOwnDescriptors();
if (nof == 0) return false;
InternalIndex descriptor(nof - 1);
Handle<DescriptorArray> descriptors(receiver_map->instance_descriptors(),
isolate);
Handle<DescriptorArray> descriptors(
receiver_map->instance_descriptors(kRelaxedLoad), isolate);
if (descriptors->GetKey(descriptor) != *key) return false;
// (3) The property to be deleted must be deletable.
PropertyDetails details = descriptors->GetDetails(descriptor);

View File

@ -298,7 +298,7 @@ void StringStream::PrintName(Object name) {
void StringStream::PrintUsingMap(JSObject js_object) {
Map map = js_object.map();
DescriptorArray descs = map.instance_descriptors();
DescriptorArray descs = map.instance_descriptors(kRelaxedLoad);
for (InternalIndex i : map.IterateOwnDescriptors()) {
PropertyDetails details = descs.GetDetails(i);
if (details.location() == kField) {

View File

@ -1511,7 +1511,8 @@ auto Func::call(const Val args[], Val results[]) const -> own<Trap> {
auto store = func->store();
auto isolate = store->i_isolate();
i::HandleScope handle_scope(isolate);
i::Object raw_function_data = func->v8_object()->shared().function_data();
i::Object raw_function_data =
func->v8_object()->shared().function_data(v8::kAcquireLoad);
// WasmCapiFunctions can be called directly.
if (raw_function_data.IsWasmCapiFunctionData()) {
@ -1544,7 +1545,7 @@ auto Func::call(const Val args[], Val results[]) const -> own<Trap> {
if (object_ref->IsTuple2()) {
i::JSFunction jsfunc =
i::JSFunction::cast(i::Tuple2::cast(*object_ref).value2());
i::Object data = jsfunc.shared().function_data();
i::Object data = jsfunc.shared().function_data(v8::kAcquireLoad);
if (data.IsWasmCapiFunctionData()) {
return CallWasmCapiFunction(i::WasmCapiFunctionData::cast(data), args,
results);

View File

@ -148,8 +148,8 @@ TEST(StressJS) {
// Patch the map to have an accessor for "get".
Handle<Map> map(function->initial_map(), isolate);
Handle<DescriptorArray> instance_descriptors(map->instance_descriptors(),
isolate);
Handle<DescriptorArray> instance_descriptors(
map->instance_descriptors(kRelaxedLoad), isolate);
CHECK_EQ(0, instance_descriptors->number_of_descriptors());
PropertyAttributes attrs = NONE;

View File

@ -4477,7 +4477,7 @@ TEST(NextCodeLinkInCodeDataContainerIsCleared) {
OptimizeDummyFunction(CcTest::isolate(), "mortal2");
CHECK_EQ(mortal2->code().next_code_link(), mortal1->code());
code_data_container = scope.CloseAndEscape(Handle<CodeDataContainer>(
mortal2->code().code_data_container(), isolate));
mortal2->code().code_data_container(kAcquireLoad), isolate));
CompileRun("mortal1 = null; mortal2 = null;");
}
CcTest::CollectAllAvailableGarbage();

View File

@ -152,7 +152,8 @@ class InterpreterTester {
}
if (!bytecode_.is_null()) {
function->shared().set_function_data(*bytecode_.ToHandleChecked());
function->shared().set_function_data(*bytecode_.ToHandleChecked(),
kReleaseStore);
is_compiled_scope = function->shared().is_compiled_scope(isolate_);
}
if (HasFeedbackMetadata()) {

View File

@ -2655,9 +2655,13 @@ THREADED_TEST(AccessorIsPreservedOnAttributeChange) {
LocalContext env;
v8::Local<v8::Value> res = CompileRun("var a = []; a;");
i::Handle<i::JSReceiver> a(v8::Utils::OpenHandle(v8::Object::Cast(*res)));
CHECK_EQ(1, a->map().instance_descriptors().number_of_descriptors());
CHECK_EQ(
1,
a->map().instance_descriptors(v8::kRelaxedLoad).number_of_descriptors());
CompileRun("Object.defineProperty(a, 'length', { writable: false });");
CHECK_EQ(0, a->map().instance_descriptors().number_of_descriptors());
CHECK_EQ(
0,
a->map().instance_descriptors(v8::kRelaxedLoad).number_of_descriptors());
// But we should still have an AccessorInfo.
i::Handle<i::String> name = CcTest::i_isolate()->factory()->length_string();
i::LookupIterator it(CcTest::i_isolate(), a, name,

View File

@ -48,7 +48,7 @@ class ConcurrentSearchThread final : public v8::base::Thread {
Handle<Map> map(handle->map(), &local_heap);
Handle<DescriptorArray> descriptors(
map->synchronized_instance_descriptors(), &local_heap);
map->instance_descriptors(kAcquireLoad), &local_heap);
bool is_background_thread = true;
InternalIndex const number =
descriptors->Search(*name_, *map, is_background_thread);

View File

@ -274,7 +274,7 @@ class Expectations {
CHECK_EQ(expected_nof, map.NumberOfOwnDescriptors());
CHECK(!map.is_dictionary_map());
DescriptorArray descriptors = map.instance_descriptors();
DescriptorArray descriptors = map.instance_descriptors(kRelaxedLoad);
CHECK(expected_nof <= number_of_properties_);
for (InternalIndex i : InternalIndex::Range(expected_nof)) {
if (!Check(descriptors, i)) {
@ -443,8 +443,8 @@ class Expectations {
Handle<Object> getter(pair->getter(), isolate);
Handle<Object> setter(pair->setter(), isolate);
InternalIndex descriptor =
map->instance_descriptors().SearchWithCache(isolate, *name, *map);
InternalIndex descriptor = map->instance_descriptors(kRelaxedLoad)
.SearchWithCache(isolate, *name, *map);
map = Map::TransitionToAccessorProperty(isolate, map, name, descriptor,
getter, setter, attributes);
CHECK(!map->is_deprecated());
@ -551,8 +551,10 @@ TEST(ReconfigureAccessorToNonExistingDataFieldHeavy) {
CHECK_EQ(1, obj->map().NumberOfOwnDescriptors());
InternalIndex first(0);
CHECK(
obj->map().instance_descriptors().GetStrongValue(first).IsAccessorPair());
CHECK(obj->map()
.instance_descriptors(kRelaxedLoad)
.GetStrongValue(first)
.IsAccessorPair());
Handle<Object> value(Smi::FromInt(42), isolate);
JSObject::SetOwnPropertyIgnoreAttributes(obj, foo_str, value, NONE).Check();
@ -2863,10 +2865,13 @@ void TestStoreToConstantField(const char* store_func_source,
CHECK(!map->is_deprecated());
CHECK_EQ(1, map->NumberOfOwnDescriptors());
InternalIndex first(0);
CHECK(map->instance_descriptors().GetDetails(first).representation().Equals(
expected_rep));
CHECK_EQ(PropertyConstness::kConst,
map->instance_descriptors().GetDetails(first).constness());
CHECK(map->instance_descriptors(kRelaxedLoad)
.GetDetails(first)
.representation()
.Equals(expected_rep));
CHECK_EQ(
PropertyConstness::kConst,
map->instance_descriptors(kRelaxedLoad).GetDetails(first).constness());
// Store value2 to obj2 and check that it got same map and property details
// did not change.
@ -2878,10 +2883,13 @@ void TestStoreToConstantField(const char* store_func_source,
CHECK(!map->is_deprecated());
CHECK_EQ(1, map->NumberOfOwnDescriptors());
CHECK(map->instance_descriptors().GetDetails(first).representation().Equals(
expected_rep));
CHECK_EQ(PropertyConstness::kConst,
map->instance_descriptors().GetDetails(first).constness());
CHECK(map->instance_descriptors(kRelaxedLoad)
.GetDetails(first)
.representation()
.Equals(expected_rep));
CHECK_EQ(
PropertyConstness::kConst,
map->instance_descriptors(kRelaxedLoad).GetDetails(first).constness());
// Store value2 to obj1 and check that property became mutable.
Call(isolate, store_func, obj1, value2).Check();
@ -2891,10 +2899,13 @@ void TestStoreToConstantField(const char* store_func_source,
CHECK(!map->is_deprecated());
CHECK_EQ(1, map->NumberOfOwnDescriptors());
CHECK(map->instance_descriptors().GetDetails(first).representation().Equals(
expected_rep));
CHECK_EQ(expected_constness,
map->instance_descriptors().GetDetails(first).constness());
CHECK(map->instance_descriptors(kRelaxedLoad)
.GetDetails(first)
.representation()
.Equals(expected_rep));
CHECK_EQ(
expected_constness,
map->instance_descriptors(kRelaxedLoad).GetDetails(first).constness());
}
void TestStoreToConstantField_PlusMinusZero(const char* store_func_source,

View File

@ -115,10 +115,14 @@ TEST(EnumCache) {
*env->Global()->Get(env.local(), v8_str("cc")).ToLocalChecked()));
// Check the transition tree.
CHECK_EQ(a->map().instance_descriptors(), b->map().instance_descriptors());
CHECK_EQ(b->map().instance_descriptors(), c->map().instance_descriptors());
CHECK_NE(c->map().instance_descriptors(), cc->map().instance_descriptors());
CHECK_NE(b->map().instance_descriptors(), cc->map().instance_descriptors());
CHECK_EQ(a->map().instance_descriptors(kRelaxedLoad),
b->map().instance_descriptors(kRelaxedLoad));
CHECK_EQ(b->map().instance_descriptors(kRelaxedLoad),
c->map().instance_descriptors(kRelaxedLoad));
CHECK_NE(c->map().instance_descriptors(kRelaxedLoad),
cc->map().instance_descriptors(kRelaxedLoad));
CHECK_NE(b->map().instance_descriptors(kRelaxedLoad),
cc->map().instance_descriptors(kRelaxedLoad));
// Check that the EnumLength is unset.
CHECK_EQ(a->map().EnumLength(), kInvalidEnumCacheSentinel);
@ -127,13 +131,13 @@ TEST(EnumCache) {
CHECK_EQ(cc->map().EnumLength(), kInvalidEnumCacheSentinel);
// Check that the EnumCache is empty.
CHECK_EQ(a->map().instance_descriptors().enum_cache(),
CHECK_EQ(a->map().instance_descriptors(kRelaxedLoad).enum_cache(),
*factory->empty_enum_cache());
CHECK_EQ(b->map().instance_descriptors().enum_cache(),
CHECK_EQ(b->map().instance_descriptors(kRelaxedLoad).enum_cache(),
*factory->empty_enum_cache());
CHECK_EQ(c->map().instance_descriptors().enum_cache(),
CHECK_EQ(c->map().instance_descriptors(kRelaxedLoad).enum_cache(),
*factory->empty_enum_cache());
CHECK_EQ(cc->map().instance_descriptors().enum_cache(),
CHECK_EQ(cc->map().instance_descriptors(kRelaxedLoad).enum_cache(),
*factory->empty_enum_cache());
// The EnumCache is shared on the DescriptorArray, creating it on {cc} has no
@ -145,14 +149,15 @@ TEST(EnumCache) {
CHECK_EQ(c->map().EnumLength(), kInvalidEnumCacheSentinel);
CHECK_EQ(cc->map().EnumLength(), 3);
CHECK_EQ(a->map().instance_descriptors().enum_cache(),
CHECK_EQ(a->map().instance_descriptors(kRelaxedLoad).enum_cache(),
*factory->empty_enum_cache());
CHECK_EQ(b->map().instance_descriptors().enum_cache(),
CHECK_EQ(b->map().instance_descriptors(kRelaxedLoad).enum_cache(),
*factory->empty_enum_cache());
CHECK_EQ(c->map().instance_descriptors().enum_cache(),
CHECK_EQ(c->map().instance_descriptors(kRelaxedLoad).enum_cache(),
*factory->empty_enum_cache());
EnumCache enum_cache = cc->map().instance_descriptors().enum_cache();
EnumCache enum_cache =
cc->map().instance_descriptors(kRelaxedLoad).enum_cache();
CHECK_NE(enum_cache, *factory->empty_enum_cache());
CHECK_EQ(enum_cache.keys().length(), 3);
CHECK_EQ(enum_cache.indices().length(), 3);
@ -169,14 +174,19 @@ TEST(EnumCache) {
// The enum cache is shared on the descriptor array of maps {a}, {b} and
// {c} only.
EnumCache enum_cache = a->map().instance_descriptors().enum_cache();
EnumCache enum_cache =
a->map().instance_descriptors(kRelaxedLoad).enum_cache();
CHECK_NE(enum_cache, *factory->empty_enum_cache());
CHECK_NE(cc->map().instance_descriptors().enum_cache(),
CHECK_NE(cc->map().instance_descriptors(kRelaxedLoad).enum_cache(),
*factory->empty_enum_cache());
CHECK_NE(cc->map().instance_descriptors().enum_cache(), enum_cache);
CHECK_EQ(a->map().instance_descriptors().enum_cache(), enum_cache);
CHECK_EQ(b->map().instance_descriptors().enum_cache(), enum_cache);
CHECK_EQ(c->map().instance_descriptors().enum_cache(), enum_cache);
CHECK_NE(cc->map().instance_descriptors(kRelaxedLoad).enum_cache(),
enum_cache);
CHECK_EQ(a->map().instance_descriptors(kRelaxedLoad).enum_cache(),
enum_cache);
CHECK_EQ(b->map().instance_descriptors(kRelaxedLoad).enum_cache(),
enum_cache);
CHECK_EQ(c->map().instance_descriptors(kRelaxedLoad).enum_cache(),
enum_cache);
CHECK_EQ(enum_cache.keys().length(), 1);
CHECK_EQ(enum_cache.indices().length(), 1);
@ -185,7 +195,8 @@ TEST(EnumCache) {
// Creating the EnumCache for {c} will create a new EnumCache on the shared
// DescriptorArray.
Handle<EnumCache> previous_enum_cache(
a->map().instance_descriptors().enum_cache(), a->GetIsolate());
a->map().instance_descriptors(kRelaxedLoad).enum_cache(),
a->GetIsolate());
Handle<FixedArray> previous_keys(previous_enum_cache->keys(),
a->GetIsolate());
Handle<FixedArray> previous_indices(previous_enum_cache->indices(),
@ -197,7 +208,8 @@ TEST(EnumCache) {
CHECK_EQ(c->map().EnumLength(), 3);
CHECK_EQ(cc->map().EnumLength(), 3);
EnumCache enum_cache = c->map().instance_descriptors().enum_cache();
EnumCache enum_cache =
c->map().instance_descriptors(kRelaxedLoad).enum_cache();
CHECK_NE(enum_cache, *factory->empty_enum_cache());
// The keys and indices caches are updated.
CHECK_EQ(enum_cache, *previous_enum_cache);
@ -210,20 +222,25 @@ TEST(EnumCache) {
// The enum cache is shared on the descriptor array of maps {a}, {b} and
// {c} only.
CHECK_NE(cc->map().instance_descriptors().enum_cache(),
CHECK_NE(cc->map().instance_descriptors(kRelaxedLoad).enum_cache(),
*factory->empty_enum_cache());
CHECK_NE(cc->map().instance_descriptors().enum_cache(), enum_cache);
CHECK_NE(cc->map().instance_descriptors().enum_cache(),
CHECK_NE(cc->map().instance_descriptors(kRelaxedLoad).enum_cache(),
enum_cache);
CHECK_NE(cc->map().instance_descriptors(kRelaxedLoad).enum_cache(),
*previous_enum_cache);
CHECK_EQ(a->map().instance_descriptors().enum_cache(), enum_cache);
CHECK_EQ(b->map().instance_descriptors().enum_cache(), enum_cache);
CHECK_EQ(c->map().instance_descriptors().enum_cache(), enum_cache);
CHECK_EQ(a->map().instance_descriptors(kRelaxedLoad).enum_cache(),
enum_cache);
CHECK_EQ(b->map().instance_descriptors(kRelaxedLoad).enum_cache(),
enum_cache);
CHECK_EQ(c->map().instance_descriptors(kRelaxedLoad).enum_cache(),
enum_cache);
}
// {b} can reuse the existing EnumCache, hence we only need to set the correct
// EnumLength on the map without modifying the cache itself.
previous_enum_cache =
handle(a->map().instance_descriptors().enum_cache(), a->GetIsolate());
handle(a->map().instance_descriptors(kRelaxedLoad).enum_cache(),
a->GetIsolate());
previous_keys = handle(previous_enum_cache->keys(), a->GetIsolate());
previous_indices = handle(previous_enum_cache->indices(), a->GetIsolate());
CompileRun("var s = 0; for (let key in b) { s += b[key] };");
@ -233,7 +250,8 @@ TEST(EnumCache) {
CHECK_EQ(c->map().EnumLength(), 3);
CHECK_EQ(cc->map().EnumLength(), 3);
EnumCache enum_cache = c->map().instance_descriptors().enum_cache();
EnumCache enum_cache =
c->map().instance_descriptors(kRelaxedLoad).enum_cache();
CHECK_NE(enum_cache, *factory->empty_enum_cache());
// The keys and indices caches are not updated.
CHECK_EQ(enum_cache, *previous_enum_cache);
@ -244,14 +262,18 @@ TEST(EnumCache) {
// The enum cache is shared on the descriptor array of maps {a}, {b} and
// {c} only.
CHECK_NE(cc->map().instance_descriptors().enum_cache(),
CHECK_NE(cc->map().instance_descriptors(kRelaxedLoad).enum_cache(),
*factory->empty_enum_cache());
CHECK_NE(cc->map().instance_descriptors().enum_cache(), enum_cache);
CHECK_NE(cc->map().instance_descriptors().enum_cache(),
CHECK_NE(cc->map().instance_descriptors(kRelaxedLoad).enum_cache(),
enum_cache);
CHECK_NE(cc->map().instance_descriptors(kRelaxedLoad).enum_cache(),
*previous_enum_cache);
CHECK_EQ(a->map().instance_descriptors().enum_cache(), enum_cache);
CHECK_EQ(b->map().instance_descriptors().enum_cache(), enum_cache);
CHECK_EQ(c->map().instance_descriptors().enum_cache(), enum_cache);
CHECK_EQ(a->map().instance_descriptors(kRelaxedLoad).enum_cache(),
enum_cache);
CHECK_EQ(b->map().instance_descriptors(kRelaxedLoad).enum_cache(),
enum_cache);
CHECK_EQ(c->map().instance_descriptors(kRelaxedLoad).enum_cache(),
enum_cache);
}
}

View File

@ -65,7 +65,7 @@ static double GetDoubleFieldValue(JSObject obj, FieldIndex field_index) {
}
void WriteToField(JSObject object, int index, Object value) {
DescriptorArray descriptors = object.map().instance_descriptors();
DescriptorArray descriptors = object.map().instance_descriptors(kRelaxedLoad);
InternalIndex descriptor(index);
PropertyDetails details = descriptors.GetDetails(descriptor);
object.WriteToField(descriptor, details, value);
@ -673,7 +673,8 @@ static Handle<LayoutDescriptor> TestLayoutDescriptorAppend(
}
map->InitializeDescriptors(isolate, *descriptors, *layout_descriptor);
}
Handle<LayoutDescriptor> layout_descriptor(map->layout_descriptor(), isolate);
Handle<LayoutDescriptor> layout_descriptor(
map->layout_descriptor(kAcquireLoad), isolate);
CHECK(layout_descriptor->IsConsistentWithMap(*map, true));
return layout_descriptor;
}
@ -800,7 +801,7 @@ static Handle<LayoutDescriptor> TestLayoutDescriptorAppendIfFastOrUseFull(
for (int i = 0; i < number_of_descriptors; i++) {
PropertyDetails details = descriptors->GetDetails(InternalIndex(i));
map = maps[i];
LayoutDescriptor layout_desc = map->layout_descriptor();
LayoutDescriptor layout_desc = map->layout_descriptor(kAcquireLoad);
if (layout_desc.IsSlowLayout()) {
switched_to_slow_mode = true;
@ -820,7 +821,7 @@ static Handle<LayoutDescriptor> TestLayoutDescriptorAppendIfFastOrUseFull(
CHECK(layout_desc.IsTagged(field_index + field_width_in_words));
}
}
CHECK(map->layout_descriptor().IsConsistentWithMap(*map));
CHECK(map->layout_descriptor(kAcquireLoad).IsConsistentWithMap(*map));
}
Handle<LayoutDescriptor> layout_descriptor(map->GetLayoutDescriptor(),
@ -991,10 +992,10 @@ TEST(DescriptorArrayTrimming) {
NONE, PropertyConstness::kMutable,
Representation::Double(), INSERT_TRANSITION)
.ToHandleChecked();
CHECK(map->layout_descriptor().IsConsistentWithMap(*map, true));
CHECK(map->layout_descriptor().IsSlowLayout());
CHECK(map->layout_descriptor(kAcquireLoad).IsConsistentWithMap(*map, true));
CHECK(map->layout_descriptor(kAcquireLoad).IsSlowLayout());
CHECK(map->owns_descriptors());
CHECK_EQ(8, map->layout_descriptor().length());
CHECK_EQ(8, map->layout_descriptor(kAcquireLoad).length());
{
// Add transitions to double fields.
@ -1006,35 +1007,38 @@ TEST(DescriptorArrayTrimming) {
any_type, NONE, PropertyConstness::kMutable,
Representation::Double(), INSERT_TRANSITION)
.ToHandleChecked();
CHECK(tmp_map->layout_descriptor().IsConsistentWithMap(*tmp_map, true));
CHECK(tmp_map->layout_descriptor(kAcquireLoad)
.IsConsistentWithMap(*tmp_map, true));
}
// Check that descriptors are shared.
CHECK(tmp_map->owns_descriptors());
CHECK_EQ(map->instance_descriptors(), tmp_map->instance_descriptors());
CHECK_EQ(map->layout_descriptor(), tmp_map->layout_descriptor());
CHECK_EQ(map->instance_descriptors(kRelaxedLoad),
tmp_map->instance_descriptors(kRelaxedLoad));
CHECK_EQ(map->layout_descriptor(kAcquireLoad),
tmp_map->layout_descriptor(kAcquireLoad));
}
CHECK(map->layout_descriptor().IsSlowLayout());
CHECK_EQ(16, map->layout_descriptor().length());
CHECK(map->layout_descriptor(kAcquireLoad).IsSlowLayout());
CHECK_EQ(16, map->layout_descriptor(kAcquireLoad).length());
// The unused tail of the layout descriptor is now "durty" because of sharing.
CHECK(map->layout_descriptor().IsConsistentWithMap(*map));
CHECK(map->layout_descriptor(kAcquireLoad).IsConsistentWithMap(*map));
for (int i = kSplitFieldIndex + 1; i < kTrimmedLayoutDescriptorLength; i++) {
CHECK(!map->layout_descriptor().IsTagged(i));
CHECK(!map->layout_descriptor(kAcquireLoad).IsTagged(i));
}
CHECK_LT(map->NumberOfOwnDescriptors(),
map->instance_descriptors().number_of_descriptors());
map->instance_descriptors(kRelaxedLoad).number_of_descriptors());
// Call GC that should trim both |map|'s descriptor array and layout
// descriptor.
CcTest::CollectAllGarbage();
// The unused tail of the layout descriptor is now "clean" again.
CHECK(map->layout_descriptor().IsConsistentWithMap(*map, true));
CHECK(map->layout_descriptor(kAcquireLoad).IsConsistentWithMap(*map, true));
CHECK(map->owns_descriptors());
CHECK_EQ(map->NumberOfOwnDescriptors(),
map->instance_descriptors().number_of_descriptors());
CHECK(map->layout_descriptor().IsSlowLayout());
CHECK_EQ(8, map->layout_descriptor().length());
map->instance_descriptors(kRelaxedLoad).number_of_descriptors());
CHECK(map->layout_descriptor(kAcquireLoad).IsSlowLayout());
CHECK_EQ(8, map->layout_descriptor(kAcquireLoad).length());
{
// Add transitions to tagged fields.
@ -1047,18 +1051,21 @@ TEST(DescriptorArrayTrimming) {
any_type, NONE, PropertyConstness::kMutable,
Representation::Tagged(), INSERT_TRANSITION)
.ToHandleChecked();
CHECK(tmp_map->layout_descriptor().IsConsistentWithMap(*tmp_map, true));
CHECK(tmp_map->layout_descriptor(kAcquireLoad)
.IsConsistentWithMap(*tmp_map, true));
}
tmp_map = Map::CopyWithField(isolate, tmp_map, CcTest::MakeString("dbl"),
any_type, NONE, PropertyConstness::kMutable,
Representation::Double(), INSERT_TRANSITION)
.ToHandleChecked();
CHECK(tmp_map->layout_descriptor().IsConsistentWithMap(*tmp_map, true));
CHECK(tmp_map->layout_descriptor(kAcquireLoad)
.IsConsistentWithMap(*tmp_map, true));
// Check that descriptors are shared.
CHECK(tmp_map->owns_descriptors());
CHECK_EQ(map->instance_descriptors(), tmp_map->instance_descriptors());
CHECK_EQ(map->instance_descriptors(kRelaxedLoad),
tmp_map->instance_descriptors(kRelaxedLoad));
}
CHECK(map->layout_descriptor().IsSlowLayout());
CHECK(map->layout_descriptor(kAcquireLoad).IsSlowLayout());
}
@ -1390,7 +1397,7 @@ TEST(LayoutDescriptorSharing) {
.ToHandleChecked();
}
Handle<LayoutDescriptor> split_layout_descriptor(
split_map->layout_descriptor(), isolate);
split_map->layout_descriptor(kAcquireLoad), isolate);
CHECK(split_layout_descriptor->IsConsistentWithMap(*split_map, true));
CHECK(split_layout_descriptor->IsSlowLayout());
CHECK(split_map->owns_descriptors());
@ -1401,12 +1408,13 @@ TEST(LayoutDescriptorSharing) {
Representation::Double(), INSERT_TRANSITION)
.ToHandleChecked();
CHECK(!split_map->owns_descriptors());
CHECK_EQ(*split_layout_descriptor, split_map->layout_descriptor());
CHECK_EQ(*split_layout_descriptor,
split_map->layout_descriptor(kAcquireLoad));
// Layout descriptors should be shared with |split_map|.
CHECK(map1->owns_descriptors());
CHECK_EQ(*split_layout_descriptor, map1->layout_descriptor());
CHECK(map1->layout_descriptor().IsConsistentWithMap(*map1, true));
CHECK_EQ(*split_layout_descriptor, map1->layout_descriptor(kAcquireLoad));
CHECK(map1->layout_descriptor(kAcquireLoad).IsConsistentWithMap(*map1, true));
Handle<Map> map2 =
Map::CopyWithField(isolate, split_map, CcTest::MakeString("bar"),
@ -1416,8 +1424,8 @@ TEST(LayoutDescriptorSharing) {
// Layout descriptors should not be shared with |split_map|.
CHECK(map2->owns_descriptors());
CHECK_NE(*split_layout_descriptor, map2->layout_descriptor());
CHECK(map2->layout_descriptor().IsConsistentWithMap(*map2, true));
CHECK_NE(*split_layout_descriptor, map2->layout_descriptor(kAcquireLoad));
CHECK(map2->layout_descriptor(kAcquireLoad).IsConsistentWithMap(*map2, true));
}
static void TestWriteBarrier(Handle<Map> map, Handle<Map> new_map,

View File

@ -142,8 +142,8 @@ TEST_F(BytecodeArrayWriterUnittest, SimpleExample) {
Handle<BytecodeArray> bytecode_array =
writer()->ToBytecodeArray(isolate(), 0, 0, factory()->empty_byte_array());
bytecode_array->set_synchronized_source_position_table(
*writer()->ToSourcePositionTable(isolate()));
bytecode_array->set_source_position_table(
*writer()->ToSourcePositionTable(isolate()), kReleaseStore);
CHECK_EQ(bytecodes()->size(), arraysize(expected_bytes));
PositionTableEntry expected_positions[] = {{0, 55, true}, {8, 70, true}};
@ -229,8 +229,8 @@ TEST_F(BytecodeArrayWriterUnittest, ComplexExample) {
Handle<BytecodeArray> bytecode_array =
writer()->ToBytecodeArray(isolate(), 0, 0, factory()->empty_byte_array());
bytecode_array->set_synchronized_source_position_table(
*writer()->ToSourcePositionTable(isolate()));
bytecode_array->set_source_position_table(
*writer()->ToSourcePositionTable(isolate()), kReleaseStore);
SourcePositionTableIterator source_iterator(
bytecode_array->SourcePositionTable());
for (size_t i = 0; i < arraysize(expected_positions); ++i) {
@ -278,8 +278,8 @@ TEST_F(BytecodeArrayWriterUnittest, ElideNoneffectfulBytecodes) {
Handle<BytecodeArray> bytecode_array =
writer()->ToBytecodeArray(isolate(), 0, 0, factory()->empty_byte_array());
bytecode_array->set_synchronized_source_position_table(
*writer()->ToSourcePositionTable(isolate()));
bytecode_array->set_source_position_table(
*writer()->ToSourcePositionTable(isolate()), kReleaseStore);
SourcePositionTableIterator source_iterator(
bytecode_array->SourcePositionTable());
for (size_t i = 0; i < arraysize(expected_positions); ++i) {
@ -346,8 +346,8 @@ TEST_F(BytecodeArrayWriterUnittest, DeadcodeElimination) {
Handle<BytecodeArray> bytecode_array =
writer()->ToBytecodeArray(isolate(), 0, 0, factory()->empty_byte_array());
bytecode_array->set_synchronized_source_position_table(
*writer()->ToSourcePositionTable(isolate()));
bytecode_array->set_source_position_table(
*writer()->ToSourcePositionTable(isolate()), kReleaseStore);
SourcePositionTableIterator source_iterator(
bytecode_array->SourcePositionTable());
for (size_t i = 0; i < arraysize(expected_positions); ++i) {