[cleanup] Remove synchronized_ from map accessors

Continuing the cleanups: pass explicit tag arguments (kAcquireLoad,
kReleaseStore) to the accessors rather than encoding the synchronization
in the accessors' names via a synchronized_ prefix.

Bug: v8:7790
Change-Id: I3c2d0ccf54fa6161dbd9d12b1b9743a046534521
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2897095
Reviewed-by: Dominik Inführ <dinfuehr@chromium.org>
Reviewed-by: Georg Neis <neis@chromium.org>
Reviewed-by: Ross McIlroy <rmcilroy@chromium.org>
Commit-Queue: Santiago Aboy Solanes <solanes@chromium.org>
Cr-Commit-Position: refs/heads/master@{#74609}
Author: Santiago Aboy Solanes, 2021-05-14 17:12:27 +01:00 (committed by V8 LUCI CQ)
Parent: 34ba5f0438
Commit: f4d362b6fd
23 changed files with 60 additions and 53 deletions
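
The pattern throughout the diff: memory ordering moves from the accessor's
name into a tag argument. Schematically (illustration only, not a compilable
excerpt; obj stands for any HeapObject):

  //   Before: Map m = obj.synchronized_map();       // acquire load
  //           obj.synchronized_set_map(new_map);    // release store
  //   After:  Map m = obj.map(kAcquireLoad);        // acquire load
  //           obj.set_map(new_map, kReleaseStore);  // release store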


@@ -753,7 +753,7 @@ class ArrayConcatVisitor {
         array, fast_elements() ? HOLEY_ELEMENTS : DICTIONARY_ELEMENTS);
     array->set_length(*length);
     array->set_elements(*storage_fixed_array());
-    array->synchronized_set_map(*map);
+    array->set_map(*map, kReleaseStore);
     return array;
   }


@@ -866,8 +866,8 @@ PropertyAccessInfo AccessInfoFactory::ComputePropertyAccessInfo(
     // Acquire synchronously the map's prototype's map to guarantee that every
     // time we use it, we use the same Map.
-    Handle<Map> map_prototype_map = broker()->CanonicalPersistentHandle(
-        map->prototype().synchronized_map());
+    Handle<Map> map_prototype_map =
+        broker()->CanonicalPersistentHandle(map->prototype().map(kAcquireLoad));
     if (!map_prototype_map->IsJSObjectMap()) {
       // Perform the implicit ToObject for primitives here.
       // Implemented according to ES6 section 7.3.2 GetV (V, P).
@@ -879,7 +879,7 @@ PropertyAccessInfo AccessInfoFactory::ComputePropertyAccessInfo(
       map = broker()->CanonicalPersistentHandle(
           maybe_constructor->initial_map());
       map_prototype_map = broker()->CanonicalPersistentHandle(
-          map->prototype().synchronized_map());
+          map->prototype().map(kAcquireLoad));
       DCHECK(map_prototype_map->IsJSObjectMap());
     } else if (map->prototype().IsNull()) {
       if (dictionary_prototype_on_chain) {


@@ -1275,7 +1275,7 @@ HeapObjectData::HeapObjectData(JSHeapBroker* broker, ObjectData** storage,
       // instance_type_ member. In the case of constructing the MapData for the
       // meta map (whose map is itself), this member has not yet been
       // initialized.
-      map_(broker->GetOrCreateData(object->synchronized_map())) {
+      map_(broker->GetOrCreateData(object->map(kAcquireLoad))) {
   CHECK_IMPLIES(kind == kSerializedHeapObject,
                 broker->mode() == JSHeapBroker::kSerializing);
   CHECK_IMPLIES(broker->mode() == JSHeapBroker::kSerialized,


@@ -1883,7 +1883,7 @@ void TranslatedState::InitializeJSObjectAt(
       WRITE_BARRIER(*object_storage, offset, *field_value);
     }
   }
-  object_storage->synchronized_set_map(*map);
+  object_storage->set_map(*map, kReleaseStore);
 }

 void TranslatedState::InitializeObjectWithTaggedFieldsAt(
@@ -1920,7 +1920,7 @@ void TranslatedState::InitializeObjectWithTaggedFieldsAt(
     WRITE_BARRIER(*object_storage, offset, *field_value);
   }
-  object_storage->synchronized_set_map(*map);
+  object_storage->set_map(*map, kReleaseStore);
 }

 TranslatedValue* TranslatedState::ResolveCapturedObject(TranslatedValue* slot) {


@@ -8,6 +8,7 @@
 #include <unordered_map>

 #include "include/v8config.h"
 #include "src/common/globals.h"
+#include "src/execution/isolate.h"
 #include "src/heap/gc-tracer.h"
 #include "src/heap/heap-inl.h"
@@ -481,7 +482,7 @@ void ConcurrentMarking::Run(JobDelegate* delegate,
             addr == new_large_object) {
           local_marking_worklists.PushOnHold(object);
         } else {
-          Map map = object.synchronized_map(isolate);
+          Map map = object.map(isolate, kAcquireLoad);
           if (is_per_context_mode) {
             Address context;
             if (native_context_inferrer.Infer(isolate, map, object, &context)) {


@@ -2210,7 +2210,7 @@ Handle<JSGlobalObject> Factory::NewJSGlobalObject(
   // Set up the global object as a normalized object.
   global->set_global_dictionary(*dictionary, kReleaseStore);
-  global->synchronized_set_map(raw_map);
+  global->set_map(raw_map, kReleaseStore);

   // Make sure result is a global object with properties in dictionary.
   DCHECK(global->IsJSGlobalObject() && !global->HasFastProperties());
@@ -2841,7 +2841,7 @@ void Factory::ReinitializeJSGlobalProxy(Handle<JSGlobalProxy> object,
   // Reset the map for the object.
   JSGlobalProxy raw = *object;
-  raw.synchronized_set_map(*map);
+  raw.set_map(*map, kReleaseStore);

   // Reinitialize the object from the constructor map.
   InitializeJSObjectFromMap(raw, *raw_properties_or_hash, *map);


@@ -205,7 +205,7 @@ void LiveObjectRange<mode>::iterator::AdvanceToNextValidObject() {
         // make sure that we skip all set bits in the black area until the
         // object ends.
         HeapObject black_object = HeapObject::FromAddress(addr);
-        Object map_object = black_object.synchronized_map();
+        Object map_object = black_object.map(kAcquireLoad);
         CHECK(map_object.IsMap());
         map = Map::cast(map_object);
         DCHECK(map.IsMap());
@@ -237,7 +237,7 @@ void LiveObjectRange<mode>::iterator::AdvanceToNextValidObject() {
       }
     } else if ((mode == kGreyObjects || mode == kAllLiveObjects)) {
       object = HeapObject::FromAddress(addr);
-      Object map_object = object.synchronized_map();
+      Object map_object = object.map(kAcquireLoad);
       CHECK(map_object.IsMap());
       map = Map::cast(map_object);
       DCHECK(map.IsMap());


@@ -4630,7 +4630,7 @@ void MinorMarkCompactCollector::MakeIterable(
         p->heap()->CreateFillerObjectAt(free_start, static_cast<int>(size),
                                         ClearRecordedSlots::kNo);
       }
-      Map map = object.synchronized_map();
+      Map map = object.map(kAcquireLoad);
       int size = object.SizeFromMap(map);
       free_start = free_end + size;
     }
@@ -5085,7 +5085,7 @@ void MinorMarkCompactCollector::TraceFragmentation() {
          free_bytes_index++;
        }
      }
-    Map map = object.synchronized_map();
+    Map map = object.map(kAcquireLoad);
     int size = object.SizeFromMap(map);
     live_bytes += size;
     free_start = free_end + size;


@@ -336,7 +336,7 @@ std::unique_ptr<v8::MeasureMemoryDelegate> MemoryMeasurement::DefaultDelegate(
 bool NativeContextInferrer::InferForContext(Isolate* isolate, Context context,
                                             Address* native_context) {
-  Map context_map = context.synchronized_map();
+  Map context_map = context.map(kAcquireLoad);
   Object maybe_native_context =
       TaggedField<Object, Map::kConstructorOrBackPointerOrNativeContextOffset>::
           Acquire_Load(isolate, context_map);


@@ -390,7 +390,7 @@ int Sweeper::RawSweep(
           free_start, free_end, p, non_empty_typed_slots, &free_ranges_map,
           &old_to_new_cleanup);
     }
-    Map map = object.synchronized_map();
+    Map map = object.map(kAcquireLoad);
     DCHECK(map.IsMap());
     int size = object.SizeFromMap(map);
     live_bytes += size;


@@ -603,8 +603,8 @@ Handle<Object> JsonParser<Char>::BuildJsonObject(
           mutable_double_address += kMutableDoubleSize;
         } else {
           DCHECK(value.IsHeapNumber());
-          HeapObject::cast(value).synchronized_set_map(
-              *factory()->heap_number_map());
+          HeapObject::cast(value).set_map(*factory()->heap_number_map(),
+                                          kReleaseStore);
         }
       }
       object->RawFastInobjectPropertyAtPut(index, value, mode);


@@ -715,7 +715,7 @@ bool Code::IsWeakObject(HeapObject object) {
 }

 bool Code::IsWeakObjectInOptimizedCode(HeapObject object) {
-  Map map = object.synchronized_map();
+  Map map = object.map(kAcquireLoad);
   InstanceType instance_type = map.instance_type();
   if (InstanceTypeChecker::IsMap(instance_type)) {
     return Map::cast(object).CanTransition();


@@ -43,8 +43,8 @@ class HeapObject : public Object {
   inline void set_map_no_write_barrier(Map value);

   // Access the map using acquire load and release store.
-  DECL_GETTER(synchronized_map, Map)
-  inline void synchronized_set_map(Map value);
+  DECL_ACQUIRE_GETTER(map, Map)
+  inline void set_map(Map value, ReleaseStoreTag);

   // Compare-and-swaps map word using release store, returns true if the map
   // word was actually swapped.

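The declaration above relies on tag dispatch: empty tag types select an
overload, so the memory order is visible at every call site. A self-contained
sketch of the idiom (simplified stand-ins, not V8's actual definitions):

  #include <atomic>
  #include <cstdint>

  struct AcquireLoadTag {};
  struct ReleaseStoreTag {};
  inline constexpr AcquireLoadTag kAcquireLoad{};
  inline constexpr ReleaseStoreTag kReleaseStore{};

  class HeapObjectLike {
   public:
    // Plain load: fine when no other thread can mutate the map word.
    uintptr_t map() const {
      return map_word_.load(std::memory_order_relaxed);
    }
    // Acquire load: pairs with the release store below.
    uintptr_t map(AcquireLoadTag) const {
      return map_word_.load(std::memory_order_acquire);
    }
    // Release store: publishes all prior writes to the object.
    void set_map(uintptr_t m, ReleaseStoreTag) {
      map_word_.store(m, std::memory_order_release);
    }

   private:
    std::atomic<uintptr_t> map_word_{0};
  };

The empty structs cost nothing at runtime; overload resolution picks the
right memory order at compile time, which is exactly what the renamed V8
accessors do.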

@@ -2766,14 +2766,14 @@ void MigrateFastToFast(Isolate* isolate, Handle<JSObject> object,
   // If the map does not add named properties, simply set the map.
   if (old_map->NumberOfOwnDescriptors() ==
       new_map->NumberOfOwnDescriptors()) {
-    object->synchronized_set_map(*new_map);
+    object->set_map(*new_map, kReleaseStore);
     return;
   }

   // If the map adds a new kDescriptor property, simply set the map.
   PropertyDetails details = new_map->GetLastDescriptorDetails(isolate);
   if (details.location() == kDescriptor) {
-    object->synchronized_set_map(*new_map);
+    object->set_map(*new_map, kReleaseStore);
     return;
   }
@@ -2789,7 +2789,7 @@ void MigrateFastToFast(Isolate* isolate, Handle<JSObject> object,
       auto value = isolate->factory()->NewHeapNumberWithHoleNaN();
       object->FastPropertyAtPut(index, *value);
     }
-    object->synchronized_set_map(*new_map);
+    object->set_map(*new_map, kReleaseStore);
     return;
   }
@@ -2817,7 +2817,7 @@ void MigrateFastToFast(Isolate* isolate, Handle<JSObject> object,
     // Set the new property value and do the map transition.
     object->SetProperties(*new_storage);
-    object->synchronized_set_map(*new_map);
+    object->set_map(*new_map, kReleaseStore);
     return;
   }
@@ -2830,7 +2830,7 @@ void MigrateFastToFast(Isolate* isolate, Handle<JSObject> object,
   // converted to doubles.
   if (!old_map->InstancesNeedRewriting(*new_map, number_of_fields, inobject,
                                        unused, &old_number_of_fields)) {
-    object->synchronized_set_map(*new_map);
+    object->set_map(*new_map, kReleaseStore);
     return;
   }
@@ -2945,7 +2945,7 @@ void MigrateFastToFast(Isolate* isolate, Handle<JSObject> object,
   // We are storing the new map using release store after creating a filler for
   // the left-over space to avoid races with the sweeper thread.
-  object->synchronized_set_map(*new_map);
+  object->set_map(*new_map, kReleaseStore);
 }

 void MigrateFastToSlow(Isolate* isolate, Handle<JSObject> object,
@@ -3043,7 +3043,7 @@ void MigrateFastToSlow(Isolate* isolate, Handle<JSObject> object,
   // We are storing the new map using release store after creating a filler for
   // the left-over space to avoid races with the sweeper thread.
-  object->synchronized_set_map(*new_map);
+  object->set_map(*new_map, kReleaseStore);

   if (V8_ENABLE_SWISS_NAME_DICTIONARY_BOOL) {
     object->SetProperties(*ord_dictionary);
@@ -3087,7 +3087,7 @@ void JSObject::MigrateToMap(Isolate* isolate, Handle<JSObject> object,
     CHECK(new_map->is_dictionary_map());

     // Slow-to-slow migration is trivial.
-    object->synchronized_set_map(*new_map);
+    object->set_map(*new_map, kReleaseStore);
   } else if (!new_map->is_dictionary_map()) {
     MigrateFastToFast(isolate, object, new_map);
     if (old_map->is_prototype_map()) {
@@ -3197,7 +3197,7 @@ void JSObject::AllocateStorageForMap(Handle<JSObject> object, Handle<Map> map) {
       Object value = storage->get(i);
       object->FastPropertyAtPut(index, value);
     }
-    object->synchronized_set_map(*map);
+    object->set_map(*map, kReleaseStore);
 }

 void JSObject::MigrateInstance(Isolate* isolate, Handle<JSObject> object) {
@@ -3487,7 +3487,7 @@ void JSObject::MigrateSlowToFast(Handle<JSObject> object,
     DCHECK_LE(unused_property_fields, inobject_props);
     // Transform the object.
     new_map->SetInObjectUnusedPropertyFields(inobject_props);
-    object->synchronized_set_map(*new_map);
+    object->set_map(*new_map, kReleaseStore);
     object->SetProperties(ReadOnlyRoots(isolate).empty_fixed_array());
     // Check that it really works.
     DCHECK(object->HasFastProperties());
@@ -3606,7 +3606,7 @@ void JSObject::MigrateSlowToFast(Handle<JSObject> object,
     LOG(isolate, MapEvent("SlowToFast", old_map, new_map, reason));
   }
   // Transform the object.
-  object->synchronized_set_map(*new_map);
+  object->set_map(*new_map, kReleaseStore);
   object->SetProperties(*fields);

   DCHECK(object->IsJSObject());
@@ -4772,7 +4772,7 @@ void JSObject::SetImmutableProto(Handle<JSObject> object) {
   Handle<Map> new_map =
       Map::TransitionToImmutableProto(object->GetIsolate(), map);
-  object->synchronized_set_map(*new_map);
+  object->set_map(*new_map, kReleaseStore);
 }

 void JSObject::EnsureCanContainElements(Handle<JSObject> object,


@@ -1364,7 +1364,7 @@ ConcurrentLookupIterator::TryGetOwnConstantElement(
     // The access guard below protects only internalized string accesses.
     // TODO(jgruber): Support other string kinds.
-    Map wrapped_string_map = wrapped_string.synchronized_map(isolate);
+    Map wrapped_string_map = wrapped_string.map(isolate, kAcquireLoad);
     if (!InstanceTypeChecker::IsInternalizedString(
             wrapped_string_map.instance_type())) {
       return kGaveUp;


@@ -731,17 +731,21 @@ void HeapObject::set_map(Map value) {
 #endif
 }

-DEF_GETTER(HeapObject, synchronized_map, Map) {
-  return map_word(cage_base, kAcquireLoad).ToMap();
+Map HeapObject::map(AcquireLoadTag tag) const {
+  PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
+  return HeapObject::map(cage_base, tag);
+}
+Map HeapObject::map(PtrComprCageBase cage_base, AcquireLoadTag tag) const {
+  return map_word(cage_base, tag).ToMap();
 }

-void HeapObject::synchronized_set_map(Map value) {
+void HeapObject::set_map(Map value, ReleaseStoreTag tag) {
 #ifdef VERIFY_HEAP
   if (FLAG_verify_heap && !value.is_null()) {
     GetHeapFromWritableObject(*this)->VerifyObjectLayoutChange(*this, value);
   }
 #endif
-  set_map_word(MapWord::FromMap(value), kReleaseStore);
+  set_map_word(MapWord::FromMap(value), tag);
 #ifndef V8_DISABLE_WRITE_BARRIERS
 if (!value.is_null()) {
     // TODO(1600) We are passing kNullAddress as a slot because maps can never

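These acquire loads pair with set_map(..., kReleaseStore): a thread that
observes the new map via map(kAcquireLoad) is guaranteed to see every write
made to the object before the map was stored, which is what makes the
filler-then-set-map sequences above safe against the concurrent sweeper.
A minimal illustration of the pairing (hypothetical, using std::atomic
directly rather than V8's map word):

  #include <atomic>
  #include <cassert>
  #include <thread>

  std::atomic<int> map_word{0};
  int field = 0;  // stands in for the object's payload

  void mutator() {
    field = 42;                                    // write fields first...
    map_word.store(1, std::memory_order_release);  // ...then publish the map
  }

  void sweeper() {
    if (map_word.load(std::memory_order_acquire) == 1) {
      assert(field == 42);  // visible thanks to acquire/release pairing
    }
  }

  int main() {
    std::thread t1(mutator), t2(sweeper);
    t1.join();
    t2.join();
  }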

@@ -5581,8 +5581,9 @@ Handle<Object> JSPromise::TriggerPromiseReactions(Isolate* isolate,
         static_cast<int>(
             PromiseReactionJobTask::kSizeOfAllPromiseReactionJobTasks));
     if (type == PromiseReaction::kFulfill) {
-      task->synchronized_set_map(
-          ReadOnlyRoots(isolate).promise_fulfill_reaction_job_task_map());
+      task->set_map(
+          ReadOnlyRoots(isolate).promise_fulfill_reaction_job_task_map(),
+          kReleaseStore);
       Handle<PromiseFulfillReactionJobTask>::cast(task)->set_argument(
           *argument);
       Handle<PromiseFulfillReactionJobTask>::cast(task)->set_context(
@@ -5601,8 +5602,9 @@ Handle<Object> JSPromise::TriggerPromiseReactions(Isolate* isolate,
               kContinuationPreservedEmbedderDataOffset));
     } else {
       DisallowGarbageCollection no_gc;
-      task->synchronized_set_map(
-          ReadOnlyRoots(isolate).promise_reject_reaction_job_task_map());
+      task->set_map(
+          ReadOnlyRoots(isolate).promise_reject_reaction_job_task_map(),
+          kReleaseStore);
       Handle<PromiseRejectReactionJobTask>::cast(task)->set_argument(*argument);
       Handle<PromiseRejectReactionJobTask>::cast(task)->set_context(
           *handler_context);


@@ -733,8 +733,8 @@ void SharedFunctionInfo::ClearPreparseData() {
                 UncompiledDataWithPreparseData::kSize);
   STATIC_ASSERT(UncompiledDataWithoutPreparseData::kSize ==
                 UncompiledData::kHeaderSize);
-  data.synchronized_set_map(
-      GetReadOnlyRoots().uncompiled_data_without_preparse_data_map());
+  data.set_map(GetReadOnlyRoots().uncompiled_data_without_preparse_data_map(),
+               kReleaseStore);

   // Fill the remaining space with filler.
   heap->CreateFillerObjectAt(


@@ -119,7 +119,7 @@ CAST_ACCESSOR(ExternalString)
 CAST_ACCESSOR(ExternalTwoByteString)

 StringShape::StringShape(const String str)
-    : type_(str.synchronized_map().instance_type()) {
+    : type_(str.map(kAcquireLoad).instance_type()) {
   set_valid();
   DCHECK_EQ(type_ & kIsNotStringMask, kStringTag);
 }


@@ -127,7 +127,7 @@ void String::MakeThin(Isolate* isolate, String internalized) {
   ThinString thin = ThinString::unchecked_cast(*this);
   thin.set_actual(internalized);
   DCHECK_GE(old_size, ThinString::kSize);
-  this->synchronized_set_map(*map);
+  this->set_map(*map, kReleaseStore);
   Address thin_end = thin.address() + ThinString::kSize;
   int size_delta = old_size - ThinString::kSize;
   if (size_delta != 0) {
@@ -200,7 +200,7 @@ bool String::MakeExternal(v8::String::ExternalStringResource* resource) {
   // We are storing the new map using release store after creating a filler for
   // the left-over space to avoid races with the sweeper thread.
-  this->synchronized_set_map(new_map);
+  this->set_map(new_map, kReleaseStore);

   ExternalTwoByteString self = ExternalTwoByteString::cast(*this);
   self.AllocateExternalPointerEntries(isolate);
@@ -277,7 +277,7 @@ bool String::MakeExternal(v8::String::ExternalOneByteStringResource* resource) {
   // We are storing the new map using release store after creating a filler for
   // the left-over space to avoid races with the sweeper thread.
-  this->synchronized_set_map(new_map);
+  this->set_map(new_map, kReleaseStore);

   ExternalOneByteString self = ExternalOneByteString::cast(*this);
   self.AllocateExternalPointerEntries(isolate);


@@ -374,7 +374,7 @@ bool AddDescriptorsByTemplate(
   }

   // Atomically commit the changes.
-  receiver->synchronized_set_map(*map);
+  receiver->set_map(*map, kReleaseStore);
   if (elements_dictionary->NumberOfElements() > 0) {
     receiver->set_elements(*elements_dictionary);
   }
@@ -468,7 +468,7 @@ bool AddDescriptorsByTemplate(
   }

   // Atomically commit the changes.
-  receiver->synchronized_set_map(*map);
+  receiver->set_map(*map, kReleaseStore);
   receiver->set_raw_properties_or_hash(*properties_dictionary);
   if (elements_dictionary->NumberOfElements() > 0) {
     receiver->set_elements(*elements_dictionary);


@@ -238,7 +238,7 @@ bool DeleteObjectPropertyFast(Isolate* isolate, Handle<JSReceiver> receiver,
   // the "deoptimize dependent code" mechanism.
   receiver_map->NotifyLeafMapLayoutChange(isolate);
   // Finally, perform the map rollback.
-  receiver->synchronized_set_map(*parent_map);
+  receiver->set_map(*parent_map, kReleaseStore);
 #if VERIFY_HEAP
   receiver->HeapObjectVerify(isolate);
   receiver->property_array().PropertyArrayVerify(isolate);


@@ -45,9 +45,9 @@ class ConcurrentSearchThread final : public v8::base::Thread {
     for (Handle<JSObject> js_obj : handles_) {
       // Walk up the prototype chain all the way to the top.
-      Handle<Map> map(js_obj->synchronized_map(), &local_heap);
+      Handle<Map> map(js_obj->map(kAcquireLoad), &local_heap);
       while (!map->prototype().IsNull()) {
-        Handle<Map> map_prototype_map(map->prototype().synchronized_map(),
+        Handle<Map> map_prototype_map(map->prototype().map(kAcquireLoad),
                                       &local_heap);
         if (!map_prototype_map->IsJSObjectMap()) {
           break;