[ext-code-space] Make HeapObject::IsBlah() predicates faster
... by using cage-friendly versions of HeapObject::IsBlah(), HeapObject::map(),
HeapObject::map_word() and HeapObject::Size() on hot paths.

Bug: v8:11880
Change-Id: I70b72e46cc867b6b2ddbc48cd5e6a74ae4208397
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3308800
Reviewed-by: Camillo Bruni <cbruni@chromium.org>
Reviewed-by: Jakob Gruber <jgruber@chromium.org>
Commit-Queue: Igor Sheludko <ishell@chromium.org>
Cr-Commit-Position: refs/heads/main@{#78161}
parent 3abe95def3
commit a421ac639d
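The change reduces per-call overhead rather than changing behavior: deriving the pointer-compression cage base from an arbitrary object pointer costs extra work, so hot paths compute it once (typically from the Isolate) and pass it into IsBlah()/map()/Size(). The following is a minimal standalone C++ sketch of that pattern; PtrComprCageBase, GetCageBaseSlow() and the inline instance_type field are simplified stand-ins for illustration, not V8's real types or layout.

// Minimal model of the "cage-friendly" pattern (stand-in types, not V8's
// real API): hot paths compute the cage base once and thread it through,
// instead of re-deriving it from the object pointer on every predicate call.
#include <cstdint>
#include <iostream>

struct PtrComprCageBase {
  std::uintptr_t base;
};

struct HeapObject {
  std::uintptr_t ptr;
  std::uint16_t instance_type;  // Real V8 reads this from the Map instead.

  // Hypothetical stand-in for GetPtrComprCageBaseSlow(): derive the cage
  // base by masking the object pointer down to a 4GB-aligned boundary.
  PtrComprCageBase GetCageBaseSlow() const {
    return PtrComprCageBase{ptr & ~std::uintptr_t{0xFFFFFFFFu}};
  }

  // Parameterless predicate: pays for the cage-base derivation on each call.
  bool IsString() const { return IsString(GetCageBaseSlow()); }

  // Cage-friendly predicate: the caller supplies a precomputed cage base and
  // reuses it across many checks.
  bool IsString(PtrComprCageBase /*cage_base*/) const {
    // Real code would decompress the map pointer relative to cage_base and
    // read Map::instance_type(); this model keeps the type inline.
    return instance_type < 64;  // Stand-in for FIRST_NONSTRING_TYPE.
  }
};

int main() {
  HeapObject obj{0x100000001000u, 10};
  PtrComprCageBase cage_base = obj.GetCageBaseSlow();  // computed once
  // Hot loop: reuse cage_base across many checks instead of re-deriving it.
  for (int i = 0; i < 3; ++i) {
    std::cout << obj.IsString(cage_base) << "\n";
  }
  return 0;
}

The diff below applies the same shape: verification, printing and heap-profiler loops hoist a PtrComprCageBase local out of their bodies, while the parameterless predicates survive only as a slower fallback for debug-time checks.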
@@ -170,22 +170,23 @@ void TaggedIndex::TaggedIndexVerify(Isolate* isolate) {
 
 void HeapObject::HeapObjectVerify(Isolate* isolate) {
   CHECK(IsHeapObject());
-  VerifyPointer(isolate, map(isolate));
-  CHECK(map(isolate).IsMap());
+  PtrComprCageBase cage_base(isolate);
+  VerifyPointer(isolate, map(cage_base));
+  CHECK(map(cage_base).IsMap(cage_base));
 
-  switch (map().instance_type()) {
+  switch (map(cage_base).instance_type()) {
 #define STRING_TYPE_CASE(TYPE, size, name, CamelName) case TYPE:
     STRING_TYPE_LIST(STRING_TYPE_CASE)
 #undef STRING_TYPE_CASE
-      if (IsConsString()) {
+      if (IsConsString(cage_base)) {
         ConsString::cast(*this).ConsStringVerify(isolate);
-      } else if (IsSlicedString()) {
+      } else if (IsSlicedString(cage_base)) {
         SlicedString::cast(*this).SlicedStringVerify(isolate);
-      } else if (IsThinString()) {
+      } else if (IsThinString(cage_base)) {
         ThinString::cast(*this).ThinStringVerify(isolate);
-      } else if (IsSeqString()) {
+      } else if (IsSeqString(cage_base)) {
         SeqString::cast(*this).SeqStringVerify(isolate);
-      } else if (IsExternalString()) {
+      } else if (IsExternalString(cage_base)) {
         ExternalString::cast(*this).ExternalStringVerify(isolate);
       } else {
         String::cast(*this).StringVerify(isolate);
@@ -56,11 +56,12 @@ namespace {
 
 void PrintHeapObjectHeaderWithoutMap(HeapObject object, std::ostream& os,
                                      const char* id) {
+  PtrComprCageBase cage_base = GetPtrComprCageBaseSlow(object);
   os << reinterpret_cast<void*>(object.ptr()) << ": [";
   if (id != nullptr) {
     os << id;
   } else {
-    os << object.map().instance_type();
+    os << object.map(cage_base).instance_type();
   }
   os << "]";
   if (ReadOnlyHeap::Contains(object)) {
@@ -101,11 +102,14 @@ void PrintDictionaryContents(std::ostream& os, T dict) {
 
 void HeapObject::PrintHeader(std::ostream& os, const char* id) {
   PrintHeapObjectHeaderWithoutMap(*this, os, id);
-  if (!IsMap()) os << "\n - map: " << Brief(map());
+  PtrComprCageBase cage_base = GetPtrComprCageBaseSlow(*this);
+  if (!IsMap(cage_base)) os << "\n - map: " << Brief(map(cage_base));
 }
 
 void HeapObject::HeapObjectPrint(std::ostream& os) {
-  InstanceType instance_type = map().instance_type();
+  PtrComprCageBase cage_base = GetPtrComprCageBaseSlow(*this);
+
+  InstanceType instance_type = map(cage_base).instance_type();
 
   if (instance_type < FIRST_NONSTRING_TYPE) {
     String::cast(*this).StringPrint(os);
@@ -4151,8 +4151,9 @@ bool Isolate::use_optimizer() {
 }
 
 void Isolate::IncreaseTotalRegexpCodeGenerated(Handle<HeapObject> code) {
-  DCHECK(code->IsCode() || code->IsByteArray());
-  total_regexp_code_generated_ += code->Size();
+  PtrComprCageBase cage_base(this);
+  DCHECK(code->IsCode(cage_base) || code->IsByteArray(cage_base));
+  total_regexp_code_generated_ += code->Size(cage_base);
 }
 
 bool Isolate::NeedsDetailedOptimizedCodeLineInfo() const {
@@ -121,7 +121,6 @@ class HeapObject : public Object {
   // Iterates over pointers contained in the object (including the Map).
   // If it's not performance critical iteration use the non-templatized
   // version.
-  void Iterate(ObjectVisitor* v);
   void Iterate(PtrComprCageBase cage_base, ObjectVisitor* v);
 
   template <typename ObjectVisitor>
@@ -133,7 +132,6 @@ class HeapObject : public Object {
   // object, and so is safe to call while the map pointer is modified.
   // If it's not performance critical iteration use the non-templatized
   // version.
-  inline void IterateBody(ObjectVisitor* v);
   void IterateBody(PtrComprCageBase cage_base, ObjectVisitor* v);
   void IterateBody(Map map, int object_size, ObjectVisitor* v);
 
@@ -102,41 +102,21 @@ HEAP_OBJECT_TYPE_LIST(DECL_TYPE)
 
 }  // namespace InstanceTypeTraits
 
-#define TYPE_CHECKER(type, ...)                                               \
-  bool HeapObject::Is##type() const {                                         \
-    PtrComprCageBase cage_base = GetPtrComprCageBase(*this);                  \
-    return HeapObject::Is##type(cage_base);                                   \
-  }                                                                           \
-  /* The cage_base passed here is supposed to be the base of the pointer */   \
-  /* compression cage where the Map space is allocated. */                    \
-  /* However when external code space is enabled it's not always the case */  \
-  /* yet and the predicate has to work if the cage_base corresponds to the */ \
-  /* cage containing external code space. */                                  \
-  /* TODO(v8:11880): Ensure that the cage_base value always corresponds to */ \
-  /* the main pointer compression cage. */                                    \
-  bool HeapObject::Is##type(PtrComprCageBase cage_base) const {               \
-    if (V8_EXTERNAL_CODE_SPACE_BOOL) {                                        \
-      if (IsCodeSpaceObject(*this)) {                                         \
-        /* Code space contains only Code objects and free space fillers. */   \
-        if (std::is_same<InstanceTypeTraits::type,                            \
-                         InstanceTypeTraits::Code>::value ||                  \
-            std::is_same<InstanceTypeTraits::type,                            \
-                         InstanceTypeTraits::FreeSpace>::value ||             \
-            std::is_same<InstanceTypeTraits::type,                            \
-                         InstanceTypeTraits::FreeSpaceOrFiller>::value) {     \
-          /* Code space objects are never read-only, so it's safe to query */ \
-          /* heap value in order to compute proper cage base. */              \
-          Heap* heap = GetHeapFromWritableObject(*this);                      \
-          Map map_object = map(Isolate::FromHeap(heap));                      \
-          return InstanceTypeChecker::Is##type(map_object.instance_type());   \
-        }                                                                     \
-        /* For all the other queries we can return false. */                  \
-        return false;                                                         \
-      }                                                                       \
-      /* Fallback to checking map instance type. */                          \
-    }                                                                         \
-    Map map_object = map(cage_base);                                          \
-    return InstanceTypeChecker::Is##type(map_object.instance_type());         \
+#define TYPE_CHECKER(type, ...)                                               \
+  bool HeapObject::Is##type() const {                                         \
+    /* In general, parameterless IsBlah() must not be used for objects */     \
+    /* that might be located in external code space. Note that this version */\
+    /* is still called from Blah::cast() methods but it's fine because in */  \
+    /* production builds these checks are not enabled anyway and debug */     \
+    /* builds are allowed to be a bit slower. */                              \
+    PtrComprCageBase cage_base = GetPtrComprCageBaseSlow(*this);              \
+    return HeapObject::Is##type(cage_base);                                   \
+  }                                                                           \
+  /* The cage_base passed here must be the base of the pointer */             \
+  /* compression cage where the Map space is allocated. */                    \
+  bool HeapObject::Is##type(PtrComprCageBase cage_base) const {               \
+    Map map_object = map(cage_base);                                          \
+    return InstanceTypeChecker::Is##type(map_object.instance_type());         \
   }
 
 // TODO(v8:7786): For instance types that have a single map instance on the
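For reference, this is roughly what the new TYPE_CHECKER expands to for a single predicate (hand-expanded here with type = Code; an illustration of the macro above, not literal compiler output):

bool HeapObject::IsCode() const {
  // Slow but always-correct cage base; acceptable because this version is
  // only reached from debug-time checks such as those in Code::cast().
  PtrComprCageBase cage_base = GetPtrComprCageBaseSlow(*this);
  return HeapObject::IsCode(cage_base);
}
bool HeapObject::IsCode(PtrComprCageBase cage_base) const {
  // Hot path: one map load relative to the supplied cage base plus an
  // instance-type check; the old per-call IsCodeSpaceObject() branch is gone.
  Map map_object = map(cage_base);
  return InstanceTypeChecker::IsCode(map_object.instance_type());
}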
@@ -600,13 +600,14 @@ void Map::ReplaceDescriptors(Isolate* isolate,
 Map Map::FindRootMap(Isolate* isolate) const {
   DisallowGarbageCollection no_gc;
   Map result = *this;
+  PtrComprCageBase cage_base(isolate);
   while (true) {
-    Object back = result.GetBackPointer(isolate);
+    Object back = result.GetBackPointer(cage_base);
     if (back.IsUndefined(isolate)) {
       // Initial map must not contain descriptors in the descriptors array
       // that do not belong to the map.
       DCHECK_LE(result.NumberOfOwnDescriptors(),
-                result.instance_descriptors(isolate, kRelaxedLoad)
+                result.instance_descriptors(cage_base, kRelaxedLoad)
                     .number_of_descriptors());
       return result;
     }
@@ -316,7 +316,13 @@ DEF_GETTER(HeapObject, IsOSROptimizedCodeCache, bool) {
   return IsWeakFixedArray(cage_base);
 }
 
-DEF_GETTER(HeapObject, IsAbstractCode, bool) {
+bool HeapObject::IsAbstractCode() const {
+  // TODO(v8:11880): Either make AbstractCode be ByteArray|CodeT or
+  // ensure this version is not called for hot code.
+  PtrComprCageBase cage_base = GetPtrComprCageBaseSlow(*this);
+  return HeapObject::IsAbstractCode(cage_base);
+}
+bool HeapObject::IsAbstractCode(PtrComprCageBase cage_base) const {
   return IsBytecodeArray(cage_base) || IsCode(cage_base);
 }
 
@@ -752,11 +758,11 @@ ReadOnlyRoots HeapObject::GetReadOnlyRoots(PtrComprCageBase cage_base) const {
 }
 
 Map HeapObject::map() const {
-  // TODO(v8:11880): Ensure that cage friendly version is used for the cases
-  // when this could be a Code object. Add
-  // DCHECK_IMPLIES(V8_EXTERNAL_CODE_SPACE_BOOL, !IsCodeSpaceObject(*this));
-  // and use GetPtrComprCageBase(*this) here.
-  PtrComprCageBase cage_base = GetPtrComprCageBaseSlow(*this);
+  // This method is never used for objects located in code space (Code and
+  // free space fillers) and thus it is fine to use auto-computed cage base
+  // value.
+  DCHECK_IMPLIES(V8_EXTERNAL_CODE_SPACE_BOOL, !IsCodeSpaceObject(*this));
+  PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
   return HeapObject::map(cage_base);
 }
 Map HeapObject::map(PtrComprCageBase cage_base) const {
@ -836,11 +842,11 @@ ObjectSlot HeapObject::map_slot() const {
|
||||
}
|
||||
|
||||
MapWord HeapObject::map_word(RelaxedLoadTag tag) const {
|
||||
// TODO(v8:11880): Ensure that cage friendly version is used for the cases
|
||||
// when this could be a Code object. Add
|
||||
// DCHECK_IMPLIES(V8_EXTERNAL_CODE_SPACE_BOOL, !IsCodeSpaceObject(*this));
|
||||
// and use GetPtrComprCageBase(*this) here.
|
||||
PtrComprCageBase cage_base = GetPtrComprCageBaseSlow(*this);
|
||||
// This method is never used for objects located in code space (Code and
|
||||
// free space fillers) and thus it is fine to use auto-computed cage base
|
||||
// value.
|
||||
DCHECK_IMPLIES(V8_EXTERNAL_CODE_SPACE_BOOL, !IsCodeSpaceObject(*this));
|
||||
PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
|
||||
return HeapObject::map_word(cage_base, tag);
|
||||
}
|
||||
MapWord HeapObject::map_word(PtrComprCageBase cage_base,
|
||||
@@ -878,11 +884,8 @@ bool HeapObject::release_compare_and_swap_map_word(MapWord old_map_word,
 
 // TODO(v8:11880): consider dropping parameterless version.
 int HeapObject::Size() const {
-  // TODO(v8:11880): Ensure that cage friendly version is used for the cases
-  // when this could be a Code object. Add
-  // DCHECK_IMPLIES(V8_EXTERNAL_CODE_SPACE_BOOL, !IsCodeSpaceObject(*this));
-  // and use GetPtrComprCageBase(*this) here.
-  PtrComprCageBase cage_base = GetPtrComprCageBaseSlow(*this);
+  DCHECK_IMPLIES(V8_EXTERNAL_CODE_SPACE_BOOL, !IsCodeSpaceObject(*this));
+  PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
   return HeapObject::Size(cage_base);
 }
 int HeapObject::Size(PtrComprCageBase cage_base) const {
@@ -1838,23 +1838,24 @@ std::ostream& operator<<(std::ostream& os, const Brief& v) {
 void Smi::SmiPrint(std::ostream& os) const { os << value(); }
 
 void HeapObject::HeapObjectShortPrint(std::ostream& os) {
+  PtrComprCageBase cage_base = GetPtrComprCageBaseSlow(*this);
   os << AsHex::Address(this->ptr()) << " ";
 
-  if (IsString()) {
+  if (IsString(cage_base)) {
     HeapStringAllocator allocator;
     StringStream accumulator(&allocator);
     String::cast(*this).StringShortPrint(&accumulator);
     os << accumulator.ToCString().get();
     return;
   }
-  if (IsJSObject()) {
+  if (IsJSObject(cage_base)) {
     HeapStringAllocator allocator;
     StringStream accumulator(&allocator);
     JSObject::cast(*this).JSObjectShortPrint(&accumulator);
     os << accumulator.ToCString().get();
     return;
   }
-  switch (map().instance_type()) {
+  switch (map(cage_base).instance_type()) {
     case MAP_TYPE: {
       os << "<Map";
       Map mapInstance = Map::cast(*this);
@@ -2158,21 +2159,10 @@ void CallableTask::BriefPrintDetails(std::ostream& os) {
   os << " callable=" << Brief(callable());
 }
 
-// TODO(v8:11880): drop this version if favor of cage friendly one.
-void HeapObject::Iterate(ObjectVisitor* v) {
-  IterateFast<ObjectVisitor>(GetPtrComprCageBaseSlow(*this), v);
-}
-
 void HeapObject::Iterate(PtrComprCageBase cage_base, ObjectVisitor* v) {
   IterateFast<ObjectVisitor>(cage_base, v);
 }
 
-// TODO(v8:11880): drop this version if favor of cage friendly one.
-void HeapObject::IterateBody(ObjectVisitor* v) {
-  Map m = map();
-  IterateBodyFast<ObjectVisitor>(m, SizeFromMap(m), v);
-}
-
 void HeapObject::IterateBody(PtrComprCageBase cage_base, ObjectVisitor* v) {
   Map m = map(cage_base);
   IterateBodyFast<ObjectVisitor>(m, SizeFromMap(m), v);
@@ -424,14 +424,16 @@ void HeapObjectsMap::UpdateHeapObjectsMap() {
   }
   heap_->PreciseCollectAllGarbage(Heap::kNoGCFlags,
                                   GarbageCollectionReason::kHeapProfiler);
+  PtrComprCageBase cage_base(heap_->isolate());
   CombinedHeapObjectIterator iterator(heap_);
   for (HeapObject obj = iterator.Next(); !obj.is_null();
        obj = iterator.Next()) {
-    FindOrAddEntry(obj.address(), obj.Size());
+    int object_size = obj.Size(cage_base);
+    FindOrAddEntry(obj.address(), object_size);
     if (FLAG_heap_profiler_trace_objects) {
       PrintF("Update object : %p %6d. Next address is %p\n",
-             reinterpret_cast<void*>(obj.address()), obj.Size(),
-             reinterpret_cast<void*>(obj.address() + obj.Size()));
+             reinterpret_cast<void*>(obj.address()), object_size,
+             reinterpret_cast<void*>(obj.address() + object_size));
     }
   }
   RemoveDeadEntries();
@@ -660,8 +662,8 @@ HeapEntry* V8HeapExplorer::AddEntry(HeapObject object, HeapEntry::Type type,
   if (FLAG_heap_profiler_show_hidden_objects && type == HeapEntry::kHidden) {
     type = HeapEntry::kNative;
   }
-
-  return AddEntry(object.address(), type, name, object.Size());
+  PtrComprCageBase cage_base(isolate());
+  return AddEntry(object.address(), type, name, object.Size(cage_base));
 }
 
 HeapEntry* V8HeapExplorer::AddEntry(Address address,
@@ -732,7 +734,8 @@ class IndexedReferencesExtractor : public ObjectVisitorWithCageBases {
         generator_(generator),
         parent_obj_(parent_obj),
         parent_start_(parent_obj_.RawMaybeWeakField(0)),
-        parent_end_(parent_obj_.RawMaybeWeakField(parent_obj_.Size())),
+        parent_end_(
+            parent_obj_.RawMaybeWeakField(parent_obj_.Size(cage_base()))),
         parent_(parent),
         next_index_(0) {}
   void VisitPointers(HeapObject host, ObjectSlot start,
@@ -1615,7 +1618,7 @@ class RootsReferencesExtractor : public RootVisitor {
                          OffHeapObjectSlot start,
                          OffHeapObjectSlot end) override {
     DCHECK_EQ(root, Root::kStringTable);
-    PtrComprCageBase cage_base = Isolate::FromHeap(explorer_->heap_);
+    PtrComprCageBase cage_base(explorer_->heap_->isolate());
     for (OffHeapObjectSlot p = start; p < end; ++p) {
       explorer_->SetGcSubrootReference(root, description, visiting_weak_roots_,
                                        p.load(cage_base));
@@ -1680,12 +1683,13 @@ bool V8HeapExplorer::IterateAndExtractReferences(
 
   CombinedHeapObjectIterator iterator(heap_,
                                       HeapObjectIterator::kFilterUnreachable);
+  PtrComprCageBase cage_base(heap_->isolate());
   // Heap iteration with filtering must be finished in any case.
   for (HeapObject obj = iterator.Next(); !obj.is_null();
        obj = iterator.Next(), progress_->ProgressStep()) {
     if (interrupted) continue;
 
-    size_t max_pointer = obj.Size() / kTaggedSize;
+    size_t max_pointer = obj.Size(cage_base) / kTaggedSize;
     if (max_pointer > visited_fields_.size()) {
       // Clear the current bits.
       std::vector<bool>().swap(visited_fields_);
@@ -1695,11 +1699,12 @@ bool V8HeapExplorer::IterateAndExtractReferences(
 
     HeapEntry* entry = GetEntry(obj);
     ExtractReferences(entry, obj);
-    SetInternalReference(entry, "map", obj.map(), HeapObject::kMapOffset);
+    SetInternalReference(entry, "map", obj.map(cage_base),
+                         HeapObject::kMapOffset);
     // Extract unvisited fields as hidden references and restore tags
     // of visited fields.
    IndexedReferencesExtractor refs_extractor(this, obj, entry);
-    obj.Iterate(&refs_extractor);
+    obj.Iterate(cage_base, &refs_extractor);
 
     // Ensure visited_fields_ doesn't leak to the next object.
     for (size_t i = 0; i < max_pointer; ++i) {
|
@ -75,7 +75,7 @@ void SamplingHeapProfiler::SampleObject(Address soon_object, size_t size) {
|
||||
DisallowGarbageCollection no_gc;
|
||||
|
||||
// Check if the area is iterable by confirming that it starts with a map.
|
||||
DCHECK(HeapObject::FromAddress(soon_object).map().IsMap());
|
||||
DCHECK(HeapObject::FromAddress(soon_object).map(isolate_).IsMap(isolate_));
|
||||
|
||||
HandleScope scope(isolate_);
|
||||
HeapObject heap_object = HeapObject::FromAddress(soon_object);
|
||||