Revert "[static-roots] Use static map range checks instead of instance types"
This reverts commit 77d08fcde5.
Reason for revert: compile failures on the Arm64 bots: https://ci.chromium.org/ui/p/v8/builders/ci/V8%20Arm64%20-%20builder/24010/overview
Original change's description:
> [static-roots] Use static map range checks instead of instance types
>
> Some instance types, or type ranges, correspond to a range of pointers
> in the static read-only roots table. Instead of loading the instance
> type of a map, it can therefore be beneficial to compare the map itself
> against this range.
>
> This CL:
>
> * Adds infrastructure to compute and output a mapping of
>   `(instance_type_first, instance_type_last) ->
>   (map_ptr_first, map_ptr_last)` for interesting ranges.
> * Extends InstanceTypeChecker to use these ranges.
>
> For single instance types that map onto a range of maps, it is not
> obvious which check is faster: checking the map range saves a load,
> whereas checking the instance type saves an additional jump.
>
> Bug: v8:13466
> Change-Id: I670fc10fad9920645c0ce0d976ae7e7a13a86e60
> Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/4188379
> Reviewed-by: Jakob Linke <jgruber@chromium.org>
> Reviewed-by: Dominik Inführ <dinfuehr@chromium.org>
> Commit-Queue: Olivier Flückiger <olivf@chromium.org>
> Auto-Submit: Olivier Flückiger <olivf@chromium.org>
> Cr-Commit-Position: refs/heads/main@{#85599}
Bug: v8:13466
Change-Id: I88afb05948d934d15f8512bcd37d56429aac23a6
No-Presubmit: true
No-Tree-Checks: true
No-Try: true
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/4214812
Auto-Submit: Deepti Gandluri <gdeepti@chromium.org>
Owners-Override: Deepti Gandluri <gdeepti@chromium.org>
Bot-Commit: Rubber Stamper <rubber-stamper@appspot.gserviceaccount.com>
Commit-Queue: Rubber Stamper <rubber-stamper@appspot.gserviceaccount.com>
Cr-Commit-Position: refs/heads/main@{#85600}
Parent: 77d08fcde5
Commit: 11bc0f61b7
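
Background on the reverted technique, as a sketch only (this is not V8 code; the Tagged_t alias, boundary constants, and helper names below are made-up placeholders): when all maps belonging to an instance-type range are laid out contiguously in the read-only roots table, a type check can compare the compressed map pointer against the range boundaries instead of first loading the 16-bit instance type out of the map.

#include <cstdint>

using Tagged_t = uint32_t;  // compressed 32-bit tagged pointer (assumed)

// Hypothetical boundaries of one contiguous block of maps in the read-only
// roots table (e.g. all string maps). Real values would come from the
// generated static-roots.h.
constexpr Tagged_t kFirstMapInRange = 0x2118;
constexpr Tagged_t kLastMapInRange = 0x21d0;

// Map-range check: a single unsigned compare on the map pointer itself,
// with no load of the instance type field.
inline bool IsInMapRange(Tagged_t compressed_map_ptr) {
  // (x - first) <= (last - first) checks first <= x <= last in one branch.
  return compressed_map_ptr - kFirstMapInRange <=
         kLastMapInRange - kFirstMapInRange;
}

// Instance-type check, for comparison: requires loading the 16-bit instance
// type from the map before doing the same single-branch range compare.
inline bool IsInTypeRange(uint16_t instance_type, uint16_t first,
                          uint16_t last) {
  return static_cast<uint16_t>(instance_type - first) <=
         static_cast<uint16_t>(last - first);
}

This is the trade-off the description above refers to: the map-range variant saves the instance-type load, while the instance-type variant helps when the type is already in a register.
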
@@ -166,6 +166,7 @@ enum class PrimitiveType { kBoolean, kNumber, kString, kSymbol };
V(FixedCOWArrayMap, fixed_cow_array_map, FixedCOWArrayMap) \
V(Function_string, function_string, FunctionString) \
V(function_to_string, function_to_string, FunctionToString) \
V(GlobalPropertyCellMap, global_property_cell_map, PropertyCellMap) \
V(has_instance_symbol, has_instance_symbol, HasInstanceSymbol) \
V(Infinity_string, Infinity_string, InfinityString) \
V(is_concat_spreadable_symbol, is_concat_spreadable_symbol, \

@@ -79,15 +79,12 @@ Address V8HeapCompressionScheme::DecompressTaggedPointer(
// For V8_ASSUME_ALIGNED to be considered for optimizations the following
// addition has to happen on a pointer type.
Address result = reinterpret_cast<Address>(cage_base + raw_value);
V8_ASSUME(static_cast<uint32_t>(result) == raw_value);
return result;
#else
Address cage_base = GetPtrComprCageBaseAddress(on_heap_addr);
Address result = cage_base + static_cast<Address>(raw_value);
return cage_base + static_cast<Address>(raw_value);
#endif
// Allows to remove compress(decompress(...))
V8_ASSUME(static_cast<uint32_t>(result) == raw_value);
// Allows to remove SMI checks when the result is compared against a constant.
V8_ASSUME(HAS_SMI_TAG(result) == HAS_SMI_TAG(raw_value));
return result;
}

// static

@@ -175,15 +172,12 @@ Address ExternalCodeCompressionScheme::DecompressTaggedPointer(
// For V8_ASSUME_ALIGNED to be considered for optimizations the following
// addition has to happen on a pointer type.
Address result = reinterpret_cast<Address>(cage_base + raw_value);
V8_ASSUME(static_cast<uint32_t>(result) == raw_value);
return result;
#else
Address cage_base = GetPtrComprCageBaseAddress(on_heap_addr);
Address result = cage_base + static_cast<Address>(raw_value);
return cage_base + static_cast<Address>(raw_value);
#endif
// Allows to remove compress(decompress(...))
V8_ASSUME(static_cast<uint32_t>(result) == raw_value);
// Allows to remove SMI checks when the result is compared against a constant.
V8_ASSUME(HAS_SMI_TAG(result) == HAS_SMI_TAG(raw_value));
return result;
}

// static

@@ -2028,7 +2028,6 @@ Handle<Map> Factory::NewMap(InstanceType type, int instance_size,
ElementsKind elements_kind, int inobject_properties,
AllocationType allocation_type) {
static_assert(LAST_JS_OBJECT_TYPE == LAST_TYPE);
DCHECK(!InstanceTypeChecker::UniqueMapOfInstanceType(type).has_value());
DCHECK_IMPLIES(InstanceTypeChecker::IsJSObject(type) &&
!Map::CanHaveFastTransitionableElementsKind(type),
IsDictionaryElementsKind(elements_kind) ||

@@ -440,6 +440,8 @@ bool Heap::CreateInitialReadOnlyMaps() {
ALLOCATE_VARSIZE_MAP(FEEDBACK_VECTOR_TYPE, feedback_vector)
ALLOCATE_PRIMITIVE_MAP(HEAP_NUMBER_TYPE, HeapNumber::kSize, heap_number,
Context::NUMBER_FUNCTION_INDEX)
ALLOCATE_PRIMITIVE_MAP(SYMBOL_TYPE, Symbol::kSize, symbol,
Context::SYMBOL_FUNCTION_INDEX)
ALLOCATE_MAP(FOREIGN_TYPE, Foreign::kSize, foreign)
ALLOCATE_MAP(MEGA_DOM_HANDLER_TYPE, MegaDomHandler::kSize, mega_dom_handler)

@@ -455,9 +457,6 @@ bool Heap::CreateInitialReadOnlyMaps() {
ALLOCATE_MAP(ODDBALL_TYPE, Oddball::kSize, basic_block_counters_marker);
ALLOCATE_VARSIZE_MAP(BIGINT_TYPE, bigint);

ALLOCATE_PRIMITIVE_MAP(SYMBOL_TYPE, Symbol::kSize, symbol,
Context::SYMBOL_FUNCTION_INDEX)

for (unsigned i = 0; i < arraysize(string_type_table); i++) {
const StringTypeTable& entry = string_type_table[i];
Map map;

@@ -9,7 +9,6 @@
#include "src/execution/isolate-utils-inl.h"
#include "src/objects/instance-type.h"
#include "src/objects/map-inl.h"
#include "src/roots/static-roots.h"

// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"

@@ -37,96 +36,47 @@ HEAP_OBJECT_TYPE_LIST(DECL_TYPE)
} // namespace InstanceTypeTraits

template <class type>
inline constexpr base::Optional<RootIndex> UniqueMapOfInstanceType() {
return {};
inline constexpr Tagged_t StaticSingleMapOfInstanceType() {
return kNullAddress;
}

#if V8_STATIC_ROOTS_BOOL

inline bool CheckInstanceMap(Tagged_t expected, Map map) {
return V8HeapCompressionScheme::CompressTagged(map.ptr()) == expected;
}

#define INSTANCE_TYPE_MAP(V, rootIndexName, rootAccessorName, class_name) \
template <> \
inline constexpr base::Optional<RootIndex> \
UniqueMapOfInstanceType<InstanceTypeTraits::class_name>() { \
return {RootIndex::k##rootIndexName}; \
inline constexpr Tagged_t \
StaticSingleMapOfInstanceType<InstanceTypeTraits::class_name>() { \
return StaticReadOnlyRoot::k##rootIndexName; \
}
UNIQUE_INSTANCE_TYPE_MAP_LIST_GENERATOR(INSTANCE_TYPE_MAP, _)
#undef INSTANCE_TYPE_MAP

inline constexpr base::Optional<RootIndex> UniqueMapOfInstanceType(
InstanceType type) {
switch (type) {
#define INSTANCE_TYPE_CHECK(it, forinstancetype) \
case forinstancetype: \
return InstanceTypeChecker::UniqueMapOfInstanceType< \
InstanceTypeChecker::InstanceTypeTraits::it>(); \
INSTANCE_TYPE_CHECKERS_SINGLE(INSTANCE_TYPE_CHECK);
#undef INSTANCE_TYPE_CHECK
default: {
}
}
return {};
}
#else

#if V8_STATIC_ROOTS_BOOL

inline bool CheckInstanceMap(RootIndex expected, Map map) {
return V8HeapCompressionScheme::CompressTagged(map.ptr()) ==
StaticReadOnlyRootsPointerTable[static_cast<size_t>(expected)];
}

inline bool CheckInstanceMapRange(std::pair<RootIndex, RootIndex> expected,
Map map) {
Tagged_t ptr = V8HeapCompressionScheme::CompressTagged(map.ptr());
Tagged_t first =
StaticReadOnlyRootsPointerTable[static_cast<size_t>(expected.first)];
Tagged_t last =
StaticReadOnlyRootsPointerTable[static_cast<size_t>(expected.second)];
return ptr >= first && ptr <= last;
}
inline bool CheckInstanceMap(Tagged_t expected, Map map) { UNREACHABLE(); }

#endif // V8_STATIC_ROOTS_BOOL

// Define type checkers for classes with single instance type.
// INSTANCE_TYPE_CHECKER1 is to be used if the instance type is already loaded.
// INSTANCE_TYPE_CHECKER2 is preferred since it can sometimes avoid loading the
// instance type from the map, if the checked instance type corresponds to a
// known map or range of maps.

#define INSTANCE_TYPE_CHECKER1(type, forinstancetype) \
V8_INLINE constexpr bool Is##type(InstanceType instance_type) { \
return instance_type == forinstancetype; \
#define INSTANCE_TYPE_CHECKER(type, forinstancetype) \
V8_INLINE constexpr bool Is##type(InstanceType instance_type) { \
return instance_type == forinstancetype; \
} \
V8_INLINE bool Is##type(Map map_object) { \
if (Tagged_t expected = \
StaticSingleMapOfInstanceType<InstanceTypeTraits::type>()) { \
bool res = CheckInstanceMap(expected, map_object); \
SLOW_DCHECK(Is##type(map_object.instance_type()) == res); \
return res; \
} \
return Is##type(map_object.instance_type()); \
}

#if V8_STATIC_ROOTS_BOOL

#define INSTANCE_TYPE_CHECKER2(type, forinstancetype) \
V8_INLINE bool Is##type(Map map_object) { \
if (base::Optional<RootIndex> expected = \
UniqueMapOfInstanceType<InstanceTypeTraits::type>()) { \
bool res = CheckInstanceMap(*expected, map_object); \
SLOW_DCHECK(Is##type(map_object.instance_type()) == res); \
return res; \
} \
if (base::Optional<std::pair<RootIndex, RootIndex>> range = \
StaticReadOnlyRootMapRange(forinstancetype)) { \
bool res = CheckInstanceMapRange(*range, map_object); \
SLOW_DCHECK(Is##type(map_object.instance_type()) == res); \
return res; \
} \
return Is##type(map_object.instance_type()); \
}

#else

#define INSTANCE_TYPE_CHECKER2(type, forinstancetype) \
V8_INLINE bool Is##type(Map map_object) { \
return Is##type(map_object.instance_type()); \
}

#endif // V8_STATIC_ROOTS_BOOL

INSTANCE_TYPE_CHECKERS_SINGLE(INSTANCE_TYPE_CHECKER1)
INSTANCE_TYPE_CHECKERS_SINGLE(INSTANCE_TYPE_CHECKER2)
#undef INSTANCE_TYPE_CHECKER1
#undef INSTANCE_TYPE_CHECKER2
INSTANCE_TYPE_CHECKERS_SINGLE(INSTANCE_TYPE_CHECKER)
#undef INSTANCE_TYPE_CHECKER

// Checks if value is in range [lower_limit, higher_limit] using a single
// branch. Assumes that the input instance type is valid.

@@ -152,45 +102,17 @@ struct InstanceRangeChecker<lower_limit, LAST_TYPE> {
};

// Define type checkers for classes with ranges of instance types.
// INSTANCE_TYPE_CHECKER_RANGE1 is to be used if the instance type is already
// loaded. INSTANCE_TYPE_CHECKER_RANGE2 is preferred since it can sometimes
// avoid loading the instance type from the map, if the checked instance type
// range corresponds to a known range of maps.

#define INSTANCE_TYPE_CHECKER_RANGE1(type, first_instance_type, \
last_instance_type) \
#define INSTANCE_TYPE_CHECKER_RANGE(type, first_instance_type, \
last_instance_type) \
V8_INLINE constexpr bool Is##type(InstanceType instance_type) { \
return InstanceRangeChecker<first_instance_type, \
last_instance_type>::Check(instance_type); \
} \
V8_INLINE bool Is##type(Map map_object) { \
return Is##type(map_object.instance_type()); \
}

#if V8_STATIC_ROOTS_BOOL

#define INSTANCE_TYPE_CHECKER_RANGE2(type, first_instance_type, \
last_instance_type) \
V8_INLINE bool Is##type(Map map_object) { \
if (base::Optional<std::pair<RootIndex, RootIndex>> range = \
StaticReadOnlyRootMapRange(first_instance_type, \
last_instance_type)) { \
return CheckInstanceMapRange(*range, map_object); \
} \
return Is##type(map_object.instance_type()); \
}

#else

#define INSTANCE_TYPE_CHECKER_RANGE2(type, first_instance_type, \
last_instance_type) \
V8_INLINE bool Is##type(Map map_object) { \
return Is##type(map_object.instance_type()); \
}

#endif // V8_STATIC_ROOTS_BOOL

INSTANCE_TYPE_CHECKERS_RANGE(INSTANCE_TYPE_CHECKER_RANGE1)
INSTANCE_TYPE_CHECKERS_RANGE(INSTANCE_TYPE_CHECKER_RANGE2)
#undef INSTANCE_TYPE_CHECKER_RANGE1
#undef INSTANCE_TYPE_CHECKER_RANGE2
INSTANCE_TYPE_CHECKERS_RANGE(INSTANCE_TYPE_CHECKER_RANGE)
#undef INSTANCE_TYPE_CHECKER_RANGE

V8_INLINE constexpr bool IsHeapObject(InstanceType instance_type) {
return true;

@@ -203,12 +125,7 @@ V8_INLINE constexpr bool IsInternalizedString(InstanceType instance_type) {
}

V8_INLINE bool IsInternalizedString(Map map_object) {
#if V8_STATIC_ROOTS_BOOL
return CheckInstanceMapRange(
*StaticReadOnlyRootMapRange(INTERNALIZED_STRING_TYPE), map_object);
#else
return IsInternalizedString(map_object.instance_type());
#endif
}

V8_INLINE constexpr bool IsExternalString(InstanceType instance_type) {

@@ -339,16 +339,10 @@ INSTANCE_TYPE_CHECKERS(IS_TYPE_FUNCTION_DECL)

// This list must contain only maps that are shared by all objects of their
// instance type.
#define UNIQUE_INSTANCE_TYPE_MAP_LIST_GENERATOR(V, _) \
UNIQUE_LEAF_INSTANCE_TYPE_MAP_LIST_GENERATOR(V, _) \
V(_, ByteArrayMap, byte_array_map, ByteArray) \
V(_, NameDictionaryMap, name_dictionary_map, NameDictionary) \
V(_, OrderedNameDictionaryMap, ordered_name_dictionary_map, \
OrderedNameDictionary) \
V(_, GlobalDictionaryMap, global_dictionary_map, GlobalDictionary) \
V(_, GlobalPropertyCellMap, global_property_cell_map, PropertyCell) \
V(_, HeapNumberMap, heap_number_map, HeapNumber) \
V(_, WeakFixedArrayMap, weak_fixed_array_map, WeakFixedArray) \
#define UNIQUE_INSTANCE_TYPE_MAP_LIST_GENERATOR(V, _) \
UNIQUE_LEAF_INSTANCE_TYPE_MAP_LIST_GENERATOR(V, _) \
V(_, HeapNumberMap, heap_number_map, HeapNumber) \
V(_, WeakFixedArrayMap, weak_fixed_array_map, WeakFixedArray) \
TORQUE_DEFINED_MAP_CSA_LIST_GENERATOR(V, _)

} // namespace internal

@@ -12,9 +12,6 @@

#if V8_STATIC_ROOTS_BOOL

#include "src/objects/instance-type.h"
#include "src/roots/roots.h"

// Disabling Wasm or Intl invalidates the contents of static-roots.h.
// TODO(olivf): To support static roots for multiple build configurations we
// will need to generate target specific versions of this file.

@@ -95,19 +92,19 @@ struct StaticReadOnlyRoot {
static constexpr Tagged_t kClosureFeedbackCellArrayMap = 0x2b05;
static constexpr Tagged_t kFeedbackVectorMap = 0x2b2d;
static constexpr Tagged_t kHeapNumberMap = 0x2b55;
static constexpr Tagged_t kForeignMap = 0x2b7d;
static constexpr Tagged_t kMegaDomHandlerMap = 0x2ba5;
static constexpr Tagged_t kBooleanMap = 0x2bcd;
static constexpr Tagged_t kUninitializedMap = 0x2bf5;
static constexpr Tagged_t kArgumentsMarkerMap = 0x2c1d;
static constexpr Tagged_t kExceptionMap = 0x2c45;
static constexpr Tagged_t kTerminationExceptionMap = 0x2c6d;
static constexpr Tagged_t kOptimizedOutMap = 0x2c95;
static constexpr Tagged_t kStaleRegisterMap = 0x2cbd;
static constexpr Tagged_t kSelfReferenceMarkerMap = 0x2ce5;
static constexpr Tagged_t kBasicBlockCountersMarkerMap = 0x2d0d;
static constexpr Tagged_t kBigIntMap = 0x2d35;
static constexpr Tagged_t kSymbolMap = 0x2d5d;
static constexpr Tagged_t kSymbolMap = 0x2b7d;
static constexpr Tagged_t kForeignMap = 0x2ba5;
static constexpr Tagged_t kMegaDomHandlerMap = 0x2bcd;
static constexpr Tagged_t kBooleanMap = 0x2bf5;
static constexpr Tagged_t kUninitializedMap = 0x2c1d;
static constexpr Tagged_t kArgumentsMarkerMap = 0x2c45;
static constexpr Tagged_t kExceptionMap = 0x2c6d;
static constexpr Tagged_t kTerminationExceptionMap = 0x2c95;
static constexpr Tagged_t kOptimizedOutMap = 0x2cbd;
static constexpr Tagged_t kStaleRegisterMap = 0x2ce5;
static constexpr Tagged_t kSelfReferenceMarkerMap = 0x2d0d;
static constexpr Tagged_t kBasicBlockCountersMarkerMap = 0x2d35;
static constexpr Tagged_t kBigIntMap = 0x2d5d;
static constexpr Tagged_t kStringMap = 0x2d85;
static constexpr Tagged_t kOneByteStringMap = 0x2dad;
static constexpr Tagged_t kConsStringMap = 0x2dd5;

@@ -1507,34 +1504,6 @@ static constexpr std::array<Tagged_t, 737> StaticReadOnlyRootsPointerTable = {
StaticReadOnlyRoot::kStoreHandler3Map,
};

inline constexpr base::Optional<std::pair<RootIndex, RootIndex>>
StaticReadOnlyRootMapRange(InstanceType type) {
switch (type) {
case INTERNALIZED_STRING_TYPE:
return {{RootIndex::kInternalizedStringMap,
RootIndex::kUncachedExternalOneByteInternalizedStringMap}};
case ALLOCATION_SITE_TYPE:
return {{RootIndex::kAllocationSiteWithWeakNextMap,
RootIndex::kAllocationSiteWithoutWeakNextMap}};
default: {
}
}
return {};
}

inline constexpr base::Optional<std::pair<RootIndex, RootIndex>>
StaticReadOnlyRootMapRange(InstanceType first, InstanceType last) {
if (first == FIRST_STRING_TYPE && last == LAST_STRING_TYPE) {
return {{RootIndex::kStringMap, RootIndex::kSharedThinOneByteStringMap}};
}
if (first == FIRST_NAME_TYPE && last == LAST_NAME_TYPE) {
return {{RootIndex::kSymbolMap, RootIndex::kSharedThinOneByteStringMap}};
}
return {};
}

static constexpr size_t kStaticReadOnlyRootRangesHash = 4014968950881612012UL;

} // namespace internal
} // namespace v8
#endif // V8_STATIC_ROOTS_BOOL

@@ -15,7 +15,6 @@
#include "src/base/platform/wrappers.h"
#include "src/base/vector.h"
#include "src/codegen/cpu-features.h"
#include "src/common/globals.h"
#include "src/flags/flags.h"
#include "src/snapshot/embedded/embedded-file-writer.h"
#include "src/snapshot/snapshot.h"

@@ -294,8 +293,6 @@ int main(int argc, char** argv) {

if (i::v8_flags.static_roots_src) {
i::StaticRootsTableGen::write(i_isolate, i::v8_flags.static_roots_src);
} else if (V8_STATIC_ROOTS_BOOL) {
i::StaticRootsTableGen::VerifyRanges(i_isolate);
}
}

@@ -6,199 +6,14 @@

#include <fstream>

#include "src/common/globals.h"
#include "src/common/ptr-compr-inl.h"
#include "src/execution/isolate.h"
#include "src/objects/instance-type-inl.h"
#include "src/objects/instance-type.h"
#include "src/objects/objects-definitions.h"
#include "src/objects/visitors.h"
#include "src/roots/roots-inl.h"
#include "src/roots/roots.h"
#include "src/roots/static-roots.h"

namespace v8 {
namespace internal {

class StaticRootsTableGenImpl {
public:
explicit StaticRootsTableGenImpl(Isolate* isolate) {
// Define some object type ranges of interest
//
// These are manually curated lists of objects that are explicitly placed
// next to each other on the read only heap and also correspond to important
// instance type ranges.

std::list<RootIndex> string, internalized_string;
#define ELEMENT(type, size, name, CamelName) \
string.push_back(RootIndex::k##CamelName##Map); \
if (InstanceTypeChecker::IsInternalizedString(type)) { \
internalized_string.push_back(RootIndex::k##CamelName##Map); \
}
STRING_TYPE_LIST(ELEMENT)
#undef ELEMENT

root_ranges_.emplace_back("FIRST_STRING_TYPE", "LAST_STRING_TYPE", string);
root_ranges_.emplace_back("INTERNALIZED_STRING_TYPE", internalized_string);

CHECK_EQ(LAST_NAME_TYPE, SYMBOL_TYPE);
CHECK_EQ(LAST_STRING_TYPE + 1, SYMBOL_TYPE);
string.push_back(RootIndex::kSymbolMap);
root_ranges_.emplace_back("FIRST_NAME_TYPE", "LAST_NAME_TYPE", string);

std::list<RootIndex> allocation_site;
#define ELEMENT(_1, _2, CamelName) \
allocation_site.push_back(RootIndex::k##CamelName);
ALLOCATION_SITE_MAPS_LIST(ELEMENT);
#undef ELEMENT
root_ranges_.emplace_back("ALLOCATION_SITE_TYPE", allocation_site);

// Collect all roots
ReadOnlyRoots ro_roots(isolate);
{
RootIndex pos = RootIndex::kFirstReadOnlyRoot;
#define ADD_ROOT(_, value, CamelName) \
{ \
Tagged_t ptr = V8HeapCompressionScheme::CompressTagged( \
ro_roots.unchecked_##value().ptr()); \
sorted_roots_[ptr].push_back(pos); \
camel_names_[RootIndex::k##CamelName] = #CamelName; \
++pos; \
}
READ_ONLY_ROOT_LIST(ADD_ROOT)
#undef ADD_ROOT
}

// Compute start and end of ranges
for (auto& entry : sorted_roots_) {
Tagged_t ptr = entry.first;
std::list<RootIndex>& roots = entry.second;

for (RootIndex pos : roots) {
std::string& name = camel_names_.at(pos);
for (ObjectRange& range : root_ranges_) {
range.VisitNextRoot(name, pos, ptr);
}
}
}
}

// Used to compute ranges of objects next to each other on the r/o heap. A
// range contains a set of RootIndex's and computes the one with the lowest
// and highest address, aborting if they are not continuous (i.e. there is
// some other object in between).
class ObjectRange {
public:
ObjectRange(const std::string& instance_type,
const std::list<RootIndex> objects)
: ObjectRange(instance_type, instance_type, objects) {}
ObjectRange(const std::string& first, const std::string& last,
const std::list<RootIndex> objects)
: first_instance_type_(first),
last_instance_type_(last),
objects_(objects) {}
~ObjectRange() {
CHECK(!open_);
CHECK(first_ != RootIndex::kRootListLength);
CHECK(last_ != RootIndex::kRootListLength);
}

ObjectRange(ObjectRange& range) = delete;
ObjectRange& operator=(ObjectRange& range) = delete;
ObjectRange(ObjectRange&& range) V8_NOEXCEPT = default;
ObjectRange& operator=(ObjectRange&& range) V8_NOEXCEPT = default;

// Needs to be called in order of addresses.
void VisitNextRoot(const std::string& root_name, RootIndex idx,
Tagged_t ptr) {
auto test = [&](RootIndex obj) {
return std::find(objects_.begin(), objects_.end(), obj) !=
objects_.end();
};
if (open_) {
if (test(idx)) {
last_ = idx;
} else {
open_ = false;
}
return;
}

if (first_ == RootIndex::kRootListLength) {
if (test(idx)) {
first_ = idx;
open_ = true;
}
} else {
// If this check fails then the read only space was rearranged and what
// used to be a set of objects with continuous addresses is not anymore.
CHECK_WITH_MSG(!test(idx),
(first_instance_type_ + "-" + last_instance_type_ +
" does not specify a continuous range of "
"objects. There is a gap before " +
root_name)
.c_str());
}
}

const std::string& first_instance_type() const {
return first_instance_type_;
}
const std::string& last_instance_type() const {
return last_instance_type_;
}
RootIndex first() const { return first_; }
RootIndex last() const { return last_; }
bool singleton() const {
return first_instance_type_ == last_instance_type_;
}

private:
RootIndex first_ = RootIndex::kRootListLength;
RootIndex last_ = RootIndex::kRootListLength;
std::string first_instance_type_;
std::string last_instance_type_;

std::list<RootIndex> objects_;
bool open_ = false;

friend class StaticRootsTableGenImpl;
};

size_t RangesHash() const {
size_t hash = 0;
for (auto& range : root_ranges_) {
hash = base::hash_combine(hash, range.first_,
base::hash_combine(hash, range.last_));
}
return hash;
}

const std::map<Tagged_t, std::list<RootIndex>>& sorted_roots() {
return sorted_roots_;
}

const std::list<ObjectRange>& root_ranges() { return root_ranges_; }

const std::string& camel_name(RootIndex idx) { return camel_names_.at(idx); }

private:
std::map<Tagged_t, std::list<RootIndex>> sorted_roots_;
std::list<ObjectRange> root_ranges_;
std::unordered_map<RootIndex, std::string> camel_names_;
};

// Check if the computed ranges are still valid, ie. all their members lie
// between known boundaries.
void StaticRootsTableGen::VerifyRanges(Isolate* isolate) {
#if V8_STATIC_ROOTS_BOOL
StaticRootsTableGenImpl gen(isolate);
CHECK_WITH_MSG(kStaticReadOnlyRootRangesHash == gen.RangesHash(),
"StaticReadOnlyRanges changed. Run "
"tools/dev/gen-static-roots.py` to update static-roots.h.");
#endif // V8_STATIC_ROOTS_BOOL
}

void StaticRootsTableGen::write(Isolate* isolate, const char* file) {
CHECK_WITH_MSG(!V8_STATIC_ROOTS_BOOL,
"Re-generating the table of roots is only supported in builds "

@@ -207,6 +22,7 @@ void StaticRootsTableGen::write(Isolate* isolate, const char* file) {
static_assert(static_cast<int>(RootIndex::kFirstReadOnlyRoot) == 0);

std::ofstream out(file);
const auto ro_roots = ReadOnlyRoots(isolate);

out << "// Copyright 2022 the V8 project authors. All rights reserved.\n"
<< "// Use of this source code is governed by a BSD-style license "

@@ -223,15 +39,13 @@ void StaticRootsTableGen::write(Isolate* isolate, const char* file) {
<< "\n"
<< "#if V8_STATIC_ROOTS_BOOL\n"
<< "\n"
<< "#include \"src/objects/instance-type.h\"\n"
<< "#include \"src/roots/roots.h\"\n"
<< "\n"
<< "// Disabling Wasm or Intl invalidates the contents of "
"static-roots.h.\n"
<< "// TODO(olivf): To support static roots for multiple build "
"configurations we\n"
<< "// will need to generate target specific versions of "
"this file.\n"
"this "
"file.\n"
<< "static_assert(V8_ENABLE_WEBASSEMBLY);\n"
<< "static_assert(V8_INTL_SUPPORT);\n"
<< "\n"

@@ -243,23 +57,29 @@ void StaticRootsTableGen::write(Isolate* isolate, const char* file) {
// Output a symbol for every root. Ordered by ptr to make it easier to see the
// memory layout of the read only page.
const auto size = static_cast<int>(RootIndex::kReadOnlyRootsCount);
StaticRootsTableGenImpl gen(isolate);
{
std::map<Tagged_t, std::list<std::string>> sorted_roots;
#define ADD_ROOT(_, value, CamelName) \
{ \
Tagged_t ptr = V8HeapCompressionScheme::CompressTagged( \
ro_roots.unchecked_##value().ptr()); \
sorted_roots[ptr].push_back(#CamelName); \
}
READ_ONLY_ROOT_LIST(ADD_ROOT)
#undef ADD_ROOT

for (auto& entry : gen.sorted_roots()) {
Tagged_t ptr = entry.first;
const std::list<RootIndex>& roots = entry.second;
for (auto& entry : sorted_roots) {
Tagged_t ptr = entry.first;
std::list<std::string>& names = entry.second;

for (RootIndex root : roots) {
static const char* kPreString = " static constexpr Tagged_t k";
const std::string& name = gen.camel_name(root);
size_t ptr_len = ceil(log2(ptr) / 4.0);
// Full line is: "kPreString|name = 0x.....;"
size_t len = strlen(kPreString) + name.length() + 5 + ptr_len + 1;
out << kPreString << name << " =";
if (len > 80) out << "\n ";
out << " " << reinterpret_cast<void*>(ptr) << ";\n";
for (std::string& name : names) {
out << " static constexpr Tagged_t k" << name << " =";
if (name.length() + 39 > 80) out << "\n ";
out << " " << reinterpret_cast<void*>(ptr) << ";\n";
}
}
}

out << "};\n";

// Output in order of roots table

@@ -273,49 +93,6 @@ void StaticRootsTableGen::write(Isolate* isolate, const char* file) {
#undef ENTRY
out << "};\n";
}
out << "\n";

// Output interesting ranges of consecutive roots
out << "inline constexpr base::Optional<std::pair<RootIndex, RootIndex>>\n"
"StaticReadOnlyRootMapRange(InstanceType type) {\n"
" switch (type) {\n";
static const char* kPreString = " return {{RootIndex::k";
static const char* kMidString = " RootIndex::k";
for (const auto& rng : gen.root_ranges()) {
if (!rng.singleton()) continue;
out << " case " << rng.first_instance_type() << ":\n";
const std::string& first_name = gen.camel_name(rng.first());
const std::string& last_name = gen.camel_name(rng.last());
// Full line is: " kPreString|first_name,kMidString|last_name}};"
size_t len = 2 + strlen(kPreString) + first_name.length() + 1 +
strlen(kMidString) + last_name.length() + 3;
out << " " << kPreString << first_name << ",";
if (len > 80) out << "\n ";
out << kMidString << last_name << "}};\n";
}
out << " default: {\n }\n"
" }\n"
" return {};\n}\n\n";
out << "inline constexpr base::Optional<std::pair<RootIndex, RootIndex>>\n"
"StaticReadOnlyRootMapRange(InstanceType first, InstanceType last) "
"{\n";
for (const auto& rng : gen.root_ranges()) {
if (rng.singleton()) continue;
out << " if (first == " << rng.first_instance_type()
<< " && last == " << rng.last_instance_type() << ") {\n";
const std::string& first_name = gen.camel_name(rng.first());
const std::string& last_name = gen.camel_name(rng.last());
// Full line is: "kPreString|first_name,kMidString|last_name}};"
size_t len = strlen(kPreString) + first_name.length() + 1 +
strlen(kMidString) + last_name.length() + 3;
out << " return {{RootIndex::k" << first_name << ",";
if (len > 80) out << "\n ";
out << " RootIndex::k" << last_name << "}};\n"
<< " }\n";
}
out << " return {};\n}\n\n";
out << "static constexpr size_t kStaticReadOnlyRootRangesHash = "
<< gen.RangesHash() << "UL;\n";

out << "\n} // namespace internal\n"
<< "} // namespace v8\n"

@@ -13,7 +13,6 @@ class Isolate;
class StaticRootsTableGen {
public:
static void write(Isolate* isolate, const char* file);
static void VerifyRanges(Isolate* isolate);
};

} // namespace internal

@@ -356,19 +356,19 @@ KNOWN_MAPS = {
("read_only_space", 0x02b05): (187, "ClosureFeedbackCellArrayMap"),
("read_only_space", 0x02b2d): (249, "FeedbackVectorMap"),
("read_only_space", 0x02b55): (130, "HeapNumberMap"),
("read_only_space", 0x02b7d): (204, "ForeignMap"),
("read_only_space", 0x02ba5): (256, "MegaDomHandlerMap"),
("read_only_space", 0x02bcd): (131, "BooleanMap"),
("read_only_space", 0x02bf5): (131, "UninitializedMap"),
("read_only_space", 0x02c1d): (131, "ArgumentsMarkerMap"),
("read_only_space", 0x02c45): (131, "ExceptionMap"),
("read_only_space", 0x02c6d): (131, "TerminationExceptionMap"),
("read_only_space", 0x02c95): (131, "OptimizedOutMap"),
("read_only_space", 0x02cbd): (131, "StaleRegisterMap"),
("read_only_space", 0x02ce5): (131, "SelfReferenceMarkerMap"),
("read_only_space", 0x02d0d): (131, "BasicBlockCountersMarkerMap"),
("read_only_space", 0x02d35): (129, "BigIntMap"),
("read_only_space", 0x02d5d): (128, "SymbolMap"),
("read_only_space", 0x02b7d): (128, "SymbolMap"),
("read_only_space", 0x02ba5): (204, "ForeignMap"),
("read_only_space", 0x02bcd): (256, "MegaDomHandlerMap"),
("read_only_space", 0x02bf5): (131, "BooleanMap"),
("read_only_space", 0x02c1d): (131, "UninitializedMap"),
("read_only_space", 0x02c45): (131, "ArgumentsMarkerMap"),
("read_only_space", 0x02c6d): (131, "ExceptionMap"),
("read_only_space", 0x02c95): (131, "TerminationExceptionMap"),
("read_only_space", 0x02cbd): (131, "OptimizedOutMap"),
("read_only_space", 0x02ce5): (131, "StaleRegisterMap"),
("read_only_space", 0x02d0d): (131, "SelfReferenceMarkerMap"),
("read_only_space", 0x02d35): (131, "BasicBlockCountersMarkerMap"),
("read_only_space", 0x02d5d): (129, "BigIntMap"),
("read_only_space", 0x02d85): (32, "StringMap"),
("read_only_space", 0x02dad): (40, "OneByteStringMap"),
("read_only_space", 0x02dd5): (33, "ConsStringMap"),