[ext-code-space] Cache code kind and builtin_id in CodeDataContainer

Drive-by: fix a TSAN issue.

Bug: v8:11880
Change-Id: I8a31391c6a1855a20a243eb740e4e3e1223ecbbf
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3333930
Reviewed-by: Dominik Inführ <dinfuehr@chromium.org>
Reviewed-by: Leszek Swirski <leszeks@chromium.org>
Commit-Queue: Igor Sheludko <ishell@chromium.org>
Cr-Commit-Position: refs/heads/main@{#78360}
Igor Sheludko authored 2021-12-13 15:08:40 +01:00, committed by V8 LUCI CQ
parent 2c89f0fdec, commit 7476acb90f
8 changed files with 137 additions and 68 deletions
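
What the change does, in short: with V8_EXTERNAL_CODE_SPACE enabled, CodeDataContainer gets its own 16-bit flags word caching the CodeKind, plus a cached 16-bit builtin id, so common predicates no longer need to load and dereference the associated Code object. A minimal standalone sketch of the scheme; the layout and names here are simplified stand-ins, not V8's real object model:

  #include <cassert>
  #include <cstdint>

  enum class CodeKind : uint8_t { BYTECODE_HANDLER, BUILTIN, BASELINE, TURBOFAN };
  enum class Builtin : int { kNoBuiltinId = -1, kCompileLazy, kInterpreterEntryTrampoline };

  // Hypothetical stand-in for CodeDataContainer: kind and builtin id are
  // cached inline, so queries never touch the (possibly remote) Code object.
  struct DataContainer {
    uint16_t flags = 0;        // cf. kFlagsOffset; low 4 bits hold the kind
    int16_t builtin_id_ = -1;  // cf. kBuiltinIdOffset; -1 is kNoBuiltinId

    void initialize_flags(CodeKind kind, Builtin builtin_id) {
      flags = static_cast<uint16_t>(kind) & 0xF;
      builtin_id_ = static_cast<int16_t>(builtin_id);
    }
    CodeKind kind() const { return static_cast<CodeKind>(flags & 0xF); }
    Builtin builtin_id() const { return static_cast<Builtin>(builtin_id_); }
    bool is_builtin() const { return builtin_id() != Builtin::kNoBuiltinId; }
  };

  int main() {
    DataContainer c;
    c.initialize_flags(CodeKind::BASELINE, Builtin::kNoBuiltinId);
    assert(c.kind() == CodeKind::BASELINE && !c.is_builtin());
    return 0;
  }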


@@ -432,19 +432,19 @@ void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
   __ Unreachable();
 }
 
-static void AssertCodeIsBaselineAllowClobber(MacroAssembler* masm,
-                                             Register code, Register scratch) {
+static void AssertCodeTIsBaselineAllowClobber(MacroAssembler* masm,
+                                              Register code, Register scratch) {
   // Verify that the code kind is baseline code via the CodeKind.
-  __ Ldr(scratch, FieldMemOperand(code, Code::kFlagsOffset));
-  __ DecodeField<Code::KindField>(scratch);
+  __ Ldr(scratch, FieldMemOperand(code, CodeT::kFlagsOffset));
+  __ DecodeField<CodeT::KindField>(scratch);
   __ Cmp(scratch, Operand(static_cast<int>(CodeKind::BASELINE)));
   __ Assert(eq, AbortReason::kExpectedBaselineData);
 }
 
-static void AssertCodeIsBaseline(MacroAssembler* masm, Register code,
-                                 Register scratch) {
+static void AssertCodeTIsBaseline(MacroAssembler* masm, Register code,
+                                  Register scratch) {
   DCHECK(!AreAliased(code, scratch));
-  return AssertCodeIsBaselineAllowClobber(masm, code, scratch);
+  return AssertCodeTIsBaselineAllowClobber(masm, code, scratch);
 }
 
 // TODO(v8:11429): Add a path for "not_compiled" and unify the two uses under
@@ -459,12 +459,7 @@ static void GetSharedFunctionInfoBytecodeOrBaseline(MacroAssembler* masm,
   if (FLAG_debug_code) {
     Label not_baseline;
     __ B(ne, &not_baseline);
-    if (V8_EXTERNAL_CODE_SPACE_BOOL) {
-      __ LoadCodeDataContainerCodeNonBuiltin(scratch1, sfi_data);
-      AssertCodeIsBaselineAllowClobber(masm, scratch1, scratch1);
-    } else {
-      AssertCodeIsBaseline(masm, sfi_data, scratch1);
-    }
+    AssertCodeTIsBaseline(masm, sfi_data, scratch1);
     __ B(eq, is_baseline);
     __ Bind(&not_baseline);
   } else {
@@ -4196,12 +4191,12 @@ void Generate_BaselineOrInterpreterEntry(MacroAssembler* masm,
     __ Assert(eq, AbortReason::kExpectedBaselineData);
   }
 
+  if (FLAG_debug_code) {
+    AssertCodeTIsBaseline(masm, code_obj, x3);
+  }
   if (V8_EXTERNAL_CODE_SPACE_BOOL) {
     __ LoadCodeDataContainerCodeNonBuiltin(code_obj, code_obj);
   }
-  if (FLAG_debug_code) {
-    AssertCodeIsBaseline(masm, code_obj, x3);
-  }
 
   // Load the feedback vector.
   Register feedback_vector = x2;
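
(Note the reordering in this hunk, mirrored in the x64 hunk further down: the baseline assertion now runs on the CodeT object itself, before LoadCodeDataContainerCodeNonBuiltin unwraps it to a Code object. That is what lets the external-code-space and regular configurations share the single AssertCodeTIsBaseline helper.)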


@@ -347,6 +347,24 @@ class Builtins {
   friend class SetupIsolateDelegate;
 };
 
+V8_INLINE constexpr bool IsInterpreterTrampolineBuiltin(Builtin builtin_id) {
+  // Check for kNoBuiltinId first to abort early when the current Code object
+  // is not a builtin.
+  return builtin_id != Builtin::kNoBuiltinId &&
+         (builtin_id == Builtin::kInterpreterEntryTrampoline ||
+          builtin_id == Builtin::kInterpreterEnterAtBytecode ||
+          builtin_id == Builtin::kInterpreterEnterAtNextBytecode);
+}
+
+V8_INLINE constexpr bool IsBaselineTrampolineBuiltin(Builtin builtin_id) {
+  // Check for kNoBuiltinId first to abort early when the current Code object
+  // is not a builtin.
+  return builtin_id != Builtin::kNoBuiltinId &&
+         (builtin_id == Builtin::kBaselineOutOfLinePrologue ||
+          builtin_id == Builtin::kBaselineOrInterpreterEnterAtBytecode ||
+          builtin_id == Builtin::kBaselineOrInterpreterEnterAtNextBytecode);
+}
+
 Builtin ExampleBuiltinForTorqueFunctionPointerType(
     size_t function_pointer_type_id);
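
Since both new predicates are constexpr free functions, they can be exercised at compile time. A small usage sketch, assuming only the Builtin enumerators that appear in the hunk above (these static_asserts are illustrative, not part of the commit):

  static_assert(IsInterpreterTrampolineBuiltin(Builtin::kInterpreterEntryTrampoline), "");
  static_assert(!IsInterpreterTrampolineBuiltin(Builtin::kNoBuiltinId), "");
  static_assert(IsBaselineTrampolineBuiltin(Builtin::kBaselineOutOfLinePrologue), "");
  static_assert(!IsBaselineTrampolineBuiltin(Builtin::kInterpreterEntryTrampoline), "");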


@@ -684,19 +684,19 @@ void Builtins::Generate_RunMicrotasksTrampoline(MacroAssembler* masm) {
   __ Jump(BUILTIN_CODE(masm->isolate(), RunMicrotasks), RelocInfo::CODE_TARGET);
 }
 
-static void AssertCodeIsBaselineAllowClobber(MacroAssembler* masm,
-                                             Register code, Register scratch) {
+static void AssertCodeTIsBaselineAllowClobber(MacroAssembler* masm,
+                                              Register code, Register scratch) {
   // Verify that the code kind is baseline code via the CodeKind.
-  __ movl(scratch, FieldOperand(code, Code::kFlagsOffset));
-  __ DecodeField<Code::KindField>(scratch);
+  __ movl(scratch, FieldOperand(code, CodeT::kFlagsOffset));
+  __ DecodeField<CodeT::KindField>(scratch);
   __ cmpl(scratch, Immediate(static_cast<int>(CodeKind::BASELINE)));
   __ Assert(equal, AbortReason::kExpectedBaselineData);
 }
 
-static void AssertCodeIsBaseline(MacroAssembler* masm, Register code,
-                                 Register scratch) {
+static void AssertCodeTIsBaseline(MacroAssembler* masm, Register code,
+                                  Register scratch) {
   DCHECK(!AreAliased(code, scratch));
-  return AssertCodeIsBaselineAllowClobber(masm, code, scratch);
+  return AssertCodeTIsBaselineAllowClobber(masm, code, scratch);
 }
 
 static void GetSharedFunctionInfoBytecodeOrBaseline(MacroAssembler* masm,
@@ -711,12 +711,7 @@ static void GetSharedFunctionInfoBytecodeOrBaseline(MacroAssembler* masm,
   if (FLAG_debug_code) {
     Label not_baseline;
     __ j(not_equal, &not_baseline);
-    if (V8_EXTERNAL_CODE_SPACE_BOOL) {
-      __ LoadCodeDataContainerCodeNonBuiltin(scratch1, sfi_data);
-      AssertCodeIsBaselineAllowClobber(masm, scratch1, scratch1);
-    } else {
-      AssertCodeIsBaseline(masm, sfi_data, scratch1);
-    }
+    AssertCodeTIsBaseline(masm, sfi_data, scratch1);
     __ j(equal, is_baseline);
     __ bind(&not_baseline);
   } else {
@@ -4713,13 +4708,12 @@ void Generate_BaselineOrInterpreterEntry(MacroAssembler* masm,
     __ Assert(equal, AbortReason::kExpectedBaselineData);
   }
 
-  // Load baseline code from baseline data.
+  if (FLAG_debug_code) {
+    AssertCodeTIsBaseline(masm, code_obj, r11);
+  }
   if (V8_EXTERNAL_CODE_SPACE_BOOL) {
     __ LoadCodeDataContainerCodeNonBuiltin(code_obj, code_obj);
   }
-  if (FLAG_debug_code) {
-    AssertCodeIsBaseline(masm, code_obj, r11);
-  }
 
   // Load the feedback vector.
   Register feedback_vector = r11;


@@ -1013,6 +1013,12 @@ void CodeDataContainer::CodeDataContainerVerify(Isolate* isolate) {
   CHECK(next_code_link().IsCodeT() || next_code_link().IsUndefined(isolate));
   if (V8_EXTERNAL_CODE_SPACE_BOOL) {
     if (raw_code() != Smi::zero()) {
+#ifdef V8_EXTERNAL_CODE_SPACE
+      // The kind() and builtin_id() getters are not available on
+      // CodeDataContainer when the external code space is not enabled.
+      CHECK_EQ(code().kind(), kind());
+      CHECK_EQ(code().builtin_id(), builtin_id());
+#endif  // V8_EXTERNAL_CODE_SPACE
       CHECK_EQ(code().InstructionStart(), code_entry_point());
       CHECK_EQ(code().code_data_container(kAcquireLoad), *this);
     }
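
(These CHECK_EQs are the safety net for the new cache: if an allocation path forgets to call initialize_flags, the cached kind/builtin_id disagrees with the owning Code object and heap verification fails loudly instead of serving stale values.)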


@@ -127,6 +127,9 @@ MaybeHandle<Code> Factory::CodeBuilder::BuildInternal(
           0, read_only_data_container_ ? AllocationType::kReadOnly
                                        : AllocationType::kOld);
     }
+    if (V8_EXTERNAL_CODE_SPACE_BOOL) {
+      data_container->initialize_flags(kind_, builtin_);
+    }
     data_container->set_kind_specific_flags(kind_specific_flags_,
                                             kRelaxedStore);
   }
@@ -2273,10 +2276,14 @@ Handle<Code> Factory::NewOffHeapTrampolineFor(Handle<Code> code,
 #endif
     raw_result.set_relocation_info(canonical_reloc_info);
     if (V8_EXTERNAL_CODE_SPACE_BOOL) {
+      CodeDataContainer code_data_container =
+          raw_result.code_data_container(kAcquireLoad);
       // Updating flags (in particular is_off_heap_trampoline one) might change
       // the value of the instruction start, so update it here.
-      raw_result.code_data_container(kAcquireLoad)
-          .UpdateCodeEntryPoint(isolate(), raw_result);
+      code_data_container.UpdateCodeEntryPoint(isolate(), raw_result);
+      // Also update flag values cached on the code data container.
+      code_data_container.initialize_flags(raw_code.kind(),
+                                           raw_code.builtin_id());
     }
   }
@@ -2315,6 +2322,7 @@ Handle<Code> Factory::CopyCode(Handle<Code> code) {
 #endif
   }
   if (V8_EXTERNAL_CODE_SPACE_BOOL) {
+    data_container->initialize_flags(code->kind(), code->builtin_id());
     data_container->SetCodeAndEntryPoint(isolate(), *new_code);
   }
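
(All three factory hunks maintain the same invariant: every path that creates or rebinds a CodeDataContainer — CodeBuilder::BuildInternal, NewOffHeapTrampolineFor, CopyCode — refreshes the cached kind and builtin id from the Code object it will represent, alongside updating the code entry point.)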


@@ -540,19 +540,11 @@ void Code::initialize_flags(CodeKind kind, bool is_turbofanned, int stack_slots,
 }
 
 inline bool Code::is_interpreter_trampoline_builtin() const {
-  // Check for kNoBuiltinId first to abort early when the current Code object
-  // is not a builtin.
-  return builtin_id() != Builtin::kNoBuiltinId &&
-         (builtin_id() == Builtin::kInterpreterEntryTrampoline ||
-          builtin_id() == Builtin::kInterpreterEnterAtBytecode ||
-          builtin_id() == Builtin::kInterpreterEnterAtNextBytecode);
+  return IsInterpreterTrampolineBuiltin(builtin_id());
 }
 
 inline bool Code::is_baseline_trampoline_builtin() const {
-  return builtin_id() != Builtin::kNoBuiltinId &&
-         (builtin_id() == Builtin::kBaselineOutOfLinePrologue ||
-          builtin_id() == Builtin::kBaselineOrInterpreterEnterAtBytecode ||
-          builtin_id() == Builtin::kBaselineOrInterpreterEnterAtNextBytecode);
+  return IsBaselineTrampolineBuiltin(builtin_id());
 }
 
 inline bool Code::is_baseline_leave_frame_builtin() const {
@@ -972,7 +964,45 @@ void CodeDataContainer::clear_padding() {
                kSize - kUnalignedSize);
 }
 
 INT_ACCESSORS(CodeDataContainer, flags, kFlagsOffset)
+
+// Ensure the builtin_id field fits into int16_t, so that we can rely on sign
+// extension to convert int16_t{-1} to kNoBuiltinId.
+// If the asserts fail, update the code that uses kBuiltinIdOffset below.
+STATIC_ASSERT(static_cast<int>(Builtin::kNoBuiltinId) == -1);
+STATIC_ASSERT(Builtins::kBuiltinCount < std::numeric_limits<int16_t>::max());
+
+void CodeDataContainer::initialize_flags(CodeKind kind, Builtin builtin_id) {
+  CHECK(V8_EXTERNAL_CODE_SPACE_BOOL);
+  int value = KindField::encode(kind);
+  set_flags(value);
+  WriteField<int16_t>(kBuiltinIdOffset, static_cast<int16_t>(builtin_id));
+}
+
+#ifdef V8_EXTERNAL_CODE_SPACE
+CodeKind CodeDataContainer::kind() const { return KindField::decode(flags()); }
+
+Builtin CodeDataContainer::builtin_id() const {
+  CHECK(V8_EXTERNAL_CODE_SPACE_BOOL);
+  // Rely on sign extension when converting int16_t to int to preserve the
+  // kNoBuiltinId value.
+  STATIC_ASSERT(static_cast<int>(static_cast<int16_t>(Builtin::kNoBuiltinId)) ==
+                static_cast<int>(Builtin::kNoBuiltinId));
+  int value = ReadField<int16_t>(kBuiltinIdOffset);
+  return static_cast<Builtin>(value);
+}
+
+bool CodeDataContainer::is_builtin() const {
+  CHECK(V8_EXTERNAL_CODE_SPACE_BOOL);
+  return builtin_id() != Builtin::kNoBuiltinId;
+}
+
+inline bool CodeDataContainer::is_interpreter_trampoline_builtin() const {
+  return IsInterpreterTrampolineBuiltin(builtin_id());
+}
+#endif  // V8_EXTERNAL_CODE_SPACE
 
 //
 // A collection of getters and predicates that forward queries to associated
 // Code object.
@@ -986,11 +1016,6 @@ void CodeDataContainer::clear_padding() {
     return FromCodeT(*this).name(cage_base);                       \
   }
 
-DEF_PRIMITIVE_FORWARDING_CDC_GETTER(kind, CodeKind)
-DEF_PRIMITIVE_FORWARDING_CDC_GETTER(builtin_id, Builtin)
-DEF_PRIMITIVE_FORWARDING_CDC_GETTER(is_builtin, bool)
-DEF_PRIMITIVE_FORWARDING_CDC_GETTER(is_interpreter_trampoline_builtin, bool)
-
 DEF_FORWARDING_CDC_GETTER(deoptimization_data, FixedArray)
 DEF_FORWARDING_CDC_GETTER(bytecode_or_interpreter_data, HeapObject)
 DEF_FORWARDING_CDC_GETTER(source_position_table, ByteArray)
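
The int16_t round-trip that the STATIC_ASSERTs above pin down is easy to verify in isolation; a self-contained sketch of the sign-extension argument:

  #include <cassert>
  #include <cstdint>

  int main() {
    constexpr int kNoBuiltinId = -1;  // mirrors Builtin::kNoBuiltinId
    // Store: the 16-bit field ends up holding 0xFFFF.
    int16_t field = static_cast<int16_t>(kNoBuiltinId);
    // Load: integral promotion sign-extends int16_t{-1} back to int{-1}.
    int loaded = field;
    assert(loaded == kNoBuiltinId);
    // Non-negative builtin ids below INT16_MAX round-trip unchanged too.
    int16_t id = 1234;
    assert(static_cast<int>(id) == 1234);
    return 0;
  }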


@@ -82,6 +82,11 @@ class CodeDataContainer : public HeapObject {
   inline void AllocateExternalPointerEntries(Isolate* isolate);
 
+  // Initializes internal flags field which stores cached values of some
+  // properties of the respective Code object.
+  // Available only when V8_EXTERNAL_CODE_SPACE is enabled.
+  inline void initialize_flags(CodeKind kind, Builtin builtin_id);
+
   // Alias for code_entry_point to make it API compatible with Code.
   inline Address InstructionStart() const;
@@ -110,23 +115,25 @@ class CodeDataContainer : public HeapObject {
   DECL_VERIFIER(CodeDataContainer)
 
   // Layout description.
-#define CODE_DATA_FIELDS(V)                                       \
-  /* Strong pointer fields. */                                    \
-  V(kPointerFieldsStrongEndOffset, 0)                             \
-  /* Weak pointer fields. */                                      \
-  V(kNextCodeLinkOffset, kTaggedSize)                             \
-  V(kPointerFieldsWeakEndOffset, 0)                               \
-  /* Strong Code pointer fields. */                               \
-  V(kCodeOffset, V8_EXTERNAL_CODE_SPACE_BOOL ? kTaggedSize : 0)   \
-  V(kCodePointerFieldsStrongEndOffset, 0)                         \
-  /* Raw data fields. */                                          \
-  V(kCodeCageBaseUpper32BitsOffset,                               \
-    V8_EXTERNAL_CODE_SPACE_BOOL ? kTaggedSize : 0)                \
-  V(kCodeEntryPointOffset,                                        \
-    V8_EXTERNAL_CODE_SPACE_BOOL ? kExternalPointerSize : 0)       \
-  V(kKindSpecificFlagsOffset, kInt32Size)                         \
-  V(kUnalignedSize, OBJECT_POINTER_PADDING(kUnalignedSize))       \
-  /* Total size. */                                               \
+#define CODE_DATA_FIELDS(V)                                           \
+  /* Strong pointer fields. */                                        \
+  V(kPointerFieldsStrongEndOffset, 0)                                 \
+  /* Weak pointer fields. */                                          \
+  V(kNextCodeLinkOffset, kTaggedSize)                                 \
+  V(kPointerFieldsWeakEndOffset, 0)                                   \
+  /* Strong Code pointer fields. */                                   \
+  V(kCodeOffset, V8_EXTERNAL_CODE_SPACE_BOOL ? kTaggedSize : 0)       \
+  V(kCodePointerFieldsStrongEndOffset, 0)                             \
+  /* Raw data fields. */                                              \
+  V(kCodeCageBaseUpper32BitsOffset,                                   \
+    V8_EXTERNAL_CODE_SPACE_BOOL ? kTaggedSize : 0)                    \
+  V(kCodeEntryPointOffset,                                            \
+    V8_EXTERNAL_CODE_SPACE_BOOL ? kExternalPointerSize : 0)           \
+  V(kFlagsOffset, V8_EXTERNAL_CODE_SPACE_BOOL ? kUInt16Size : 0)      \
+  V(kBuiltinIdOffset, V8_EXTERNAL_CODE_SPACE_BOOL ? kInt16Size : 0)   \
+  V(kKindSpecificFlagsOffset, kInt32Size)                             \
+  V(kUnalignedSize, OBJECT_POINTER_PADDING(kUnalignedSize))           \
+  /* Total size. */                                                   \
   V(kSize, 0)
 
 DEFINE_FIELD_OFFSET_CONSTANTS(HeapObject::kHeaderSize, CODE_DATA_FIELDS)
@@ -134,11 +141,27 @@ class CodeDataContainer : public HeapObject {
   class BodyDescriptor;
 
+  // Flags layout.
+#define FLAGS_BIT_FIELDS(V, _) \
+  V(KindField, CodeKind, 4, _) \
+  /* The other 12 bits are still free. */
+  DEFINE_BIT_FIELDS(FLAGS_BIT_FIELDS)
+#undef FLAGS_BIT_FIELDS
+  STATIC_ASSERT(FLAGS_BIT_FIELDS_Ranges::kBitsCount == 4);
+  STATIC_ASSERT(!V8_EXTERNAL_CODE_SPACE_BOOL ||
+                (FLAGS_BIT_FIELDS_Ranges::kBitsCount <=
+                 FIELD_SIZE(CodeDataContainer::kFlagsOffset) * kBitsPerByte));
+
  private:
   DECL_ACCESSORS(raw_code, Object)
   DECL_RELAXED_GETTER(raw_code, Object)
   inline void set_code_entry_point(Isolate* isolate, Address value);
 
+  // When V8_EXTERNAL_CODE_SPACE is enabled, the flags field contains cached
+  // values of some flags of the respective Code object.
+  DECL_INT_ACCESSORS(flags)
+
   friend Factory;
   friend FactoryBase<Factory>;
   friend FactoryBase<LocalFactory>;
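
For readers who don't know V8's DEFINE_BIT_FIELDS machinery: the declaration above amounts to a 4-bit kind field at the bottom of the 16-bit flags word, with the remaining 12 bits free. A simplified stand-in for the generated bit-field class (illustrative only, not the generated code):

  #include <cstdint>

  enum class CodeKind : uint8_t { BASELINE = 3 /* ... */ };

  // Rough equivalent of V(KindField, CodeKind, 4, _) over the uint16_t flags.
  struct KindField {
    static constexpr uint16_t kShift = 0;
    static constexpr uint16_t kMask = (1u << 4) - 1;  // 0xF
    static constexpr uint16_t encode(CodeKind kind) {
      return (static_cast<uint16_t>(kind) & kMask) << kShift;
    }
    static constexpr CodeKind decode(uint16_t flags) {
      return static_cast<CodeKind>((flags >> kShift) & kMask);
    }
  };

  static_assert(KindField::decode(KindField::encode(CodeKind::BASELINE)) ==
                    CodeKind::BASELINE, "kind round-trips through flags");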


@@ -325,7 +325,7 @@ bool JSFunction::NeedsResetDueToFlushedBytecode() {
   Object maybe_code = ACQUIRE_READ_FIELD(*this, kCodeOffset);
   if (!maybe_code.IsCodeT()) return false;
-  Code code = FromCodeT(CodeT::cast(maybe_code), kRelaxedLoad);
+  CodeT code = CodeT::cast(maybe_code);
 
   SharedFunctionInfo shared = SharedFunctionInfo::cast(maybe_shared);
   return !shared.is_compiled() && code.builtin_id() != Builtin::kCompileLazy;
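
(This last hunk is the drive-by TSAN fix from the commit message: the old code relaxed-loaded the Code object out of the CodeT only to ask for its builtin_id, a read TSAN can flag as racing with concurrent updates; with builtin_id now cached on the CodeT, i.e. the CodeDataContainer itself, the query never dereferences the Code object at all.)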