[objects] Reshuffle bits in the {Code} object header.
This moves bits that are accessed during a stack-walk out of the first kind-specific flags field. Such bits are accessed during evacuation within the GC and hence need to remain directly in the {Code} object; the other bits in the kind-specific flags are mutable and hence will be moved into a separate data container object.

R=jarin@chromium.org
BUG=v8:6792

Change-Id: I20b7d307110ca0c0eb6dd4df31a35fab4701c6da
Reviewed-on: https://chromium-review.googlesource.com/735145
Reviewed-by: Jaroslav Sevcik <jarin@chromium.org>
Commit-Queue: Michael Starzinger <mstarzinger@chromium.org>
Cr-Commit-Position: refs/heads/master@{#48877}
parent 4d71799789
commit 1a5c3f0db8
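Context for the diff below: both the old and the new layout pack several values into one 32-bit word with V8's BitField<type, shift, size> template. The following is an editor's sketch with a simplified stand-in for that template (not the commit's code and not V8's actual helper); the field names and widths mirror the new flags layout in the Code class hunks further down, and the usage shows the read-modify-write pattern the inline accessors follow.

// Editor's sketch: minimal stand-in for V8's BitField<T, shift, size>,
// shown only to illustrate how the new 32-bit Code::flags word is packed.
#include <cassert>
#include <cstdint>

template <class T, int shift, int size>
struct BitField {
  static constexpr uint32_t kMax = (uint32_t{1} << size) - 1;
  static constexpr uint32_t kMask = kMax << shift;
  static constexpr int kNext = shift + size;
  static constexpr uint32_t encode(T value) {
    return (static_cast<uint32_t>(value) & kMax) << shift;
  }
  static constexpr T decode(uint32_t flags) {
    return static_cast<T>((flags & kMask) >> shift);
  }
  static constexpr uint32_t update(uint32_t flags, T value) {
    return (flags & ~kMask) | encode(value);
  }
};

// Field names and widths mirror the new layout in the Code class hunks below.
using HasUnwindingInfoField = BitField<bool, 0, 1>;
using KindField = BitField<int, HasUnwindingInfoField::kNext, 5>;
using HasTaggedStackField = BitField<bool, KindField::kNext, 1>;
using IsTurbofannedField = BitField<bool, HasTaggedStackField::kNext, 1>;
using StackSlotsField = BitField<int, IsTurbofannedField::kNext, 24>;
static_assert(StackSlotsField::kNext <= 32, "flags word exhausted");

int main() {
  // Read-modify-write, as in Code::set_stack_slots / Code::stack_slots.
  uint32_t flags = 0;
  flags = IsTurbofannedField::update(flags, true);
  flags = StackSlotsField::update(flags, 40);
  assert(IsTurbofannedField::decode(flags));
  assert(StackSlotsField::decode(flags) == 40);
  return 0;
}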
@@ -1829,10 +1829,10 @@ Handle<Code> Factory::NewCode(
   code->initialize_flags(kind);
   code->set_has_unwinding_info(has_unwinding_info);
   code->set_raw_kind_specific_flags1(0);
-  code->set_raw_kind_specific_flags2(0);
+  code->set_safepoint_table_offset(0);
+  code->set_has_tagged_params(true);
   code->set_deoptimization_data(*deopt_data);
-  code->set_raw_type_feedback_info(Smi::kZero);
+  code->set_stub_key(0);
   code->set_next_code_link(*undefined_value(), SKIP_WRITE_BARRIER);
   code->set_handler_table(*handler_table);
   code->set_source_position_table(*source_position_table);
@@ -355,8 +355,6 @@ class Code::BodyDescriptor final : public BodyDescriptorBase {
     STATIC_ASSERT(kDeoptimizationDataOffset + kPointerSize ==
                   kSourcePositionTableOffset);
     STATIC_ASSERT(kSourcePositionTableOffset + kPointerSize ==
-                  kTypeFeedbackInfoOffset);
-    STATIC_ASSERT(kTypeFeedbackInfoOffset + kPointerSize ==
                   kNextCodeLinkOffset);

   static bool IsValidSlot(HeapObject* obj, int offset) {
@@ -847,8 +847,6 @@ void Code::CodeVerify() {
       last_gc_pc = it.rinfo()->pc();
     }
   }
-  CHECK(raw_type_feedback_info() == Smi::kZero ||
-        raw_type_feedback_info()->IsSmi() == is_stub());
 }
@@ -156,7 +156,6 @@ CODE_ACCESSORS(handler_table, FixedArray, kHandlerTableOffset)
 CODE_ACCESSORS(deoptimization_data, FixedArray, kDeoptimizationDataOffset)
 CODE_ACCESSORS(source_position_table, Object, kSourcePositionTableOffset)
 CODE_ACCESSORS(trap_handler_index, Smi, kTrapHandlerIndex)
-CODE_ACCESSORS(raw_type_feedback_info, Object, kTypeFeedbackInfoOffset)
 CODE_ACCESSORS(next_code_link, Object, kNextCodeLinkOffset)
 #undef CODE_ACCESSORS
@@ -165,10 +164,6 @@ void Code::WipeOutHeader() {
   WRITE_FIELD(this, kHandlerTableOffset, nullptr);
   WRITE_FIELD(this, kDeoptimizationDataOffset, nullptr);
   WRITE_FIELD(this, kSourcePositionTableOffset, nullptr);
-  // Do not wipe out major/minor keys on a code stub or IC
-  if (!READ_FIELD(this, kTypeFeedbackInfoOffset)->IsSmi()) {
-    WRITE_FIELD(this, kTypeFeedbackInfoOffset, nullptr);
-  }
   WRITE_FIELD(this, kNextCodeLinkOffset, nullptr);
 }
@@ -189,13 +184,12 @@ ByteArray* Code::SourcePositionTable() const {
 uint32_t Code::stub_key() const {
   DCHECK(is_stub());
-  Smi* smi_key = Smi::cast(raw_type_feedback_info());
-  return static_cast<uint32_t>(smi_key->value());
+  return READ_UINT32_FIELD(this, kStubKeyOffset);
 }

 void Code::set_stub_key(uint32_t key) {
-  DCHECK(is_stub());
-  set_raw_type_feedback_info(Smi::FromInt(key));
+  DCHECK(is_stub() || key == 0); // Allow zero initialization.
+  WRITE_UINT32_FIELD(this, kStubKeyOffset, key);
 }

 byte* Code::instruction_start() const {
@@ -297,10 +291,6 @@ void Code::set_raw_kind_specific_flags1(int value) {
   WRITE_INT_FIELD(this, kKindSpecificFlags1Offset, value);
 }

-void Code::set_raw_kind_specific_flags2(int value) {
-  WRITE_INT_FIELD(this, kKindSpecificFlags2Offset, value);
-}
-
 inline bool Code::is_interpreter_trampoline_builtin() const {
   Builtins* builtins = GetIsolate()->builtins();
   bool is_interpreter_trampoline =
@@ -333,25 +323,24 @@ inline void Code::set_has_unwinding_info(bool state) {
 }

 inline bool Code::has_tagged_params() const {
-  int flags = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
+  int flags = READ_UINT32_FIELD(this, kFlagsOffset);
   return HasTaggedStackField::decode(flags);
 }

 inline void Code::set_has_tagged_params(bool value) {
-  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
+  int previous = READ_UINT32_FIELD(this, kFlagsOffset);
   int updated = HasTaggedStackField::update(previous, value);
-  WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
+  WRITE_UINT32_FIELD(this, kFlagsOffset, updated);
 }

 inline bool Code::is_turbofanned() const {
-  return IsTurbofannedField::decode(
-      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
+  return IsTurbofannedField::decode(READ_UINT32_FIELD(this, kFlagsOffset));
 }

 inline void Code::set_is_turbofanned(bool value) {
-  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
+  int previous = READ_UINT32_FIELD(this, kFlagsOffset);
   int updated = IsTurbofannedField::update(previous, value);
-  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
+  WRITE_UINT32_FIELD(this, kFlagsOffset, updated);
 }

 inline bool Code::can_have_weak_objects() const {
@@ -427,31 +416,27 @@ bool Code::is_builtin() const { return builtin_index() != -1; }
 unsigned Code::stack_slots() const {
   DCHECK(is_turbofanned());
-  return StackSlotsField::decode(
-      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
+  return StackSlotsField::decode(READ_UINT32_FIELD(this, kFlagsOffset));
 }

 void Code::set_stack_slots(unsigned slots) {
-  CHECK(slots <= (1 << kStackSlotsBitCount));
+  CHECK(slots <= StackSlotsField::kMax);
   DCHECK(is_turbofanned());
-  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
+  int previous = READ_UINT32_FIELD(this, kFlagsOffset);
   int updated = StackSlotsField::update(previous, slots);
-  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
+  WRITE_UINT32_FIELD(this, kFlagsOffset, updated);
 }

 unsigned Code::safepoint_table_offset() const {
   DCHECK(is_turbofanned());
-  return SafepointTableOffsetField::decode(
-      READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
+  return READ_UINT32_FIELD(this, kSafepointTableOffsetOffset);
 }

 void Code::set_safepoint_table_offset(unsigned offset) {
-  CHECK(offset <= (1 << kSafepointTableOffsetBitCount));
-  DCHECK(is_turbofanned());
+  CHECK(offset <= std::numeric_limits<uint32_t>::max());
+  DCHECK(is_turbofanned() || offset == 0); // Allow zero initialization.
   DCHECK(IsAligned(offset, static_cast<unsigned>(kIntSize)));
-  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
-  int updated = SafepointTableOffsetField::update(previous, offset);
-  WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
+  WRITE_UINT32_FIELD(this, kSafepointTableOffsetOffset, offset);
 }

 bool Code::marked_for_deoptimization() const {
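The hunk above (together with the stub_key hunk earlier) also changes how two values are stored: the safepoint table offset and the stub key each get their own raw 32-bit header field instead of sharing a packed kind-specific word, so the old 30-bit SafepointTableOffsetField range limit disappears and the accessors become plain loads and stores. The helpers below are an editor's sketch, hypothetical stand-ins for READ_UINT32_FIELD / WRITE_UINT32_FIELD rather than V8's actual macros, showing what such a raw field access amounts to.

// Editor's sketch: hypothetical stand-ins for READ_UINT32_FIELD /
// WRITE_UINT32_FIELD, illustrating that a dedicated 32-bit header field needs
// no bit-level decode/update step and no read-modify-write of neighbouring bits.
#include <cstdint>
#include <cstring>

inline uint32_t ReadUint32Field(const void* object_base, int byte_offset) {
  uint32_t value;
  std::memcpy(&value, static_cast<const char*>(object_base) + byte_offset,
              sizeof(value));  // single 32-bit load
  return value;
}

inline void WriteUint32Field(void* object_base, int byte_offset,
                             uint32_t value) {
  std::memcpy(static_cast<char*>(object_base) + byte_offset, &value,
              sizeof(value));  // single 32-bit store
}

With the offset no longer squeezed into 30 bits next to the has_tagged_params bit, the setter's range check reduces to the trivial offset <= std::numeric_limits<uint32_t>::max() seen in the diff.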
@@ -164,12 +164,7 @@ class Code : public HeapObject {
   // objects.
   DECL_ACCESSORS(trap_handler_index, Smi)

-  // [raw_type_feedback_info]: This field stores various things, depending on
-  // the kind of the code object.
-  // STUB and ICs => major/minor key as Smi.
-  // TODO(mvstanton): rename raw_type_feedback_info to stub_key, since the
-  // field is no longer overloaded.
-  DECL_ACCESSORS(raw_type_feedback_info, Object)
+  // [stub_key]: The major/minor key of a code stub.
   inline uint32_t stub_key() const;
   inline void set_stub_key(uint32_t key);
@@ -196,7 +191,6 @@ class Code : public HeapObject {
   inline bool is_wasm_code() const;

   inline void set_raw_kind_specific_flags1(int value);
-  inline void set_raw_kind_specific_flags2(int value);

   // Testers for interpreter builtins.
   inline bool is_interpreter_trampoline_builtin() const;
@@ -436,16 +430,15 @@ class Code : public HeapObject {
       kHandlerTableOffset + kPointerSize;
   static const int kSourcePositionTableOffset =
       kDeoptimizationDataOffset + kPointerSize;
-  // For FUNCTION kind, we store the type feedback info here.
-  static const int kTypeFeedbackInfoOffset =
+  static const int kNextCodeLinkOffset =
       kSourcePositionTableOffset + kPointerSize;
-  static const int kNextCodeLinkOffset = kTypeFeedbackInfoOffset + kPointerSize;
   static const int kInstructionSizeOffset = kNextCodeLinkOffset + kPointerSize;
   static const int kFlagsOffset = kInstructionSizeOffset + kIntSize;
   static const int kKindSpecificFlags1Offset = kFlagsOffset + kIntSize;
-  static const int kKindSpecificFlags2Offset =
+  static const int kSafepointTableOffsetOffset =
       kKindSpecificFlags1Offset + kIntSize;
-  static const int kConstantPoolOffset = kKindSpecificFlags2Offset + kIntSize;
+  static const int kStubKeyOffset = kSafepointTableOffsetOffset + kIntSize;
+  static const int kConstantPoolOffset = kStubKeyOffset + kIntSize;
   static const int kBuiltinIndexOffset =
       kConstantPoolOffset + kConstantPoolSize;
   static const int kTrapHandlerIndex = kBuiltinIndexOffset + kIntSize;
@@ -468,33 +461,28 @@ class Code : public HeapObject {
   // Flags layout. BitField<type, shift, size>.
   class HasUnwindingInfoField : public BitField<bool, 0, 1> {};
   class KindField : public BitField<Kind, HasUnwindingInfoField::kNext, 5> {};
-  STATIC_ASSERT(NUMBER_OF_KINDS <= KindField::kMax);
+  class HasTaggedStackField : public BitField<bool, KindField::kNext, 1> {};
+  class IsTurbofannedField
+      : public BitField<bool, HasTaggedStackField::kNext, 1> {};
+  class StackSlotsField : public BitField<int, IsTurbofannedField::kNext, 24> {
+  };
+  static_assert(NUMBER_OF_KINDS <= KindField::kMax, "Code::KindField size");
+  static_assert(StackSlotsField::kNext <= 32, "Code::flags field exhausted");

   // KindSpecificFlags1 layout (STUB, BUILTIN and OPTIMIZED_FUNCTION)
-  static const int kStackSlotsFirstBit = 0;
-  static const int kStackSlotsBitCount = 24;
-  static const int kMarkedForDeoptimizationBit =
-      kStackSlotsFirstBit + kStackSlotsBitCount;
+  static const int kMarkedForDeoptimizationBit = 0;
   static const int kDeoptAlreadyCountedBit = kMarkedForDeoptimizationBit + 1;
-  static const int kIsTurbofannedBit = kDeoptAlreadyCountedBit + 1;
-  static const int kCanHaveWeakObjects = kIsTurbofannedBit + 1;
+  static const int kCanHaveWeakObjects = kDeoptAlreadyCountedBit + 1;
   // Could be moved to overlap previous bits when we need more space.
   static const int kIsConstructStub = kCanHaveWeakObjects + 1;
   static const int kIsPromiseRejection = kIsConstructStub + 1;
   static const int kIsExceptionCaught = kIsPromiseRejection + 1;

-  STATIC_ASSERT(kStackSlotsFirstBit + kStackSlotsBitCount <= 32);
   STATIC_ASSERT(kIsExceptionCaught + 1 <= 32);

-  class StackSlotsField
-      : public BitField<int, kStackSlotsFirstBit, kStackSlotsBitCount> {
-  };  // NOLINT
   class MarkedForDeoptimizationField
       : public BitField<bool, kMarkedForDeoptimizationBit, 1> {};  // NOLINT
   class DeoptAlreadyCountedField
       : public BitField<bool, kDeoptAlreadyCountedBit, 1> {};  // NOLINT
-  class IsTurbofannedField : public BitField<bool, kIsTurbofannedBit, 1> {
-  };  // NOLINT
   class CanHaveWeakObjectsField
       : public BitField<bool, kCanHaveWeakObjects, 1> {};  // NOLINT
   class IsConstructStubField : public BitField<bool, kIsConstructStub, 1> {
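A quick sanity check on the layout above (an editor's addition, not part of the commit): the new kind-independent flags word uses 1 bit for unwinding info, 5 for the kind, 1 for has_tagged_params, 1 for is_turbofanned and 24 for the stack slot count, which fills the 32-bit word exactly; the bits left behind in kind_specific_flags1 are six booleans, well within 32 bits, matching the retained STATIC_ASSERT(kIsExceptionCaught + 1 <= 32).

// Editor's sketch: standalone arithmetic mirroring the layout in the hunk above.
static_assert(1 + 5 + 1 + 1 + 24 == 32,
              "new Code::flags layout fills the 32-bit word exactly");
static_assert(6 <= 32,
              "six boolean bits remain in kind_specific_flags1");
int main() { return 0; }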
@@ -504,22 +492,6 @@ class Code : public HeapObject {
   class IsExceptionCaughtField : public BitField<bool, kIsExceptionCaught, 1> {
   };  // NOLINT

-  // KindSpecificFlags2 layout (ALL)
-  static const int kHasTaggedStackBit = 0;
-  class HasTaggedStackField : public BitField<bool, kHasTaggedStackBit, 1> {};
-
-  // KindSpecificFlags2 layout (STUB and OPTIMIZED_FUNCTION)
-  static const int kSafepointTableOffsetFirstBit = kHasTaggedStackBit + 1;
-  static const int kSafepointTableOffsetBitCount = 30;
-
-  STATIC_ASSERT(kSafepointTableOffsetFirstBit + kSafepointTableOffsetBitCount <=
-                32);
-  STATIC_ASSERT(1 + kSafepointTableOffsetBitCount <= 32);
-
-  class SafepointTableOffsetField
-      : public BitField<int, kSafepointTableOffsetFirstBit,
-                        kSafepointTableOffsetBitCount> {};  // NOLINT
-
   static const int kArgumentsBits = 16;
   static const int kMaxArguments = (1 << kArgumentsBits) - 1;