Remove all leftovers of store buffer
V8's write barrier doesn't use a store buffer anymore but inserts directly
into the remembered set. However, there were still some comments/method
definitions left.

Bug: v8:9454
Change-Id: Ic3bc3394750f1d4989027e07dbc9201c3f484ccd
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3270536
Reviewed-by: Jakob Gruber <jgruber@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Commit-Queue: Dominik Inführ <dinfuehr@chromium.org>
Cr-Commit-Position: refs/heads/main@{#77814}
commit a760f03a6e (parent c88140fed6)
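As context for the hunks below: with the old scheme the barrier's slow path appended slot addresses to a bounded store buffer that had to be drained on overflow; now the slot is recorded directly in the remembered set of the page that holds it. A minimal, self-contained C++ sketch of that idea follows (illustrative only, not V8 code; RememberedSetModel, GenerationalBarrierSlow, and the chunk/offset representation are hypothetical):

#include <cstdint>
#include <unordered_map>
#include <unordered_set>

using Address = uintptr_t;

// One remembered set per old-generation memory chunk, keyed by the chunk's
// base address; each entry is the offset of a slot that may point into the
// young generation.
struct RememberedSetModel {
  std::unordered_map<Address, std::unordered_set<uint32_t>> slots_per_chunk;

  void Insert(Address chunk_base, Address slot) {
    // Direct insertion: no intermediate buffer, hence nothing to drain on
    // overflow and no GC triggered from the barrier's slow path.
    slots_per_chunk[chunk_base].insert(
        static_cast<uint32_t>(slot - chunk_base));
  }
};

// Slow path of a generational barrier in this model: an old-generation object
// stored a pointer to a young-generation object, so remember the slot.
void GenerationalBarrierSlow(RememberedSetModel& remembered_set,
                             Address chunk_base, Address slot) {
  remembered_set.Insert(chunk_base, slot);
}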
@@ -248,7 +248,8 @@ class WriteBarrierCodeStubAssembler : public CodeStubAssembler {
 
   void GenerationalWriteBarrier(SaveFPRegsMode fp_mode) {
     Label incremental_wb(this), test_old_to_young_flags(this),
-        store_buffer_exit(this), store_buffer_incremental_wb(this), next(this);
+        remembered_set_only(this), remembered_set_and_incremental_wb(this),
+        next(this);
 
     // When incremental marking is not on, we skip cross generation pointer
     // checking here, because there are checks for
@@ -258,7 +259,7 @@ class WriteBarrierCodeStubAssembler : public CodeStubAssembler {
     // stub, which serves as the cross generation checking.
     auto slot =
         UncheckedParameter<IntPtrT>(WriteBarrierDescriptor::kSlotAddress);
-    Branch(IsMarking(), &test_old_to_young_flags, &store_buffer_exit);
+    Branch(IsMarking(), &test_old_to_young_flags, &remembered_set_only);
 
     BIND(&test_old_to_young_flags);
     {
@@ -275,10 +276,11 @@ class WriteBarrierCodeStubAssembler : public CodeStubAssembler {
           UncheckedParameter<Object>(WriteBarrierDescriptor::kObject));
       TNode<BoolT> object_is_young =
           IsPageFlagSet(object, MemoryChunk::kIsInYoungGenerationMask);
-      Branch(object_is_young, &incremental_wb, &store_buffer_incremental_wb);
+      Branch(object_is_young, &incremental_wb,
+             &remembered_set_and_incremental_wb);
     }
 
-    BIND(&store_buffer_exit);
+    BIND(&remembered_set_only);
     {
       TNode<IntPtrT> object = BitcastTaggedToWord(
           UncheckedParameter<Object>(WriteBarrierDescriptor::kObject));
@@ -286,7 +288,7 @@ class WriteBarrierCodeStubAssembler : public CodeStubAssembler {
       Goto(&next);
     }
 
-    BIND(&store_buffer_incremental_wb);
+    BIND(&remembered_set_and_incremental_wb);
     {
       TNode<IntPtrT> object = BitcastTaggedToWord(
           UncheckedParameter<Object>(WriteBarrierDescriptor::kObject));
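For readers tracking the renames above, here is a plain C++ model (not CSA and not part of this change) of how the renamed labels map onto the barrier's dispatch; the check of whether the stored value is young sits in lines elided between the hunks, so that part is an assumption, and BarrierPath/ChooseBarrierPath are hypothetical names:

#include <cstdio>

enum class BarrierPath {
  kRememberedSetOnly,              // label formerly named store_buffer_exit
  kRememberedSetAndIncrementalWb,  // formerly store_buffer_incremental_wb
  kIncrementalWb,
};

BarrierPath ChooseBarrierPath(bool is_marking, bool value_is_young,
                              bool object_is_young) {
  // Mirrors Branch(IsMarking(), &test_old_to_young_flags, &remembered_set_only).
  if (!is_marking) return BarrierPath::kRememberedSetOnly;
  // Assumed from the elided lines: an old value needs no old-to-new
  // remembered-set entry, only the incremental-marking barrier.
  if (!value_is_young) return BarrierPath::kIncrementalWb;
  // Mirrors Branch(object_is_young, &incremental_wb,
  //                &remembered_set_and_incremental_wb).
  return object_is_young ? BarrierPath::kIncrementalWb
                         : BarrierPath::kRememberedSetAndIncrementalWb;
}

int main() {
  // Example: marking is off, so the barrier only records the slot.
  std::printf("%d\n",
              static_cast<int>(ChooseBarrierPath(false, true, false)));
  return 0;
}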
@@ -113,7 +113,7 @@ transitioning macro MorphAndEnqueuePromiseReaction(implicit context: Context)(
 
   // Morph {current} from a PromiseReaction into a PromiseReactionJobTask
   // and schedule that on the microtask queue. We try to minimize the number
-  // of stores here to avoid screwing up the store buffer.
+  // of stores here to avoid write barrier overhead.
   static_assert(
       kPromiseReactionSize ==
       kPromiseReactionJobTaskSizeOfAllPromiseReactionJobTasks);
@@ -326,7 +326,7 @@ int TurboAssembler::RequiredStackSizeForCallerSaved(SaveFPRegsMode fp_mode,
 int TurboAssembler::PushCallerSaved(SaveFPRegsMode fp_mode, Register exclusion1,
                                     Register exclusion2, Register exclusion3) {
   ASM_CODE_COMMENT(this);
-  // We don't allow a GC during a store buffer overflow so there is no need to
+  // We don't allow a GC in a write barrier slow path so there is no need to
   // store the registers in any particular way, but we do have to store and
   // restore them.
   int bytes = 0;
@@ -819,7 +819,7 @@ int TurboAssembler::RequiredStackSizeForCallerSaved(SaveFPRegsMode fp_mode,
 int TurboAssembler::PushCallerSaved(SaveFPRegsMode fp_mode, Register exclusion1,
                                     Register exclusion2, Register exclusion3) {
   ASM_CODE_COMMENT(this);
-  // We don't allow a GC during a store buffer overflow so there is no need to
+  // We don't allow a GC in a write barrier slow path so there is no need to
   // store the registers in any particular way, but we do have to store and
   // restore them.
   int bytes = 0;
@@ -1062,7 +1062,7 @@ class Heap {
   void IterateStackRoots(RootVisitor* v);
 
   // ===========================================================================
-  // Store buffer API. =========================================================
+  // Remembered set API. =======================================================
   // ===========================================================================
 
   // Used for query incremental marking status in generated code.
@@ -1072,10 +1072,6 @@ class Heap {
 
   void SetIsMarkingFlag(uint8_t flag) { is_marking_flag_ = flag; }
 
-  V8_EXPORT_PRIVATE Address* store_buffer_top_address();
-  static intptr_t store_buffer_mask_constant();
-  static Address store_buffer_overflow_function_address();
-
   void ClearRecordedSlot(HeapObject object, ObjectSlot slot);
   void ClearRecordedSlotRange(Address start, Address end);
   static int InsertIntoRememberedSetFromCode(MemoryChunk* chunk, Address slot);
@@ -61,8 +61,7 @@ class SemiSpace;
 // collection. The large object space is paged. Pages in large object space
 // may be larger than the page size.
 //
-// A store-buffer based write barrier is used to keep track of intergenerational
-// references. See heap/store-buffer.h.
+// A remembered set is used to keep track of intergenerational references.
 //
 // During scavenges and mark-sweep collections we sometimes (after a store
 // buffer overflow) iterate intergenerational pointers without decoding heap
@@ -303,9 +303,7 @@ namespace internal {
   SC(contexts_created_by_snapshot, V8.ContextsCreatedBySnapshot) \
   /* Number of code objects found from pc. */ \
   SC(pc_to_code, V8.PcToCode) \
-  SC(pc_to_code_cached, V8.PcToCodeCached) \
-  /* The store-buffer implementation of the write barrier. */ \
-  SC(store_buffer_overflows, V8.StoreBufferOverflows)
+  SC(pc_to_code_cached, V8.PcToCodeCached)
 
 #define STATS_COUNTER_LIST_2(SC) \
   /* Amount of (JS) compiled code. */ \
@@ -92,7 +92,6 @@ INTERESTING_OLD_GEN_KEYS="\
 clear.global_handles \
 clear.maps \
 clear.slots_buffer \
-clear.store_buffer \
 clear.string_table \
 clear.weak_collections \
 clear.weak_lists \