[heap] Templatize Worklist segment size
Bug: chromium:738865
Change-Id: I67b65f3006d6fe7e88854806f364d9863076b49b
Reviewed-on: https://chromium-review.googlesource.com/558969
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#46397}
This commit is contained in:
parent ff3b948c6b
commit 8c8bb2b150
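For orientation before the diff: the change turns the fixed-capacity Worklist (and its separate WorklistView helper) into a Worklist<SEGMENT_SIZE> class template whose per-task accessor is the nested Worklist<SEGMENT_SIZE>::View. A minimal sketch of the resulting usage pattern, assuming the V8-internal Worklist and HeapObject types touched by this diff (not a standalone program; the local names are illustrative only):

    // Segment capacity is now a template argument instead of a hard-coded 64.
    using MarkingWorklist = Worklist<64 /* segment size */>;

    MarkingWorklist worklist;
    // Per-task handle bound to a task id; this replaces the old stand-alone
    // WorklistView class.
    MarkingWorklist::View view(&worklist, 0 /* task id, e.g. main thread */);

    HeapObject* object = nullptr;
    if (view.Pop(&object)) {   // delegates to worklist.Pop(task_id, &object)
      // ... mark `object` ...
    }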
@@ -48,7 +48,8 @@ class ConcurrentMarkingVisitor final
  public:
   using BaseClass = HeapVisitor<int, ConcurrentMarkingVisitor>;

-  explicit ConcurrentMarkingVisitor(Worklist* shared, Worklist* bailout,
+  explicit ConcurrentMarkingVisitor(ConcurrentMarking::MarkingWorklist* shared,
+                                    ConcurrentMarking::MarkingWorklist* bailout,
                                     int task_id)
       : shared_(shared, task_id), bailout_(bailout, task_id) {}

@@ -246,8 +247,8 @@ class ConcurrentMarkingVisitor final
     return MarkingState::Internal(object);
   }

-  WorklistView shared_;
-  WorklistView bailout_;
+  ConcurrentMarking::MarkingWorklist::View shared_;
+  ConcurrentMarking::MarkingWorklist::View bailout_;
   SlotSnapshot slot_snapshot_;
 };

@@ -275,8 +276,8 @@ class ConcurrentMarking::Task : public CancelableTask {
   DISALLOW_COPY_AND_ASSIGN(Task);
 };

-ConcurrentMarking::ConcurrentMarking(Heap* heap, Worklist* shared,
-                                     Worklist* bailout)
+ConcurrentMarking::ConcurrentMarking(Heap* heap, MarkingWorklist* shared,
+                                     MarkingWorklist* bailout)
     : heap_(heap),
       pending_task_semaphore_(0),
       shared_(shared),
@@ -15,11 +15,15 @@ namespace internal {

 class Heap;
 class Isolate;
+template <int SEGMENT_SIZE>
 class Worklist;

 class ConcurrentMarking {
  public:
-  ConcurrentMarking(Heap* heap, Worklist* shared_, Worklist* bailout_);
+  using MarkingWorklist = Worklist<64 /* segment size */>;
+
+  ConcurrentMarking(Heap* heap, MarkingWorklist* shared_,
+                    MarkingWorklist* bailout_);

   void StartTask();
   void WaitForTaskToComplete();

@@ -31,8 +35,8 @@ class ConcurrentMarking {
   void Run(int task_id);
   Heap* heap_;
   base::Semaphore pending_task_semaphore_;
-  Worklist* shared_;
-  Worklist* bailout_;
+  MarkingWorklist* shared_;
+  MarkingWorklist* bailout_;
   bool is_task_pending_;
 };
@@ -2182,8 +2182,9 @@ void MarkCompactCollector::RecordObjectStats() {
 class YoungGenerationMarkingVisitor final
     : public NewSpaceVisitor<YoungGenerationMarkingVisitor> {
  public:
-  YoungGenerationMarkingVisitor(Heap* heap, Worklist* global_worklist,
-                                int task_id)
+  YoungGenerationMarkingVisitor(
+      Heap* heap, MinorMarkCompactCollector::MarkingWorklist* global_worklist,
+      int task_id)
       : heap_(heap), worklist_(global_worklist, task_id) {}

   V8_INLINE void VisitPointers(HeapObject* host, Object** start,

@@ -2218,7 +2219,7 @@ class YoungGenerationMarkingVisitor final
   }

   Heap* heap_;
-  MinorMarkCompactCollector::MarkingWorklist worklist_;
+  MinorMarkCompactCollector::MarkingWorklist::View worklist_;
 };

 class MinorMarkCompactCollector::RootMarkingVisitor : public RootVisitor {

@@ -2272,9 +2273,9 @@ class MarkingItem : public ItemParallelJob::Item {

 class YoungGenerationMarkingTask : public ItemParallelJob::Task {
  public:
-  YoungGenerationMarkingTask(Isolate* isolate,
-                             MinorMarkCompactCollector* collector,
-                             Worklist* global_worklist, int task_id)
+  YoungGenerationMarkingTask(
+      Isolate* isolate, MinorMarkCompactCollector* collector,
+      MinorMarkCompactCollector::MarkingWorklist* global_worklist, int task_id)
       : ItemParallelJob::Task(isolate),
         collector_(collector),
         marking_worklist_(global_worklist, task_id),

@@ -2347,7 +2348,7 @@ class YoungGenerationMarkingTask : public ItemParallelJob::Task {
   }

   MinorMarkCompactCollector* collector_;
-  MinorMarkCompactCollector::MarkingWorklist marking_worklist_;
+  MinorMarkCompactCollector::MarkingWorklist::View marking_worklist_;
   YoungGenerationMarkingVisitor visitor_;
   std::unordered_map<Page*, intptr_t, Page::Hasher> local_live_bytes_;
 };

@@ -2511,12 +2512,13 @@ class MinorMarkCompactCollector::RootMarkingVisitorSeedOnly

 MinorMarkCompactCollector::MinorMarkCompactCollector(Heap* heap)
     : MarkCompactCollectorBase(heap),
-      worklist_(new Worklist()),
+      worklist_(new MinorMarkCompactCollector::MarkingWorklist()),
       main_marking_visitor_(
           new YoungGenerationMarkingVisitor(heap, worklist_, kMainMarker)),
       page_parallel_job_semaphore_(0) {
-  static_assert(kNumMarkers <= Worklist::kMaxNumTasks,
-                "more marker tasks than marking deque can handle");
+  static_assert(
+      kNumMarkers <= MinorMarkCompactCollector::MarkingWorklist::kMaxNumTasks,
+      "more marker tasks than marking deque can handle");
 }

 MinorMarkCompactCollector::~MinorMarkCompactCollector() {

@@ -2617,7 +2619,7 @@ void MinorMarkCompactCollector::ProcessMarkingWorklist() {
 }

 void MinorMarkCompactCollector::EmptyMarkingWorklist() {
-  MarkingWorklist marking_worklist(worklist(), kMainMarker);
+  MarkingWorklist::View marking_worklist(worklist(), kMainMarker);
   HeapObject* object = nullptr;
   while (marking_worklist.Pop(&object)) {
     DCHECK(!object->IsFiller());
@@ -24,7 +24,9 @@ class ItemParallelJob;
 class MigrationObserver;
 class RecordMigratedSlotVisitor;
 class YoungGenerationMarkingVisitor;
+template <int SEGMENT_SIZE>
 class Worklist;
+template <int SEGMENT_SIZE>
 class WorklistView;

 class ObjectMarking : public AllStatic {

@@ -348,14 +350,14 @@ class MinorMarkCompactCollector final : public MarkCompactCollectorBase {
   void CleanupSweepToIteratePages();

  private:
-  using MarkingWorklist = WorklistView;
+  using MarkingWorklist = Worklist<64 /* segment size */>;
   class RootMarkingVisitorSeedOnly;
   class RootMarkingVisitor;

   static const int kNumMarkers = 8;
   static const int kMainMarker = 0;

-  inline Worklist* worklist() { return worklist_; }
+  inline MarkingWorklist* worklist() { return worklist_; }

   inline YoungGenerationMarkingVisitor* main_marking_visitor() {
     return main_marking_visitor_;

@@ -377,7 +379,7 @@ class MinorMarkCompactCollector final : public MarkCompactCollectorBase {

   int NumberOfParallelMarkingTasks(int pages);

-  Worklist* worklist_;
+  MarkingWorklist* worklist_;
   YoungGenerationMarkingVisitor* main_marking_visitor_;
   base::Semaphore page_parallel_job_semaphore_;
   std::vector<Page*> new_space_evacuation_pages_;

@@ -394,6 +396,8 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
   // Wrapper for the shared and bailout worklists.
   class MarkingWorklist {
    public:
+    using ConcurrentMarkingWorklist = Worklist<64>;
+
     static const int kMainThread = 0;
     // The heap parameter is not used but needed to match the sequential case.
     explicit MarkingWorklist(Heap* heap) {}

@@ -439,8 +443,8 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
       shared_.Update(callback);
     }

-    Worklist* shared() { return &shared_; }
-    Worklist* bailout() { return &bailout_; }
+    ConcurrentMarkingWorklist* shared() { return &shared_; }
+    ConcurrentMarkingWorklist* bailout() { return &bailout_; }

     // These empty functions are needed to match the interface
     // of the sequential marking deque.

@@ -453,8 +457,8 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
     bool overflowed() const { return false; }

    private:
-    Worklist shared_;
-    Worklist bailout_;
+    ConcurrentMarkingWorklist shared_;
+    ConcurrentMarkingWorklist bailout_;
   };
 #else
   using MarkingWorklist = SequentialMarkingDeque;
@@ -25,10 +25,34 @@ class HeapObject;
 //
 // Work stealing is best effort, i.e., there is no way to inform other tasks
 // of the need of items.
+template <int SEGMENT_SIZE>
 class Worklist {
  public:
+  class View {
+   public:
+    View(Worklist<SEGMENT_SIZE>* worklist, int task_id)
+        : worklist_(worklist), task_id_(task_id) {}
+
+    // Pushes an object onto the worklist.
+    bool Push(HeapObject* object) { return worklist_->Push(task_id_, object); }
+
+    // Pops an object from the worklist.
+    bool Pop(HeapObject** object) { return worklist_->Pop(task_id_, object); }
+
+    // Returns true if the local portion of the worklist is empty.
+    bool IsLocalEmpty() { return worklist_->IsLocalEmpty(task_id_); }
+
+    // Returns true if the worklist is empty. Can only be used from the main
+    // thread without concurrent access.
+    bool IsGlobalEmpty() { return worklist_->IsGlobalEmpty(); }
+
+   private:
+    Worklist<SEGMENT_SIZE>* worklist_;
+    int task_id_;
+  };
+
   static const int kMaxNumTasks = 8;
-  static const int kSegmentCapacity = 64;
+  static const int kSegmentCapacity = SEGMENT_SIZE;

   Worklist() {
     for (int i = 0; i < kMaxNumTasks; i++) {

@@ -141,16 +165,17 @@ class Worklist {
   }

  private:
-  FRIEND_TEST(Worklist, SegmentCreate);
-  FRIEND_TEST(Worklist, SegmentPush);
-  FRIEND_TEST(Worklist, SegmentPushPop);
-  FRIEND_TEST(Worklist, SegmentIsEmpty);
-  FRIEND_TEST(Worklist, SegmentIsFull);
-  FRIEND_TEST(Worklist, SegmentClear);
-  FRIEND_TEST(Worklist, SegmentFullPushFails);
-  FRIEND_TEST(Worklist, SegmentEmptyPopFails);
-  FRIEND_TEST(Worklist, SegmentUpdateNull);
-  FRIEND_TEST(Worklist, SegmentUpdate);
+  using TestWorklist = Worklist<64>;
+  FRIEND_TEST(TestWorklist, SegmentCreate);
+  FRIEND_TEST(TestWorklist, SegmentPush);
+  FRIEND_TEST(TestWorklist, SegmentPushPop);
+  FRIEND_TEST(TestWorklist, SegmentIsEmpty);
+  FRIEND_TEST(TestWorklist, SegmentIsFull);
+  FRIEND_TEST(TestWorklist, SegmentClear);
+  FRIEND_TEST(TestWorklist, SegmentFullPushFails);
+  FRIEND_TEST(TestWorklist, SegmentEmptyPopFails);
+  FRIEND_TEST(TestWorklist, SegmentUpdateNull);
+  FRIEND_TEST(TestWorklist, SegmentUpdate);

   class Segment {
    public:

@@ -227,29 +252,6 @@ class Worklist {
   std::vector<Segment*> global_pool_;
 };

-class WorklistView {
- public:
-  WorklistView(Worklist* worklist, int task_id)
-      : worklist_(worklist), task_id_(task_id) {}
-
-  // Pushes an object onto the worklist.
-  bool Push(HeapObject* object) { return worklist_->Push(task_id_, object); }
-
-  // Pops an object from the worklist.
-  bool Pop(HeapObject** object) { return worklist_->Pop(task_id_, object); }
-
-  // Returns true if the local portion of the worklist is empty.
-  bool IsLocalEmpty() { return worklist_->IsLocalEmpty(task_id_); }
-
-  // Returns true if the worklist is empty. Can only be used from the main
-  // thread without concurrent access.
-  bool IsGlobalEmpty() { return worklist_->IsGlobalEmpty(); }
-
- private:
-  Worklist* worklist_;
-  int task_id_;
-};
-
 }  // namespace internal
 }  // namespace v8
@@ -19,8 +19,9 @@ TEST(ConcurrentMarking) {
   if (!i::FLAG_concurrent_marking) return;
   CcTest::InitializeVM();
   Heap* heap = CcTest::heap();
-  Worklist shared, bailout;
-  for (int i = 0; i <= Worklist::kSegmentCapacity; i++) {
+  ConcurrentMarking::MarkingWorklist shared, bailout;
+  for (int i = 0; i <= ConcurrentMarking::MarkingWorklist::kSegmentCapacity;
+       i++) {
     shared.Push(0, heap->undefined_value());
   }
   HeapObject* object;
@@ -9,24 +9,26 @@
 namespace v8 {
 namespace internal {

+using TestWorklist = Worklist<64>;
+
 class HeapObject {};

-TEST(Worklist, SegmentCreate) {
-  Worklist::Segment segment;
+TEST(TestWorklist, SegmentCreate) {
+  TestWorklist::Segment segment;
   EXPECT_TRUE(segment.IsEmpty());
   EXPECT_EQ(0u, segment.Size());
   EXPECT_FALSE(segment.IsFull());
 }

-TEST(Worklist, SegmentPush) {
-  Worklist::Segment segment;
+TEST(TestWorklist, SegmentPush) {
+  TestWorklist::Segment segment;
   EXPECT_EQ(0u, segment.Size());
   EXPECT_TRUE(segment.Push(nullptr));
   EXPECT_EQ(1u, segment.Size());
 }

-TEST(Worklist, SegmentPushPop) {
-  Worklist::Segment segment;
+TEST(TestWorklist, SegmentPushPop) {
+  TestWorklist::Segment segment;
   EXPECT_TRUE(segment.Push(nullptr));
   EXPECT_EQ(1u, segment.Size());
   HeapObject dummy;
@@ -36,52 +38,52 @@ TEST(Worklist, SegmentPushPop) {
   EXPECT_EQ(nullptr, object);
 }

-TEST(Worklist, SegmentIsEmpty) {
-  Worklist::Segment segment;
+TEST(TestWorklist, SegmentIsEmpty) {
+  TestWorklist::Segment segment;
   EXPECT_TRUE(segment.IsEmpty());
   EXPECT_TRUE(segment.Push(nullptr));
   EXPECT_FALSE(segment.IsEmpty());
 }

-TEST(Worklist, SegmentIsFull) {
-  Worklist::Segment segment;
+TEST(TestWorklist, SegmentIsFull) {
+  TestWorklist::Segment segment;
   EXPECT_FALSE(segment.IsFull());
-  for (size_t i = 0; i < Worklist::Segment::kCapacity; i++) {
+  for (size_t i = 0; i < TestWorklist::Segment::kCapacity; i++) {
     EXPECT_TRUE(segment.Push(nullptr));
   }
   EXPECT_TRUE(segment.IsFull());
 }

-TEST(Worklist, SegmentClear) {
-  Worklist::Segment segment;
+TEST(TestWorklist, SegmentClear) {
+  TestWorklist::Segment segment;
   EXPECT_TRUE(segment.Push(nullptr));
   EXPECT_FALSE(segment.IsEmpty());
   segment.Clear();
   EXPECT_TRUE(segment.IsEmpty());
-  for (size_t i = 0; i < Worklist::Segment::kCapacity; i++) {
+  for (size_t i = 0; i < TestWorklist::Segment::kCapacity; i++) {
     EXPECT_TRUE(segment.Push(nullptr));
   }
 }

-TEST(Worklist, SegmentFullPushFails) {
-  Worklist::Segment segment;
+TEST(TestWorklist, SegmentFullPushFails) {
+  TestWorklist::Segment segment;
   EXPECT_FALSE(segment.IsFull());
-  for (size_t i = 0; i < Worklist::Segment::kCapacity; i++) {
+  for (size_t i = 0; i < TestWorklist::Segment::kCapacity; i++) {
     EXPECT_TRUE(segment.Push(nullptr));
   }
   EXPECT_TRUE(segment.IsFull());
   EXPECT_FALSE(segment.Push(nullptr));
 }

-TEST(Worklist, SegmentEmptyPopFails) {
-  Worklist::Segment segment;
+TEST(TestWorklist, SegmentEmptyPopFails) {
+  TestWorklist::Segment segment;
   EXPECT_TRUE(segment.IsEmpty());
   HeapObject* object;
   EXPECT_FALSE(segment.Pop(&object));
 }

-TEST(Worklist, SegmentUpdateNull) {
-  Worklist::Segment segment;
+TEST(TestWorklist, SegmentUpdateNull) {
+  TestWorklist::Segment segment;
   HeapObject* object;
   object = reinterpret_cast<HeapObject*>(&object);
   EXPECT_TRUE(segment.Push(object));
@@ -89,8 +91,8 @@ TEST(Worklist, SegmentUpdateNull) {
   EXPECT_TRUE(segment.IsEmpty());
 }

-TEST(Worklist, SegmentUpdate) {
-  Worklist::Segment segment;
+TEST(TestWorklist, SegmentUpdate) {
+  TestWorklist::Segment segment;
   HeapObject* objectA;
   objectA = reinterpret_cast<HeapObject*>(&objectA);
   HeapObject* objectB;
@@ -102,16 +104,16 @@ TEST(Worklist, SegmentUpdate) {
   EXPECT_EQ(object, objectB);
 }

-TEST(Worklist, CreateEmpty) {
-  Worklist worklist;
-  WorklistView worklist_view(&worklist, 0);
+TEST(TestWorklist, CreateEmpty) {
+  TestWorklist worklist;
+  TestWorklist::View worklist_view(&worklist, 0);
   EXPECT_TRUE(worklist_view.IsLocalEmpty());
   EXPECT_TRUE(worklist.IsGlobalEmpty());
 }

-TEST(Worklist, LocalPushPop) {
-  Worklist worklist;
-  WorklistView worklist_view(&worklist, 0);
+TEST(TestWorklist, LocalPushPop) {
+  TestWorklist worklist;
+  TestWorklist::View worklist_view(&worklist, 0);
   HeapObject dummy;
   HeapObject* retrieved = nullptr;
   EXPECT_TRUE(worklist_view.Push(&dummy));
@@ -120,11 +122,11 @@ TEST(Worklist, LocalPushPop) {
   EXPECT_EQ(&dummy, retrieved);
 }

-TEST(Worklist, LocalIsBasedOnId) {
-  Worklist worklist;
+TEST(TestWorklist, LocalIsBasedOnId) {
+  TestWorklist worklist;
   // Use the same id.
-  WorklistView worklist_view1(&worklist, 0);
-  WorklistView worklist_view2(&worklist, 0);
+  TestWorklist::View worklist_view1(&worklist, 0);
+  TestWorklist::View worklist_view2(&worklist, 0);
   HeapObject dummy;
   HeapObject* retrieved = nullptr;
   EXPECT_TRUE(worklist_view1.Push(&dummy));
@@ -136,10 +138,10 @@ TEST(Worklist, LocalIsBasedOnId) {
   EXPECT_TRUE(worklist_view2.IsLocalEmpty());
 }

-TEST(Worklist, LocalPushStaysPrivate) {
-  Worklist worklist;
-  WorklistView worklist_view1(&worklist, 0);
-  WorklistView worklist_view2(&worklist, 1);
+TEST(TestWorklist, LocalPushStaysPrivate) {
+  TestWorklist worklist;
+  TestWorklist::View worklist_view1(&worklist, 0);
+  TestWorklist::View worklist_view2(&worklist, 1);
   HeapObject dummy;
   HeapObject* retrieved = nullptr;
   EXPECT_TRUE(worklist.IsGlobalEmpty());
@@ -152,12 +154,12 @@ TEST(Worklist, LocalPushStaysPrivate) {
   EXPECT_TRUE(worklist.IsGlobalEmpty());
 }

-TEST(Worklist, GlobalUpdateNull) {
-  Worklist worklist;
-  WorklistView worklist_view(&worklist, 0);
+TEST(TestWorklist, GlobalUpdateNull) {
+  TestWorklist worklist;
+  TestWorklist::View worklist_view(&worklist, 0);
   HeapObject* object;
   object = reinterpret_cast<HeapObject*>(&object);
-  for (size_t i = 0; i < Worklist::kSegmentCapacity; i++) {
+  for (size_t i = 0; i < TestWorklist::kSegmentCapacity; i++) {
     EXPECT_TRUE(worklist_view.Push(object));
   }
   EXPECT_TRUE(worklist_view.Push(object));
@@ -165,36 +167,36 @@ TEST(Worklist, GlobalUpdateNull) {
   EXPECT_TRUE(worklist.IsGlobalEmpty());
 }

-TEST(Worklist, GlobalUpdate) {
-  Worklist worklist;
-  WorklistView worklist_view(&worklist, 0);
+TEST(TestWorklist, GlobalUpdate) {
+  TestWorklist worklist;
+  TestWorklist::View worklist_view(&worklist, 0);
   HeapObject* objectA = nullptr;
   objectA = reinterpret_cast<HeapObject*>(&objectA);
   HeapObject* objectB = nullptr;
   objectB = reinterpret_cast<HeapObject*>(&objectB);
   HeapObject* objectC = nullptr;
   objectC = reinterpret_cast<HeapObject*>(&objectC);
-  for (size_t i = 0; i < Worklist::kSegmentCapacity; i++) {
+  for (size_t i = 0; i < TestWorklist::kSegmentCapacity; i++) {
     EXPECT_TRUE(worklist_view.Push(objectA));
   }
-  for (size_t i = 0; i < Worklist::kSegmentCapacity; i++) {
+  for (size_t i = 0; i < TestWorklist::kSegmentCapacity; i++) {
     EXPECT_TRUE(worklist_view.Push(objectB));
   }
   EXPECT_TRUE(worklist_view.Push(objectA));
   worklist.Update([objectA, objectC](HeapObject* object) {
     return (object == objectA) ? nullptr : objectC;
   });
-  for (size_t i = 0; i < Worklist::kSegmentCapacity; i++) {
+  for (size_t i = 0; i < TestWorklist::kSegmentCapacity; i++) {
     HeapObject* object;
     EXPECT_TRUE(worklist_view.Pop(&object));
     EXPECT_EQ(object, objectC);
   }
 }

-TEST(Worklist, FlushToGlobalPushSegment) {
-  Worklist worklist;
-  WorklistView worklist_view0(&worklist, 0);
-  WorklistView worklist_view1(&worklist, 1);
+TEST(TestWorklist, FlushToGlobalPushSegment) {
+  TestWorklist worklist;
+  TestWorklist::View worklist_view0(&worklist, 0);
+  TestWorklist::View worklist_view1(&worklist, 1);
   HeapObject* object = nullptr;
   HeapObject* objectA = nullptr;
   objectA = reinterpret_cast<HeapObject*>(&objectA);
@@ -203,10 +205,10 @@ TEST(Worklist, FlushToGlobalPushSegment) {
   EXPECT_TRUE(worklist_view1.Pop(&object));
 }

-TEST(Worklist, FlushToGlobalPopSegment) {
-  Worklist worklist;
-  WorklistView worklist_view0(&worklist, 0);
-  WorklistView worklist_view1(&worklist, 1);
+TEST(TestWorklist, FlushToGlobalPopSegment) {
+  TestWorklist worklist;
+  TestWorklist::View worklist_view0(&worklist, 0);
+  TestWorklist::View worklist_view1(&worklist, 1);
   HeapObject* object = nullptr;
   HeapObject* objectA = nullptr;
   objectA = reinterpret_cast<HeapObject*>(&objectA);
@@ -217,12 +219,12 @@ TEST(Worklist, FlushToGlobalPopSegment) {
   EXPECT_TRUE(worklist_view1.Pop(&object));
 }

-TEST(Worklist, Clear) {
-  Worklist worklist;
-  WorklistView worklist_view(&worklist, 0);
+TEST(TestWorklist, Clear) {
+  TestWorklist worklist;
+  TestWorklist::View worklist_view(&worklist, 0);
   HeapObject* object;
   object = reinterpret_cast<HeapObject*>(&object);
-  for (size_t i = 0; i < Worklist::kSegmentCapacity; i++) {
+  for (size_t i = 0; i < TestWorklist::kSegmentCapacity; i++) {
     EXPECT_TRUE(worklist_view.Push(object));
   }
   EXPECT_TRUE(worklist_view.Push(object));
@@ -230,12 +232,12 @@ TEST(Worklist, Clear) {
   EXPECT_TRUE(worklist.IsGlobalEmpty());
 }

-TEST(Worklist, SingleSegmentSteal) {
-  Worklist worklist;
-  WorklistView worklist_view1(&worklist, 0);
-  WorklistView worklist_view2(&worklist, 1);
+TEST(TestWorklist, SingleSegmentSteal) {
+  TestWorklist worklist;
+  TestWorklist::View worklist_view1(&worklist, 0);
+  TestWorklist::View worklist_view2(&worklist, 1);
   HeapObject dummy;
-  for (size_t i = 0; i < Worklist::kSegmentCapacity; i++) {
+  for (size_t i = 0; i < TestWorklist::kSegmentCapacity; i++) {
     EXPECT_TRUE(worklist_view1.Push(&dummy));
   }
   HeapObject* retrieved = nullptr;
@@ -244,7 +246,7 @@ TEST(Worklist, SingleSegmentSteal) {
   EXPECT_TRUE(worklist_view1.Pop(&retrieved));
   EXPECT_EQ(nullptr, retrieved);
   // Stealing.
-  for (size_t i = 0; i < Worklist::kSegmentCapacity; i++) {
+  for (size_t i = 0; i < TestWorklist::kSegmentCapacity; i++) {
     EXPECT_TRUE(worklist_view2.Pop(&retrieved));
     EXPECT_EQ(&dummy, retrieved);
     EXPECT_FALSE(worklist_view1.Pop(&retrieved));
@@ -252,17 +254,17 @@ TEST(Worklist, SingleSegmentSteal) {
   EXPECT_TRUE(worklist.IsGlobalEmpty());
 }

-TEST(Worklist, MultipleSegmentsStolen) {
-  Worklist worklist;
-  WorklistView worklist_view1(&worklist, 0);
-  WorklistView worklist_view2(&worklist, 1);
-  WorklistView worklist_view3(&worklist, 2);
+TEST(TestWorklist, MultipleSegmentsStolen) {
+  TestWorklist worklist;
+  TestWorklist::View worklist_view1(&worklist, 0);
+  TestWorklist::View worklist_view2(&worklist, 1);
+  TestWorklist::View worklist_view3(&worklist, 2);
   HeapObject dummy1;
   HeapObject dummy2;
-  for (size_t i = 0; i < Worklist::kSegmentCapacity; i++) {
+  for (size_t i = 0; i < TestWorklist::kSegmentCapacity; i++) {
     EXPECT_TRUE(worklist_view1.Push(&dummy1));
   }
-  for (size_t i = 0; i < Worklist::kSegmentCapacity; i++) {
+  for (size_t i = 0; i < TestWorklist::kSegmentCapacity; i++) {
     EXPECT_TRUE(worklist_view1.Push(&dummy2));
   }
   HeapObject* retrieved = nullptr;
@@ -279,12 +281,12 @@ TEST(Worklist, MultipleSegmentsStolen) {
   EXPECT_NE(expect_bag2, expect_bag3);
   EXPECT_TRUE(expect_bag2 == &dummy1 || expect_bag2 == &dummy2);
   EXPECT_TRUE(expect_bag3 == &dummy1 || expect_bag3 == &dummy2);
-  for (size_t i = 1; i < Worklist::kSegmentCapacity; i++) {
+  for (size_t i = 1; i < TestWorklist::kSegmentCapacity; i++) {
     EXPECT_TRUE(worklist_view2.Pop(&retrieved));
     EXPECT_EQ(expect_bag2, retrieved);
     EXPECT_FALSE(worklist_view1.Pop(&retrieved));
   }
-  for (size_t i = 1; i < Worklist::kSegmentCapacity; i++) {
+  for (size_t i = 1; i < TestWorklist::kSegmentCapacity; i++) {
     EXPECT_TRUE(worklist_view3.Pop(&retrieved));
     EXPECT_EQ(expect_bag3, retrieved);
     EXPECT_FALSE(worklist_view1.Pop(&retrieved));
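As the updated unit tests show, call sites now fix the segment size once through a type alias and obtain per-task handles via the nested View. Under the same assumption, a worklist with a different segment size could be instantiated the same way; the SmallWorklist name below is purely illustrative and not part of this change:

    using TestWorklist = Worklist<64>;    // alias used by the tests above
    using SmallWorklist = Worklist<16>;   // hypothetical: any other segment size now works

    SmallWorklist worklist;
    SmallWorklist::View view(&worklist, 0);
    static_assert(SmallWorklist::kSegmentCapacity == 16,
                  "kSegmentCapacity mirrors the template argument");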