Reland of [heap] Fix CompactionSpace test and move to unittests

This reverts commit eddf90c4a7.

BUG=chromium:651354

Review-Url: https://codereview.chromium.org/2792063004
Cr-Commit-Position: refs/heads/master@{#44398}
Author: mlippautz (committed by Commit bot)
Date: 2017-04-05 00:36:15 -07:00
Commit: a0655790ae (parent: 1aaafe5a4b)
6 changed files with 56 additions and 51 deletions

@@ -746,8 +746,8 @@ class Heap {
   // when introducing gaps within pages. If slots could have been recorded in
   // the freed area, then pass ClearRecordedSlots::kYes as the mode. Otherwise,
   // pass ClearRecordedSlots::kNo.
-  HeapObject* CreateFillerObjectAt(Address addr, int size,
-                                   ClearRecordedSlots mode);
+  V8_EXPORT_PRIVATE HeapObject* CreateFillerObjectAt(Address addr, int size,
+                                                     ClearRecordedSlots mode);
   bool CanMoveObjectStart(HeapObject* object);

@@ -807,7 +807,7 @@ class Page : public MemoryChunk {
   size_t ShrinkToHighWaterMark();
-  void CreateBlackArea(Address start, Address end);
+  V8_EXPORT_PRIVATE void CreateBlackArea(Address start, Address end);
 #ifdef DEBUG
   void Print();
@@ -2733,7 +2733,7 @@ class PauseAllocationObserversScope {
 // -----------------------------------------------------------------------------
 // Compaction space that is used temporarily during compaction.
-class CompactionSpace : public PagedSpace {
+class V8_EXPORT_PRIVATE CompactionSpace : public PagedSpace {
  public:
   CompactionSpace(Heap* heap, AllocationSpace id, Executability executable)
       : PagedSpace(heap, id, executable) {}

@@ -401,53 +401,6 @@ TEST(OldSpace) {
   delete memory_allocator;
 }
-TEST(CompactionSpace) {
-  Isolate* isolate = CcTest::i_isolate();
-  Heap* heap = isolate->heap();
-  MemoryAllocator* memory_allocator = new MemoryAllocator(isolate);
-  CHECK(memory_allocator != nullptr);
-  CHECK(memory_allocator->SetUp(heap->MaxReserved(), heap->MaxExecutableSize(),
-                                0));
-  TestMemoryAllocatorScope test_scope(isolate, memory_allocator);
-  CompactionSpace* compaction_space =
-      new CompactionSpace(heap, OLD_SPACE, NOT_EXECUTABLE);
-  CHECK(compaction_space != NULL);
-  CHECK(compaction_space->SetUp());
-  OldSpace* old_space = new OldSpace(heap, OLD_SPACE, NOT_EXECUTABLE);
-  CHECK(old_space != NULL);
-  CHECK(old_space->SetUp());
-  // Cannot loop until "Available()" since we initially have 0 bytes available
-  // and would thus neither grow, nor be able to allocate an object.
-  const int kNumObjects = 100;
-  const int kNumObjectsPerPage =
-      compaction_space->AreaSize() / kMaxRegularHeapObjectSize;
-  const int kExpectedPages =
-      (kNumObjects + kNumObjectsPerPage - 1) / kNumObjectsPerPage;
-  for (int i = 0; i < kNumObjects; i++) {
-    compaction_space->AllocateRawUnaligned(kMaxRegularHeapObjectSize)
-        .ToObjectChecked();
-  }
-  int pages_in_old_space = old_space->CountTotalPages();
-  int pages_in_compaction_space = compaction_space->CountTotalPages();
-  CHECK_EQ(pages_in_compaction_space, kExpectedPages);
-  CHECK_LE(pages_in_old_space, 1);
-  old_space->MergeCompactionSpace(compaction_space);
-  CHECK_EQ(old_space->CountTotalPages(),
-           pages_in_old_space + pages_in_compaction_space);
-  delete compaction_space;
-  delete old_space;
-  memory_allocator->TearDown();
-  delete memory_allocator;
-}
 TEST(LargeObjectSpace) {
   // This test does not initialize allocated objects, which confuses the
   // incremental marker.

@@ -109,6 +109,7 @@ v8_executable("unittests") {
     "heap/memory-reducer-unittest.cc",
     "heap/scavenge-job-unittest.cc",
     "heap/slot-set-unittest.cc",
+    "heap/spaces-unittest.cc",
     "heap/unmapper-unittest.cc",
     "interpreter/bytecode-array-builder-unittest.cc",
     "interpreter/bytecode-array-iterator-unittest.cc",

@@ -0,0 +1,50 @@
+// Copyright 2017 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+#include "src/heap/heap-inl.h"
+#include "src/heap/spaces-inl.h"
+#include "src/isolate.h"
+#include "test/unittests/test-utils.h"
+namespace v8 {
+namespace internal {
+typedef TestWithIsolate SpacesTest;
+TEST_F(SpacesTest, CompactionSpaceMerge) {
+  Heap* heap = i_isolate()->heap();
+  OldSpace* old_space = heap->old_space();
+  EXPECT_TRUE(old_space != NULL);
+  CompactionSpace* compaction_space =
+      new CompactionSpace(heap, OLD_SPACE, NOT_EXECUTABLE);
+  EXPECT_TRUE(compaction_space != NULL);
+  EXPECT_TRUE(compaction_space->SetUp());
+  // Cannot loop until "Available()" since we initially have 0 bytes available
+  // and would thus neither grow, nor be able to allocate an object.
+  const int kNumObjects = 10;
+  const int kNumObjectsPerPage =
+      compaction_space->AreaSize() / kMaxRegularHeapObjectSize;
+  const int kExpectedPages =
+      (kNumObjects + kNumObjectsPerPage - 1) / kNumObjectsPerPage;
+  for (int i = 0; i < kNumObjects; i++) {
+    HeapObject* object =
+        compaction_space->AllocateRawUnaligned(kMaxRegularHeapObjectSize)
+            .ToObjectChecked();
+    heap->CreateFillerObjectAt(object->address(), kMaxRegularHeapObjectSize,
+                               ClearRecordedSlots::kNo);
+  }
+  int pages_in_old_space = old_space->CountTotalPages();
+  int pages_in_compaction_space = compaction_space->CountTotalPages();
+  EXPECT_EQ(kExpectedPages, pages_in_compaction_space);
+  old_space->MergeCompactionSpace(compaction_space);
+  EXPECT_EQ(pages_in_old_space + pages_in_compaction_space,
+            old_space->CountTotalPages());
+  delete compaction_space;
+}
+}  // namespace internal
+}  // namespace v8

@@ -126,6 +126,7 @@
         'heap/heap-unittest.cc',
         'heap/scavenge-job-unittest.cc',
         'heap/slot-set-unittest.cc',
+        'heap/spaces-unittest.cc',
         'heap/unmapper-unittest.cc',
         'locked-queue-unittest.cc',
         'object-unittest.cc',