Reland "Merge cellspace into old pointer space"
This fixes the arm(64) and mips(64) write barriers.

BUG=
Review URL: https://codereview.chromium.org/1073133002

Cr-Commit-Position: refs/heads/master@{#27751}
parent e7ba4791e5
commit 4bd9bdbb28
@@ -4750,8 +4750,7 @@ enum ObjectSpace {
   kObjectSpaceOldSpace = 1 << 1,
   kObjectSpaceCodeSpace = 1 << 2,
   kObjectSpaceMapSpace = 1 << 3,
-  kObjectSpaceCellSpace = 1 << 4,
-  kObjectSpaceLoSpace = 1 << 5,
+  kObjectSpaceLoSpace = 1 << 4,
   kObjectSpaceAll = kObjectSpaceNewSpace | kObjectSpaceOldSpace |
                     kObjectSpaceCodeSpace | kObjectSpaceMapSpace |
                     kObjectSpaceLoSpace
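The hunk above keeps the flag values dense. For orientation, a minimal self-contained C++ sketch (illustrative, mirroring the enums in this patch) of the invariant that the STATIC_ASSERTs further down this diff pin down: each ObjectSpace flag is 1 << its AllocationSpace index, so removing the cell space shifts the large-object bit from 1 << 5 down to 1 << 4.

    enum AllocationSpace { NEW_SPACE, OLD_SPACE, CODE_SPACE, MAP_SPACE, LO_SPACE };
    enum ObjectSpace {
      kObjectSpaceNewSpace = 1 << 0,
      kObjectSpaceOldSpace = 1 << 1,
      kObjectSpaceCodeSpace = 1 << 2,
      kObjectSpaceMapSpace = 1 << 3,
      kObjectSpaceLoSpace = 1 << 4
    };
    static_assert(kObjectSpaceLoSpace == 1 << LO_SPACE,
                  "ObjectSpace flags track the AllocationSpace numbering");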
@@ -6741,7 +6740,7 @@ class Internals {
   static const int kJSObjectType = 0xbe;
   static const int kFirstNonstringType = 0x80;
   static const int kOddballType = 0x83;
-  static const int kForeignType = 0x87;
+  static const int kForeignType = 0x86;

   static const int kUndefinedOddballKind = 5;
   static const int kNullOddballKind = 3;
@@ -229,10 +229,6 @@ void i::V8::FatalProcessOutOfMemory(const char* location, bool take_snapshot) {
   heap_stats.map_space_size = &map_space_size;
   intptr_t map_space_capacity;
   heap_stats.map_space_capacity = &map_space_capacity;
-  intptr_t cell_space_size;
-  heap_stats.cell_space_size = &cell_space_size;
-  intptr_t cell_space_capacity;
-  heap_stats.cell_space_capacity = &cell_space_capacity;
   intptr_t lo_space_size;
   heap_stats.lo_space_size = &lo_space_size;
   int global_handle_count;
@@ -1324,6 +1324,14 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
     __ GetRelocatedValueLocation(r9, map_load_offset, scratch);
     __ ldr(map_load_offset, MemOperand(map_load_offset));
     __ str(map, FieldMemOperand(map_load_offset, Cell::kValueOffset));
+
+    __ mov(r8, map);
+    // |map_load_offset| points at the beginning of the cell. Calculate the
+    // field containing the map.
+    __ add(function, map_load_offset, Operand(Cell::kValueOffset - 1));
+    __ RecordWriteField(map_load_offset, Cell::kValueOffset, r8, function,
+                        kLRHasNotBeenSaved, kDontSaveFPRegs,
+                        OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
   }

   // Register mapping: r3 is object map and r4 is function prototype.
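This is the arm write-barrier fix the commit message refers to: once cells live in old pointer space, patching a map into a cell must be recorded for the incremental marker. A hedged C++ sketch of the address arithmetic behind Operand(Cell::kValueOffset - 1) (names and constants are illustrative, not the V8 macro assembler):

    #include <cstdint>

    constexpr intptr_t kHeapObjectTag = 1;    // V8 heap pointers carry tag bit 1
    constexpr intptr_t kCellValueOffset = 4;  // stand-in for Cell::kValueOffset

    // A tagged pointer is the object start plus kHeapObjectTag, so the raw
    // address of the value field is tagged + offset - tag. This is the
    // `Cell::kValueOffset - 1` the stubs add before calling RecordWriteField,
    // which needs both the object register and the exact slot address.
    inline intptr_t ValueFieldAddress(intptr_t tagged_cell) {
      return tagged_cell + kCellValueOffset - kHeapObjectTag;
    }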
@@ -1544,6 +1544,14 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
     // We have a cell, so need another level of dereferencing.
     __ Ldr(scratch1, MemOperand(scratch1));
     __ Str(map, FieldMemOperand(scratch1, Cell::kValueOffset));
+
+    __ Mov(x14, map);
+    // |scratch1| points at the beginning of the cell. Calculate the
+    // field containing the map.
+    __ Add(function, scratch1, Operand(Cell::kValueOffset - 1));
+    __ RecordWriteField(scratch1, Cell::kValueOffset, x14, function,
+                        kLRHasNotBeenSaved, kDontSaveFPRegs,
+                        OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
   } else {
     __ StoreRoot(function, Heap::kInstanceofCacheFunctionRootIndex);
     __ StoreRoot(map, Heap::kInstanceofCacheMapRootIndex);
@@ -435,6 +435,7 @@ class RelocInfo {
   static inline bool IsEmbeddedObject(Mode mode) {
     return mode == EMBEDDED_OBJECT;
   }
+  static inline bool IsCell(Mode mode) { return mode == CELL; }
   static inline bool IsRuntimeEntry(Mode mode) {
     return mode == RUNTIME_ENTRY;
   }
@@ -403,15 +403,12 @@ class AggregatedHistogramTimerScope {
   HP(external_fragmentation_code_space,                                 \
      V8.MemoryExternalFragmentationCodeSpace)                           \
   HP(external_fragmentation_map_space, V8.MemoryExternalFragmentationMapSpace) \
-  HP(external_fragmentation_cell_space,                                 \
-     V8.MemoryExternalFragmentationCellSpace)                           \
   HP(external_fragmentation_lo_space, V8.MemoryExternalFragmentationLoSpace) \
   /* Percentages of heap committed to each space. */                    \
   HP(heap_fraction_new_space, V8.MemoryHeapFractionNewSpace)            \
   HP(heap_fraction_old_space, V8.MemoryHeapFractionOldSpace)            \
   HP(heap_fraction_code_space, V8.MemoryHeapFractionCodeSpace)          \
   HP(heap_fraction_map_space, V8.MemoryHeapFractionMapSpace)            \
-  HP(heap_fraction_cell_space, V8.MemoryHeapFractionCellSpace)          \
   HP(heap_fraction_lo_space, V8.MemoryHeapFractionLoSpace)              \
   /* Percentage of crankshafted codegen. */                             \
   HP(codegen_fraction_crankshaft, V8.CodegenFractionCrankshaft)
@@ -422,8 +419,6 @@ class AggregatedHistogramTimerScope {
   HM(heap_sample_total_used, V8.MemoryHeapSampleTotalUsed) \
   HM(heap_sample_map_space_committed,                      \
      V8.MemoryHeapSampleMapSpaceCommitted)                 \
-  HM(heap_sample_cell_space_committed,                     \
-     V8.MemoryHeapSampleCellSpaceCommitted)                \
   HM(heap_sample_code_space_committed,                     \
      V8.MemoryHeapSampleCodeSpaceCommitted)                \
   HM(heap_sample_maximum_committed,                        \
@@ -595,9 +590,6 @@ class AggregatedHistogramTimerScope {
   SC(map_space_bytes_available, V8.MemoryMapSpaceBytesAvailable)   \
   SC(map_space_bytes_committed, V8.MemoryMapSpaceBytesCommitted)   \
   SC(map_space_bytes_used, V8.MemoryMapSpaceBytesUsed)             \
-  SC(cell_space_bytes_available, V8.MemoryCellSpaceBytesAvailable) \
-  SC(cell_space_bytes_committed, V8.MemoryCellSpaceBytesCommitted) \
-  SC(cell_space_bytes_used, V8.MemoryCellSpaceBytesUsed)           \
   SC(lo_space_bytes_available, V8.MemoryLoSpaceBytesAvailable)     \
   SC(lo_space_bytes_committed, V8.MemoryLoSpaceBytesCommitted)     \
   SC(lo_space_bytes_used, V8.MemoryLoSpaceBytesUsed)
@@ -114,9 +114,6 @@ void StatisticsExtension::GetCounters(
       {heap->code_space()->Size(), "code_space_live_bytes"},
       {heap->code_space()->Available(), "code_space_available_bytes"},
       {heap->code_space()->CommittedMemory(), "code_space_commited_bytes"},
-      {heap->cell_space()->Size(), "cell_space_live_bytes"},
-      {heap->cell_space()->Available(), "cell_space_available_bytes"},
-      {heap->cell_space()->CommittedMemory(), "cell_space_commited_bytes"},
       {heap->lo_space()->Size(), "lo_space_live_bytes"},
       {heap->lo_space()->Available(), "lo_space_available_bytes"},
       {heap->lo_space()->CommittedMemory(), "lo_space_commited_bytes"},
@@ -412,13 +412,12 @@ enum AllocationSpace {
   OLD_SPACE,   // May contain pointers to new space.
   CODE_SPACE,  // No pointers to new space, marked executable.
   MAP_SPACE,   // Only and all map objects.
-  CELL_SPACE,  // Only and all cell objects.
   LO_SPACE,    // Promoted large objects.

   FIRST_SPACE = NEW_SPACE,
   LAST_SPACE = LO_SPACE,
   FIRST_PAGED_SPACE = OLD_SPACE,
-  LAST_PAGED_SPACE = CELL_SPACE
+  LAST_PAGED_SPACE = MAP_SPACE
 };
 const int kSpaceTagSize = 3;
 const int kSpaceTagMask = (1 << kSpaceTagSize) - 1;
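With the cell space gone, LAST_PAGED_SPACE drops to MAP_SPACE, so any code that walks the paged spaces by these bounds shrinks automatically. A tiny illustrative sketch (reusing the enumerators above; not V8 code):

    inline int CountPagedSpaces() {
      int n = 0;
      // Visits OLD_SPACE, CODE_SPACE, MAP_SPACE once CELL_SPACE is removed.
      for (int s = FIRST_PAGED_SPACE; s <= LAST_PAGED_SPACE; ++s) ++n;
      return n;  // 3
    }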
@@ -194,8 +194,6 @@ AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationSpace space,
     }
   } else if (LO_SPACE == space) {
     allocation = lo_space_->AllocateRaw(size_in_bytes, NOT_EXECUTABLE);
-  } else if (CELL_SPACE == space) {
-    allocation = cell_space_->AllocateRaw(size_in_bytes);
   } else {
     DCHECK(MAP_SPACE == space);
     allocation = map_space_->AllocateRaw(size_in_bytes);
@@ -387,7 +385,6 @@ bool Heap::AllowedToBeMigrated(HeapObject* obj, AllocationSpace dst) {
     case CODE_SPACE:
       return dst == src && type == CODE_TYPE;
     case MAP_SPACE:
-    case CELL_SPACE:
     case LO_SPACE:
       return false;
   }
@@ -87,7 +87,6 @@ Heap::Heap()
       old_space_(NULL),
       code_space_(NULL),
       map_space_(NULL),
-      cell_space_(NULL),
       lo_space_(NULL),
       gc_state_(NOT_IN_GC),
       gc_post_processing_depth_(0),
@@ -172,8 +171,7 @@ intptr_t Heap::Capacity() {
   if (!HasBeenSetUp()) return 0;

   return new_space_.Capacity() + old_space_->Capacity() +
-         code_space_->Capacity() + map_space_->Capacity() +
-         cell_space_->Capacity();
+         code_space_->Capacity() + map_space_->Capacity();
 }

@@ -181,8 +179,7 @@ intptr_t Heap::CommittedOldGenerationMemory() {
   if (!HasBeenSetUp()) return 0;

   return old_space_->CommittedMemory() + code_space_->CommittedMemory() +
-         map_space_->CommittedMemory() + cell_space_->CommittedMemory() +
-         lo_space_->Size();
+         map_space_->CommittedMemory() + lo_space_->Size();
 }

@@ -200,7 +197,6 @@ size_t Heap::CommittedPhysicalMemory() {
          old_space_->CommittedPhysicalMemory() +
          code_space_->CommittedPhysicalMemory() +
          map_space_->CommittedPhysicalMemory() +
-         cell_space_->CommittedPhysicalMemory() +
          lo_space_->CommittedPhysicalMemory();
 }

@@ -226,14 +222,13 @@ intptr_t Heap::Available() {
   if (!HasBeenSetUp()) return 0;

   return new_space_.Available() + old_space_->Available() +
-         code_space_->Available() + map_space_->Available() +
-         cell_space_->Available();
+         code_space_->Available() + map_space_->Available();
 }


 bool Heap::HasBeenSetUp() {
   return old_space_ != NULL && code_space_ != NULL && map_space_ != NULL &&
-         cell_space_ != NULL && lo_space_ != NULL;
+         lo_space_ != NULL;
 }

@@ -357,13 +352,6 @@ void Heap::PrintShortHeapStatistics() {
            ", committed: %6" V8_PTR_PREFIX "d KB\n",
            map_space_->SizeOfObjects() / KB, map_space_->Available() / KB,
            map_space_->CommittedMemory() / KB);
-  PrintPID("Cell space, used: %6" V8_PTR_PREFIX
-           "d KB"
-           ", available: %6" V8_PTR_PREFIX
-           "d KB"
-           ", committed: %6" V8_PTR_PREFIX "d KB\n",
-           cell_space_->SizeOfObjects() / KB, cell_space_->Available() / KB,
-           cell_space_->CommittedMemory() / KB);
   PrintPID("Large object space, used: %6" V8_PTR_PREFIX
            "d KB"
            ", available: %6" V8_PTR_PREFIX
@@ -646,9 +634,6 @@ void Heap::GarbageCollectionEpilogue() {
                             CommittedMemory()));
   isolate_->counters()->heap_fraction_map_space()->AddSample(static_cast<int>(
       (map_space()->CommittedMemory() * 100.0) / CommittedMemory()));
-  isolate_->counters()->heap_fraction_cell_space()->AddSample(
-      static_cast<int>((cell_space()->CommittedMemory() * 100.0) /
-                       CommittedMemory()));
   isolate_->counters()->heap_fraction_lo_space()->AddSample(static_cast<int>(
       (lo_space()->CommittedMemory() * 100.0) / CommittedMemory()));

@@ -658,8 +643,6 @@ void Heap::GarbageCollectionEpilogue() {
       static_cast<int>(SizeOfObjects() / KB));
   isolate_->counters()->heap_sample_map_space_committed()->AddSample(
       static_cast<int>(map_space()->CommittedMemory() / KB));
-  isolate_->counters()->heap_sample_cell_space_committed()->AddSample(
-      static_cast<int>(cell_space()->CommittedMemory() / KB));
   isolate_->counters()->heap_sample_code_space_committed()->AddSample(
       static_cast<int>(code_space()->CommittedMemory() / KB));
@@ -689,7 +672,6 @@ void Heap::GarbageCollectionEpilogue() {
   UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(old_space)
   UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(code_space)
   UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(map_space)
-  UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(cell_space)
   UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(lo_space)
 #undef UPDATE_COUNTERS_FOR_SPACE
 #undef UPDATE_FRAGMENTATION_FOR_SPACE
@@ -1554,18 +1536,6 @@ void Heap::Scavenge() {
     store_buffer()->IteratePointersToNewSpace(&ScavengeObject);
   }

-  // Copy objects reachable from simple cells by scavenging cell values
-  // directly.
-  HeapObjectIterator cell_iterator(cell_space_);
-  for (HeapObject* heap_object = cell_iterator.Next(); heap_object != NULL;
-       heap_object = cell_iterator.Next()) {
-    if (heap_object->IsCell()) {
-      Cell* cell = Cell::cast(heap_object);
-      Address value_address = cell->ValueAddress();
-      scavenge_visitor.VisitPointer(reinterpret_cast<Object**>(value_address));
-    }
-  }
-
   // Copy objects reachable from the encountered weak collections list.
   scavenge_visitor.VisitPointer(&encountered_weak_collections_);
   // Copy objects reachable from the encountered weak cells.
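The deleted scan is redundant once cells are ordinary old-space objects: a barriered store into a cell records the slot, and the store_buffer()->IteratePointersToNewSpace(&ScavengeObject) call kept above visits it. A hedged sketch of that mechanism (hypothetical types, not the V8 store buffer API):

    #include <vector>

    struct StoreBufferSketch {
      std::vector<void**> slots;  // old-space slots that were written through a barrier
      void Record(void** slot) { slots.push_back(slot); }
      template <typename Visitor>
      void IteratePointersToNewSpace(Visitor visit) {
        // Cell value slots now arrive here via the write barrier, so no
        // separate cell-space iteration is needed during scavenge.
        for (void** slot : slots) visit(slot);
      }
    };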
@@ -2829,7 +2799,7 @@ AllocationResult Heap::AllocateCell(Object* value) {

   HeapObject* result;
   {
-    AllocationResult allocation = AllocateRaw(size, CELL_SPACE, CELL_SPACE);
+    AllocationResult allocation = AllocateRaw(size, OLD_SPACE, OLD_SPACE);
     if (!allocation.To(&result)) return allocation;
   }
   result->set_map_no_write_barrier(cell_map());
@@ -4777,8 +4747,6 @@ void Heap::ReportHeapStatistics(const char* title) {
   code_space_->ReportStatistics();
   PrintF("Map space : ");
   map_space_->ReportStatistics();
-  PrintF("Cell space : ");
-  cell_space_->ReportStatistics();
   PrintF("Large object space : ");
   lo_space_->ReportStatistics();
   PrintF(">>>>>> ========================================= >>>>>>\n");
@@ -4794,7 +4762,7 @@ bool Heap::Contains(Address addr) {
   return HasBeenSetUp() &&
          (new_space_.ToSpaceContains(addr) || old_space_->Contains(addr) ||
           code_space_->Contains(addr) || map_space_->Contains(addr) ||
-          cell_space_->Contains(addr) || lo_space_->SlowContains(addr));
+          lo_space_->SlowContains(addr));
 }

@@ -4816,8 +4784,6 @@ bool Heap::InSpace(Address addr, AllocationSpace space) {
       return code_space_->Contains(addr);
     case MAP_SPACE:
       return map_space_->Contains(addr);
-    case CELL_SPACE:
-      return cell_space_->Contains(addr);
     case LO_SPACE:
       return lo_space_->SlowContains(addr);
   }
@@ -4864,7 +4830,6 @@ void Heap::Verify() {

   VerifyPointersVisitor no_dirty_regions_visitor;
   code_space_->Verify(&no_dirty_regions_visitor);
-  cell_space_->Verify(&no_dirty_regions_visitor);

   lo_space_->Verify();
 }
@@ -5174,8 +5139,6 @@ void Heap::RecordStats(HeapStats* stats, bool take_snapshot) {
   *stats->code_space_capacity = code_space_->Capacity();
   *stats->map_space_size = map_space_->SizeOfObjects();
   *stats->map_space_capacity = map_space_->Capacity();
-  *stats->cell_space_size = cell_space_->SizeOfObjects();
-  *stats->cell_space_capacity = cell_space_->Capacity();
   *stats->lo_space_size = lo_space_->Size();
   isolate_->global_handles()->RecordStats(stats);
   *stats->memory_allocator_size = isolate()->memory_allocator()->Size();
@@ -5199,8 +5162,7 @@ void Heap::RecordStats(HeapStats* stats, bool take_snapshot) {

 intptr_t Heap::PromotedSpaceSizeOfObjects() {
   return old_space_->SizeOfObjects() + code_space_->SizeOfObjects() +
-         map_space_->SizeOfObjects() + cell_space_->SizeOfObjects() +
-         lo_space_->SizeOfObjects();
+         map_space_->SizeOfObjects() + lo_space_->SizeOfObjects();
 }

@@ -5342,11 +5304,6 @@ bool Heap::SetUp() {
   if (map_space_ == NULL) return false;
   if (!map_space_->SetUp()) return false;

-  // Initialize simple cell space.
-  cell_space_ = new CellSpace(this, max_old_generation_size_, CELL_SPACE);
-  if (cell_space_ == NULL) return false;
-  if (!cell_space_->SetUp()) return false;
-
   // The large object code space may contain code or data. We set the memory
   // to be non-executable here for safety, but this means we need to enable it
   // explicitly when allocating large code objects.
@@ -5455,8 +5412,6 @@ void Heap::TearDown() {
            code_space_->MaximumCommittedMemory());
     PrintF("maximum_committed_by_map_space=%" V8_PTR_PREFIX "d ",
            map_space_->MaximumCommittedMemory());
-    PrintF("maximum_committed_by_cell_space=%" V8_PTR_PREFIX "d ",
-           cell_space_->MaximumCommittedMemory());
     PrintF("maximum_committed_by_lo_space=%" V8_PTR_PREFIX "d ",
            lo_space_->MaximumCommittedMemory());
     PrintF("\n\n");
@@ -5494,12 +5449,6 @@ void Heap::TearDown() {
     map_space_ = NULL;
   }

-  if (cell_space_ != NULL) {
-    cell_space_->TearDown();
-    delete cell_space_;
-    cell_space_ = NULL;
-  }
-
   if (lo_space_ != NULL) {
     lo_space_->TearDown();
     delete lo_space_;
@@ -5640,8 +5589,6 @@ Space* AllSpaces::next() {
       return heap_->code_space();
     case MAP_SPACE:
       return heap_->map_space();
-    case CELL_SPACE:
-      return heap_->cell_space();
     case LO_SPACE:
       return heap_->lo_space();
     default:
@@ -5658,8 +5605,6 @@ PagedSpace* PagedSpaces::next() {
       return heap_->code_space();
     case MAP_SPACE:
       return heap_->map_space();
-    case CELL_SPACE:
-      return heap_->cell_space();
     default:
       return NULL;
   }
@@ -5737,9 +5682,6 @@ ObjectIterator* SpaceIterator::CreateIterator() {
     case MAP_SPACE:
       iterator_ = new HeapObjectIterator(heap_->map_space(), size_func_);
       break;
-    case CELL_SPACE:
-      iterator_ = new HeapObjectIterator(heap_->cell_space(), size_func_);
-      break;
     case LO_SPACE:
       iterator_ = new LargeObjectIterator(heap_->lo_space(), size_func_);
       break;
@@ -645,7 +645,6 @@ class Heap {
   OldSpace* old_space() { return old_space_; }
   OldSpace* code_space() { return code_space_; }
   MapSpace* map_space() { return map_space_; }
-  CellSpace* cell_space() { return cell_space_; }
   LargeObjectSpace* lo_space() { return lo_space_; }
   PagedSpace* paged_space(int idx) {
     switch (idx) {
@@ -653,8 +652,6 @@ class Heap {
         return old_space();
       case MAP_SPACE:
         return map_space();
-      case CELL_SPACE:
-        return cell_space();
       case CODE_SPACE:
         return code_space();
       case NEW_SPACE:
@@ -1573,7 +1570,6 @@ class Heap {
   OldSpace* old_space_;
   OldSpace* code_space_;
   MapSpace* map_space_;
-  CellSpace* cell_space_;
   LargeObjectSpace* lo_space_;
   HeapState gc_state_;
   int gc_post_processing_depth_;
@@ -2178,20 +2174,18 @@ class HeapStats {
   intptr_t* code_space_capacity;        // 6
   intptr_t* map_space_size;             // 7
   intptr_t* map_space_capacity;         // 8
-  intptr_t* cell_space_size;            // 9
-  intptr_t* cell_space_capacity;        // 10
-  intptr_t* lo_space_size;              // 11
-  int* global_handle_count;             // 12
-  int* weak_global_handle_count;        // 13
-  int* pending_global_handle_count;     // 14
-  int* near_death_global_handle_count;  // 15
-  int* free_global_handle_count;        // 16
-  intptr_t* memory_allocator_size;      // 17
-  intptr_t* memory_allocator_capacity;  // 18
-  int* objects_per_type;                // 19
-  int* size_per_type;                   // 20
-  int* os_error;                        // 21
-  int* end_marker;                      // 22
+  intptr_t* lo_space_size;              // 9
+  int* global_handle_count;             // 10
+  int* weak_global_handle_count;        // 11
+  int* pending_global_handle_count;     // 12
+  int* near_death_global_handle_count;  // 13
+  int* free_global_handle_count;        // 14
+  intptr_t* memory_allocator_size;      // 15
+  intptr_t* memory_allocator_capacity;  // 16
+  int* objects_per_type;                // 17
+  int* size_per_type;                   // 18
+  int* os_error;                        // 19
+  int* end_marker;                      // 20
 };

@@ -301,10 +301,6 @@ void IncrementalMarking::SetOldSpacePageFlags(MemoryChunk* chunk,
         chunk->size() > static_cast<size_t>(Page::kPageSize) && is_compacting) {
       chunk->SetFlag(MemoryChunk::RESCAN_ON_EVACUATION);
     }
-  } else if (chunk->owner()->identity() == CELL_SPACE ||
-             chunk->scan_on_scavenge()) {
-    chunk->ClearFlag(MemoryChunk::POINTERS_TO_HERE_ARE_INTERESTING);
-    chunk->ClearFlag(MemoryChunk::POINTERS_FROM_HERE_ARE_INTERESTING);
   } else {
     chunk->ClearFlag(MemoryChunk::POINTERS_TO_HERE_ARE_INTERESTING);
     chunk->SetFlag(MemoryChunk::POINTERS_FROM_HERE_ARE_INTERESTING);
@@ -346,7 +342,6 @@ void IncrementalMarking::DeactivateIncrementalWriteBarrierForSpace(

 void IncrementalMarking::DeactivateIncrementalWriteBarrier() {
   DeactivateIncrementalWriteBarrierForSpace(heap_->old_space());
-  DeactivateIncrementalWriteBarrierForSpace(heap_->cell_space());
   DeactivateIncrementalWriteBarrierForSpace(heap_->map_space());
   DeactivateIncrementalWriteBarrierForSpace(heap_->code_space());
   DeactivateIncrementalWriteBarrierForSpace(heap_->new_space());
@@ -379,7 +374,6 @@ void IncrementalMarking::ActivateIncrementalWriteBarrier(NewSpace* space) {

 void IncrementalMarking::ActivateIncrementalWriteBarrier() {
   ActivateIncrementalWriteBarrier(heap_->old_space());
-  ActivateIncrementalWriteBarrier(heap_->cell_space());
   ActivateIncrementalWriteBarrier(heap_->map_space());
   ActivateIncrementalWriteBarrier(heap_->code_space());
   ActivateIncrementalWriteBarrier(heap_->new_space());
@@ -137,7 +137,6 @@ static void VerifyMarking(PagedSpace* space) {
 static void VerifyMarking(Heap* heap) {
   VerifyMarking(heap->old_space());
   VerifyMarking(heap->code_space());
-  VerifyMarking(heap->cell_space());
   VerifyMarking(heap->map_space());
   VerifyMarking(heap->new_space());

@@ -215,7 +214,6 @@ static void VerifyEvacuation(Heap* heap, PagedSpace* space) {
 static void VerifyEvacuation(Heap* heap) {
   VerifyEvacuation(heap, heap->old_space());
   VerifyEvacuation(heap, heap->code_space());
-  VerifyEvacuation(heap, heap->cell_space());
   VerifyEvacuation(heap, heap->map_space());
   VerifyEvacuation(heap->new_space());

@@ -268,7 +266,6 @@ bool MarkCompactCollector::StartCompaction(CompactionMode mode) {

     if (FLAG_trace_fragmentation) {
       TraceFragmentation(heap()->map_space());
-      TraceFragmentation(heap()->cell_space());
     }

     heap()->old_space()->EvictEvacuationCandidatesFromFreeLists();
@@ -295,7 +292,6 @@ void MarkCompactCollector::ClearInvalidStoreAndSlotsBufferEntries() {

   ClearInvalidSlotsBufferEntries(heap_->old_space());
   ClearInvalidSlotsBufferEntries(heap_->code_space());
-  ClearInvalidSlotsBufferEntries(heap_->cell_space());
   ClearInvalidSlotsBufferEntries(heap_->map_space());

   LargeObjectIterator it(heap_->lo_space());
@@ -321,7 +317,6 @@ static void VerifyValidStoreAndSlotsBufferEntries(Heap* heap) {

   VerifyValidSlotsBufferEntries(heap, heap->old_space());
   VerifyValidSlotsBufferEntries(heap, heap->code_space());
-  VerifyValidSlotsBufferEntries(heap, heap->cell_space());
   VerifyValidSlotsBufferEntries(heap, heap->map_space());

   LargeObjectIterator it(heap->lo_space());
@@ -412,7 +407,6 @@ void MarkCompactCollector::VerifyMarkbitsAreClean(NewSpace* space) {
 void MarkCompactCollector::VerifyMarkbitsAreClean() {
   VerifyMarkbitsAreClean(heap_->old_space());
   VerifyMarkbitsAreClean(heap_->code_space());
-  VerifyMarkbitsAreClean(heap_->cell_space());
   VerifyMarkbitsAreClean(heap_->map_space());
   VerifyMarkbitsAreClean(heap_->new_space());

@@ -469,7 +463,6 @@ void MarkCompactCollector::ClearMarkbits() {
   ClearMarkbitsInPagedSpace(heap_->code_space());
   ClearMarkbitsInPagedSpace(heap_->map_space());
   ClearMarkbitsInPagedSpace(heap_->old_space());
-  ClearMarkbitsInPagedSpace(heap_->cell_space());
   ClearMarkbitsInNewSpace(heap_->new_space());

   LargeObjectIterator it(heap_->lo_space());
@@ -636,8 +629,6 @@ const char* AllocationSpaceName(AllocationSpace space) {
       return "CODE_SPACE";
     case MAP_SPACE:
       return "MAP_SPACE";
-    case CELL_SPACE:
-      return "CELL_SPACE";
     case LO_SPACE:
       return "LO_SPACE";
     default:
@@ -2120,9 +2111,6 @@ void MarkCompactCollector::RefillMarkingDeque() {
   DiscoverGreyObjectsInSpace(heap(), &marking_deque_, heap()->map_space());
   if (marking_deque_.IsFull()) return;

-  DiscoverGreyObjectsInSpace(heap(), &marking_deque_, heap()->cell_space());
-  if (marking_deque_.IsFull()) return;
-
   LargeObjectIterator lo_it(heap()->lo_space());
   DiscoverGreyObjectsWithIterator(heap(), &marking_deque_, &lo_it);
   if (marking_deque_.IsFull()) return;
@@ -2313,23 +2301,6 @@ void MarkCompactCollector::MarkLiveObjects() {

   PrepareForCodeFlushing();

-  if (was_marked_incrementally_) {
-    // There is no write barrier on cells so we have to scan them now at the
-    // end of the incremental marking.
-    {
-      HeapObjectIterator cell_iterator(heap()->cell_space());
-      HeapObject* cell;
-      while ((cell = cell_iterator.Next()) != NULL) {
-        DCHECK(cell->IsCell());
-        if (IsMarked(cell)) {
-          int offset = Cell::kValueOffset;
-          MarkCompactMarkingVisitor::VisitPointer(
-              heap(), reinterpret_cast<Object**>(cell->address() + offset));
-        }
-      }
-    }
-  }
-
   RootMarkingVisitor root_visitor(heap());
   MarkRoots(&root_visitor);

@@ -2882,6 +2853,16 @@ class PointersUpdatingVisitor : public ObjectVisitor {
     for (Object** p = start; p < end; p++) UpdatePointer(p);
   }

+  void VisitCell(RelocInfo* rinfo) {
+    DCHECK(rinfo->rmode() == RelocInfo::CELL);
+    Object* cell = rinfo->target_cell();
+    Object* old_cell = cell;
+    VisitPointer(&cell);
+    if (cell != old_cell) {
+      rinfo->set_target_cell(reinterpret_cast<Cell*>(cell));
+    }
+  }
+
   void VisitEmbeddedPointer(RelocInfo* rinfo) {
     DCHECK(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
     Object* target = rinfo->target_object();
@@ -2990,11 +2971,9 @@ void PointersUpdatingVisitor::CheckLayoutDescriptorAndDie(Heap* heap,
     space_owner_id = 4;
   } else if (heap->map_space()->ContainsSafe(slot_address)) {
     space_owner_id = 5;
-  } else if (heap->cell_space()->ContainsSafe(slot_address)) {
-    space_owner_id = 6;
   } else {
     // Lo space or other.
-    space_owner_id = 7;
+    space_owner_id = 6;
   }
   data[index++] = space_owner_id;
   data[index++] = 0x20aaaaaaaaUL;
@@ -3423,6 +3402,11 @@ static inline void UpdateSlot(Isolate* isolate, ObjectVisitor* v,
       rinfo.Visit(isolate, v);
       break;
     }
+    case SlotsBuffer::CELL_TARGET_SLOT: {
+      RelocInfo rinfo(addr, RelocInfo::CELL, 0, NULL);
+      rinfo.Visit(isolate, v);
+      break;
+    }
     case SlotsBuffer::CODE_ENTRY_SLOT: {
       v->VisitCodeEntry(addr);
       break;
@@ -3807,15 +3791,6 @@ void MarkCompactCollector::EvacuateNewSpaceAndCandidates() {
     GCTracer::Scope gc_scope(heap()->tracer(),
                              GCTracer::Scope::MC_UPDATE_MISC_POINTERS);

-    // Update pointers from cells.
-    HeapObjectIterator cell_iterator(heap_->cell_space());
-    for (HeapObject* cell = cell_iterator.Next(); cell != NULL;
-         cell = cell_iterator.Next()) {
-      if (cell->IsCell()) {
-        Cell::BodyDescriptor::IterateBody(cell, &updating_visitor);
-      }
-    }
-
     heap_->string_table()->Iterate(&updating_visitor);

     // Update pointers from external string table.
@@ -4430,12 +4405,6 @@ void MarkCompactCollector::SweepSpaces() {
     SweepSpace(heap()->code_space(), SEQUENTIAL_SWEEPING);
   }

-  {
-    GCTracer::Scope sweep_scope(heap()->tracer(),
-                                GCTracer::Scope::MC_SWEEP_CELL);
-    SweepSpace(heap()->cell_space(), SEQUENTIAL_SWEEPING);
-  }
-
   EvacuateNewSpaceAndCandidates();

   // ClearNonLiveReferences depends on precise sweeping of map space to
@@ -4614,6 +4583,8 @@ void SlotsBuffer::VerifySlots(Heap* heap, SlotsBuffer* buffer) {
 static inline SlotsBuffer::SlotType SlotTypeForRMode(RelocInfo::Mode rmode) {
   if (RelocInfo::IsCodeTarget(rmode)) {
     return SlotsBuffer::CODE_TARGET_SLOT;
+  } else if (RelocInfo::IsCell(rmode)) {
+    return SlotsBuffer::CELL_TARGET_SLOT;
   } else if (RelocInfo::IsEmbeddedObject(rmode)) {
     return SlotsBuffer::EMBEDDED_OBJECT_SLOT;
   } else if (RelocInfo::IsDebugBreakSlot(rmode)) {
@@ -319,6 +319,7 @@ class SlotsBuffer {
   enum SlotType {
     EMBEDDED_OBJECT_SLOT,
     RELOCATED_CODE_OBJECT,
+    CELL_TARGET_SLOT,
     CODE_TARGET_SLOT,
     CODE_ENTRY_SLOT,
     DEBUG_TARGET_SLOT,
@@ -332,6 +333,8 @@ class SlotsBuffer {
         return "EMBEDDED_OBJECT_SLOT";
       case RELOCATED_CODE_OBJECT:
         return "RELOCATED_CODE_OBJECT";
+      case CELL_TARGET_SLOT:
+        return "CELL_TARGET_SLOT";
       case CODE_TARGET_SLOT:
         return "CODE_TARGET_SLOT";
       case CODE_ENTRY_SLOT:
@@ -233,7 +233,7 @@ void StaticMarkingVisitor<StaticVisitor>::VisitCell(Heap* heap,
                                                     RelocInfo* rinfo) {
   DCHECK(rinfo->rmode() == RelocInfo::CELL);
   Cell* cell = rinfo->target_cell();
-  // No need to record slots because the cell space is not compacted during GC.
+  heap->mark_compact_collector()->RecordRelocSlot(rinfo, cell);
   if (!rinfo->host()->IsWeakObject(cell)) {
     StaticVisitor::MarkObject(heap, cell);
   }
|
@ -43,7 +43,6 @@ HeapObjectIterator::HeapObjectIterator(Page* page,
|
||||
Space* owner = page->owner();
|
||||
DCHECK(owner == page->heap()->old_space() ||
|
||||
owner == page->heap()->map_space() ||
|
||||
owner == page->heap()->cell_space() ||
|
||||
owner == page->heap()->code_space());
|
||||
Initialize(reinterpret_cast<PagedSpace*>(owner), page->area_start(),
|
||||
page->area_end(), kOnePageOnly, size_func);
|
||||
@@ -924,8 +923,6 @@ STATIC_ASSERT(static_cast<ObjectSpace>(1 << AllocationSpace::OLD_SPACE) ==
               ObjectSpace::kObjectSpaceOldSpace);
 STATIC_ASSERT(static_cast<ObjectSpace>(1 << AllocationSpace::CODE_SPACE) ==
               ObjectSpace::kObjectSpaceCodeSpace);
-STATIC_ASSERT(static_cast<ObjectSpace>(1 << AllocationSpace::CELL_SPACE) ==
-              ObjectSpace::kObjectSpaceCellSpace);
 STATIC_ASSERT(static_cast<ObjectSpace>(1 << AllocationSpace::MAP_SPACE) ==
               ObjectSpace::kObjectSpaceMapSpace);

@@ -2795,15 +2792,6 @@ void PagedSpace::ReportStatistics() {
 void MapSpace::VerifyObject(HeapObject* object) { CHECK(object->IsMap()); }


-// -----------------------------------------------------------------------------
-// CellSpace implementation
-// TODO(mvstanton): this is weird...the compiler can't make a vtable unless
-// there is at least one non-inlined virtual function. I would prefer to hide
-// the VerifyObject definition behind VERIFY_HEAP.
-
-void CellSpace::VerifyObject(HeapObject* object) { CHECK(object->IsCell()); }
-
-
 // -----------------------------------------------------------------------------
 // LargeObjectIterator

@@ -2657,31 +2657,6 @@ class MapSpace : public PagedSpace {
 };


-// -----------------------------------------------------------------------------
-// Old space for simple property cell objects
-
-class CellSpace : public PagedSpace {
- public:
-  // Creates a property cell space object with a maximum capacity.
-  CellSpace(Heap* heap, intptr_t max_capacity, AllocationSpace id)
-      : PagedSpace(heap, max_capacity, id, NOT_EXECUTABLE) {}
-
-  virtual int RoundSizeDownToObjectAlignment(int size) {
-    if (base::bits::IsPowerOfTwo32(Cell::kSize)) {
-      return RoundDown(size, Cell::kSize);
-    } else {
-      return (size / Cell::kSize) * Cell::kSize;
-    }
-  }
-
- protected:
-  virtual void VerifyObject(HeapObject* obj);
-
- public:
-  TRACK_MEMORY("CellSpace")
-};
-
-
 // -----------------------------------------------------------------------------
 // Large objects ( > Page::kMaxHeapObjectSize ) are allocated and managed by
 // the large object space. A large object is allocated from OS heap with
@@ -16,7 +16,6 @@ Address StoreBuffer::TopAddress() {


 void StoreBuffer::Mark(Address addr) {
-  DCHECK(!heap_->cell_space()->Contains(addr));
   DCHECK(!heap_->code_space()->Contains(addr));
   Address* top = reinterpret_cast<Address*>(heap_->store_buffer_top());
   *top++ = addr;
@@ -32,8 +31,7 @@ void StoreBuffer::Mark(Address addr) {

 void StoreBuffer::EnterDirectlyIntoStoreBuffer(Address addr) {
   if (store_buffer_rebuilding_enabled_) {
-    SLOW_DCHECK(!heap_->cell_space()->Contains(addr) &&
-                !heap_->code_space()->Contains(addr) &&
+    SLOW_DCHECK(!heap_->code_space()->Contains(addr) &&
                 !heap_->new_space()->Contains(addr));
     Address* top = old_top_;
     *top++ = addr;
|
@ -537,7 +537,6 @@ void StoreBuffer::Compact() {
|
||||
// functions to reduce the number of unnecessary clashes.
|
||||
hash_sets_are_empty_ = false; // Hash sets are in use.
|
||||
for (Address* current = start_; current < top; current++) {
|
||||
DCHECK(!heap_->cell_space()->Contains(*current));
|
||||
DCHECK(!heap_->code_space()->Contains(*current));
|
||||
uintptr_t int_addr = reinterpret_cast<uintptr_t>(*current);
|
||||
// Shift out the last bits including any tags.
|
||||
|
@@ -5706,9 +5706,6 @@ inline bool ReceiverObjectNeedsWriteBarrier(HValue* object,
   while (object->IsInnerAllocatedObject()) {
     object = HInnerAllocatedObject::cast(object)->base_object();
   }
-  if (object->IsConstant() && HConstant::cast(object)->IsCell()) {
-    return false;
-  }
   if (object->IsConstant() &&
       HConstant::cast(object)->HasExternalReferenceValue()) {
     // Stores to external references require no write barriers
|
@ -204,6 +204,7 @@ Cell* RelocInfo::target_cell() {
|
||||
void RelocInfo::set_target_cell(Cell* cell,
|
||||
WriteBarrierMode write_barrier_mode,
|
||||
ICacheFlushMode icache_flush_mode) {
|
||||
DCHECK(cell->IsCell());
|
||||
DCHECK(rmode_ == RelocInfo::CELL);
|
||||
Address address = cell->address() + Cell::kValueOffset;
|
||||
Memory::Address_at(pc_) = address;
|
||||
|
@ -2760,6 +2760,12 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
|
||||
}
|
||||
__ mov(scratch, Operand(scratch, kDeltaToCmpImmediate));
|
||||
__ mov(Operand(scratch, 0), map);
|
||||
__ push(map);
|
||||
// Scratch points at the cell payload. Calculate the start of the object.
|
||||
__ sub(scratch, Immediate(Cell::kValueOffset - 1));
|
||||
__ RecordWriteField(scratch, Cell::kValueOffset, map, function,
|
||||
kDontSaveFPRegs, OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
|
||||
__ pop(map);
|
||||
}
|
||||
|
||||
// Loop through the prototype chain of the object looking for the function
|
||||
|
@@ -1459,6 +1459,14 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
     // Get the map location in scratch and patch it.
     __ GetRelocatedValue(inline_site, scratch, v1);  // v1 used as scratch.
     __ sw(map, FieldMemOperand(scratch, Cell::kValueOffset));
+
+    __ mov(t4, map);
+    // |scratch| points at the beginning of the cell. Calculate the field
+    // containing the map.
+    __ Addu(function, scratch, Operand(Cell::kValueOffset - 1));
+    __ RecordWriteField(scratch, Cell::kValueOffset, t4, function,
+                        kRAHasNotBeenSaved, kDontSaveFPRegs,
+                        OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
   }

   // Register mapping: a3 is object map and t0 is function prototype.
@@ -1459,6 +1459,14 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
     // Get the map location in scratch and patch it.
     __ GetRelocatedValue(inline_site, scratch, v1);  // v1 used as scratch.
     __ sd(map, FieldMemOperand(scratch, Cell::kValueOffset));
+
+    __ mov(t0, map);
+    // |scratch| points at the beginning of the cell. Calculate the
+    // field containing the map.
+    __ Daddu(function, scratch, Operand(Cell::kValueOffset - 1));
+    __ RecordWriteField(scratch, Cell::kValueOffset, t0, function,
+                        kRAHasNotBeenSaved, kDontSaveFPRegs,
+                        OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
   }

   // Register mapping: a3 is object map and a4 is function prototype.
@@ -1896,17 +1896,7 @@ void Oddball::set_kind(byte value) {
 }


-Object* Cell::value() const {
-  return READ_FIELD(this, kValueOffset);
-}
-
-
-void Cell::set_value(Object* val, WriteBarrierMode ignored) {
-  // The write barrier is not used for global property cells.
-  DCHECK(!val->IsPropertyCell() && !val->IsCell());
-  WRITE_FIELD(this, kValueOffset, val);
-}
-
+ACCESSORS(Cell, value, Object, kValueOffset)
 ACCESSORS(PropertyCell, dependent_code, DependentCode, kDependentCodeOffset)
 ACCESSORS(PropertyCell, value, Object, kValueOffset)

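The hand-written setter above skipped the write barrier on purpose, which was only sound while cells lived in a non-compacted, specially scanned space. Switching to the ACCESSORS macro gives Cell::value a barriered setter like any other pointer field. A hedged sketch of what such an accessor pair conventionally looks like (simplified, not the exact V8 macro expansion):

    struct CellSketch {
      void* value_;
      void* value() const { return value_; }
      void set_value(void* v) {
        value_ = v;
        RecordWrite(&value_, v);  // hypothetical barrier hook the old code omitted
      }
      static void RecordWrite(void** slot, void* v) { /* write barrier */ }
    };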
@@ -665,7 +665,6 @@ enum InstanceType {
   MAP_TYPE,
   CODE_TYPE,
   ODDBALL_TYPE,
-  CELL_TYPE,

   // "Data", objects that cannot contain non-map-word pointers to heap
   // objects.
@@ -720,6 +719,7 @@ enum InstanceType {
   FIXED_ARRAY_TYPE,
   CONSTANT_POOL_ARRAY_TYPE,
   SHARED_FUNCTION_INFO_TYPE,
+  CELL_TYPE,
   WEAK_CELL_TYPE,
   PROPERTY_CELL_TYPE,
   PROTOTYPE_INFO_TYPE,
@@ -9780,7 +9780,6 @@ class Cell: public HeapObject {

   static inline Cell* FromValueAddress(Address value) {
     Object* result = FromAddress(value - kValueOffset);
-    DCHECK(result->IsCell());
     return static_cast<Cell*>(result);
   }

@@ -843,7 +843,7 @@ void Deserializer::ReadData(Object** current, Object** limit, int source_space,
   // but that may change.
   bool write_barrier_needed =
       (current_object_address != NULL && source_space != NEW_SPACE &&
-       source_space != CELL_SPACE && source_space != CODE_SPACE);
+       source_space != CODE_SPACE);
   while (current < limit) {
     byte data = source_.Get();
     switch (data) {
@@ -952,7 +952,6 @@ void Deserializer::ReadData(Object** current, Object** limit, int source_space,
   CASE_STATEMENT(where, how, within, OLD_SPACE)  \
   CASE_STATEMENT(where, how, within, CODE_SPACE) \
   CASE_STATEMENT(where, how, within, MAP_SPACE)  \
-  CASE_STATEMENT(where, how, within, CELL_SPACE) \
   CASE_STATEMENT(where, how, within, LO_SPACE)   \
   CASE_BODY(where, how, within, kAnyOldSpace)

@@ -299,23 +299,27 @@ class SerializerDeserializer: public ObjectVisitor {
   static int nop() { return kNop; }

   // No reservation for large object space necessary.
-  static const int kNumberOfPreallocatedSpaces = LO_SPACE;
+  static const int kNumberOfPreallocatedSpaces = LAST_PAGED_SPACE + 1;
   static const int kNumberOfSpaces = LAST_SPACE + 1;

  protected:
   // ---------- byte code range 0x00..0x7f ----------
   // Byte codes in this range represent Where, HowToCode and WhereToPoint.
   // Where the pointed-to object can be found:
+  // The static assert below will trigger when the number of preallocated spaces
+  // changed. If that happens, update the bytecode ranges in the comments below.
+  STATIC_ASSERT(5 == kNumberOfSpaces);
   enum Where {
-    // 0x00..0x05  Allocate new object, in specified space.
+    // 0x00..0x04  Allocate new object, in specified space.
     kNewObject = 0,
+    // 0x05        Unused (including 0x25, 0x45, 0x65).
     // 0x06        Unused (including 0x26, 0x46, 0x66).
     // 0x07        Unused (including 0x27, 0x47, 0x67).
-    // 0x08..0x0d  Reference to previous object from space.
+    // 0x08..0x0c  Reference to previous object from space.
     kBackref = 0x08,
     // 0x0e        Unused (including 0x2e, 0x4e, 0x6e).
     // 0x0f        Unused (including 0x2f, 0x4f, 0x6f).
-    // 0x10..0x15  Reference to previous object from space after skip.
+    // 0x10..0x14  Reference to previous object from space after skip.
     kBackrefWithSkip = 0x10,
     // 0x16        Unused (including 0x36, 0x56, 0x76).
     // 0x17        Unused (including 0x37, 0x57, 0x77).
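The serializer packs a space index into each bytecode, so the opcode ranges above follow directly from the space count. An illustrative check of the new arithmetic (values taken from the AllocationSpace enum earlier in this patch; NEW, OLD, CODE, and MAP are the preallocated spaces, LO_SPACE is reserved separately):

    static_assert(/* LAST_PAGED_SPACE + 1: MAP_SPACE is 3 */ 3 + 1 == 4,
                  "four preallocated spaces");
    static_assert(/* LAST_SPACE + 1: LO_SPACE is 4 */ 4 + 1 == 5,
                  "five spaces total, matching STATIC_ASSERT(5 == kNumberOfSpaces)");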
@@ -2754,6 +2754,12 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
     __ movp(kScratchRegister,
             Operand(kScratchRegister, kOffsetToMapCheckValue));
     __ movp(Operand(kScratchRegister, 0), map);
+
+    __ movp(r8, map);
+    // Scratch points at the cell payload. Calculate the start of the object.
+    __ subp(kScratchRegister, Immediate(Cell::kValueOffset - 1));
+    __ RecordWriteField(kScratchRegister, Cell::kValueOffset, r8, function,
+                        kDontSaveFPRegs, OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
   }

   // Loop through the prototype chain looking for the function prototype.