X64: Disabled RSet in 64-bit mode.

Made a few more places use intptr_t instead of int for pointer arithmetic.
Ensure that objects have a declared size that matches heap object alignment.

Review URL: http://codereview.chromium.org/115559


git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@2007 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
This commit is contained in:
lrn@chromium.org 2009-05-20 08:05:12 +00:00
parent 10b8928e0e
commit 0920be9524
8 changed files with 60 additions and 27 deletions

View File

@ -145,7 +145,9 @@ void Heap::RecordWrite(Address address, int offset) {
if (new_space_.Contains(address)) return;
ASSERT(!new_space_.FromSpaceContains(address));
SLOW_ASSERT(Contains(address + offset));
#ifndef V8_HOST_ARCH_64_BIT
Page::SetRSet(address, offset);
#endif // V8_HOST_ARCH_64_BIT
}

View File

@ -667,11 +667,33 @@ void Heap::Scavenge() {
// Copy objects reachable from weak pointers.
GlobalHandles::IterateWeakRoots(&scavenge_visitor);
#if V8_HOST_ARCH_64_BIT
// TODO(X64): Make this go away again. We currently disable RSets
// in 64-bit mode.
HeapObjectIterator old_pointer_iterator(old_pointer_space_);
while (old_pointer_iterator.has_next()) {
HeapObject* heap_object = old_pointer_iterator.next();
heap_object->Iterate(&scavenge_visitor);
}
HeapObjectIterator map_iterator(map_space_);
while (map_iterator.has_next()) {
HeapObject* heap_object = map_iterator.next();
heap_object->Iterate(&scavenge_visitor);
}
LargeObjectIterator lo_iterator(lo_space_);
while (lo_iterator.has_next()) {
HeapObject* heap_object = lo_iterator.next();
if (heap_object->IsFixedArray()) {
heap_object->Iterate(&scavenge_visitor);
}
}
#else // V8_HOST_ARCH_64_BIT
// Copy objects reachable from the old generation. By definition,
// there are no intergenerational pointers in code or data spaces.
IterateRSet(old_pointer_space_, &ScavengePointer);
IterateRSet(map_space_, &ScavengePointer);
lo_space_->IterateRSet(&ScavengePointer);
#endif // V8_HOST_ARCH_64_BIT
do {
ASSERT(new_space_front <= new_space_.top());
@ -999,7 +1021,7 @@ bool Heap::CreateInitialMaps() {
meta_map_ = reinterpret_cast<Map*>(obj);
meta_map()->set_map(meta_map());
obj = AllocatePartialMap(FIXED_ARRAY_TYPE, Array::kHeaderSize);
obj = AllocatePartialMap(FIXED_ARRAY_TYPE, FixedArray::kHeaderSize);
if (obj->IsFailure()) return false;
fixed_array_map_ = Map::cast(obj);
@ -1056,37 +1078,37 @@ bool Heap::CreateInitialMaps() {
STRING_TYPE_LIST(ALLOCATE_STRING_MAP);
#undef ALLOCATE_STRING_MAP
obj = AllocateMap(SHORT_STRING_TYPE, SeqTwoByteString::kHeaderSize);
obj = AllocateMap(SHORT_STRING_TYPE, SeqTwoByteString::kAlignedSize);
if (obj->IsFailure()) return false;
undetectable_short_string_map_ = Map::cast(obj);
undetectable_short_string_map_->set_is_undetectable();
obj = AllocateMap(MEDIUM_STRING_TYPE, SeqTwoByteString::kHeaderSize);
obj = AllocateMap(MEDIUM_STRING_TYPE, SeqTwoByteString::kAlignedSize);
if (obj->IsFailure()) return false;
undetectable_medium_string_map_ = Map::cast(obj);
undetectable_medium_string_map_->set_is_undetectable();
obj = AllocateMap(LONG_STRING_TYPE, SeqTwoByteString::kHeaderSize);
obj = AllocateMap(LONG_STRING_TYPE, SeqTwoByteString::kAlignedSize);
if (obj->IsFailure()) return false;
undetectable_long_string_map_ = Map::cast(obj);
undetectable_long_string_map_->set_is_undetectable();
obj = AllocateMap(SHORT_ASCII_STRING_TYPE, SeqAsciiString::kHeaderSize);
obj = AllocateMap(SHORT_ASCII_STRING_TYPE, SeqAsciiString::kAlignedSize);
if (obj->IsFailure()) return false;
undetectable_short_ascii_string_map_ = Map::cast(obj);
undetectable_short_ascii_string_map_->set_is_undetectable();
obj = AllocateMap(MEDIUM_ASCII_STRING_TYPE, SeqAsciiString::kHeaderSize);
obj = AllocateMap(MEDIUM_ASCII_STRING_TYPE, SeqAsciiString::kAlignedSize);
if (obj->IsFailure()) return false;
undetectable_medium_ascii_string_map_ = Map::cast(obj);
undetectable_medium_ascii_string_map_->set_is_undetectable();
obj = AllocateMap(LONG_ASCII_STRING_TYPE, SeqAsciiString::kHeaderSize);
obj = AllocateMap(LONG_ASCII_STRING_TYPE, SeqAsciiString::kAlignedSize);
if (obj->IsFailure()) return false;
undetectable_long_ascii_string_map_ = Map::cast(obj);
undetectable_long_ascii_string_map_->set_is_undetectable();
obj = AllocateMap(BYTE_ARRAY_TYPE, Array::kHeaderSize);
obj = AllocateMap(BYTE_ARRAY_TYPE, Array::kAlignedSize);
if (obj->IsFailure()) return false;
byte_array_map_ = Map::cast(obj);

View File

@ -296,12 +296,12 @@ enum PropertyNormalizationMode {
// Since string types are not consecutive, this macro is used to
// iterate over them.
#define STRING_TYPE_LIST(V) \
V(SHORT_SYMBOL_TYPE, SeqTwoByteString::kHeaderSize, short_symbol) \
V(MEDIUM_SYMBOL_TYPE, SeqTwoByteString::kHeaderSize, medium_symbol) \
V(LONG_SYMBOL_TYPE, SeqTwoByteString::kHeaderSize, long_symbol) \
V(SHORT_ASCII_SYMBOL_TYPE, SeqAsciiString::kHeaderSize, short_ascii_symbol) \
V(MEDIUM_ASCII_SYMBOL_TYPE, SeqAsciiString::kHeaderSize, medium_ascii_symbol)\
V(LONG_ASCII_SYMBOL_TYPE, SeqAsciiString::kHeaderSize, long_ascii_symbol) \
V(SHORT_SYMBOL_TYPE, SeqTwoByteString::kAlignedSize, short_symbol) \
V(MEDIUM_SYMBOL_TYPE, SeqTwoByteString::kAlignedSize, medium_symbol) \
V(LONG_SYMBOL_TYPE, SeqTwoByteString::kAlignedSize, long_symbol) \
V(SHORT_ASCII_SYMBOL_TYPE, SeqAsciiString::kAlignedSize, short_ascii_symbol) \
V(MEDIUM_ASCII_SYMBOL_TYPE, SeqAsciiString::kAlignedSize, medium_ascii_symbol)\
V(LONG_ASCII_SYMBOL_TYPE, SeqAsciiString::kAlignedSize, long_ascii_symbol) \
V(SHORT_CONS_SYMBOL_TYPE, ConsString::kSize, short_cons_symbol) \
V(MEDIUM_CONS_SYMBOL_TYPE, ConsString::kSize, medium_cons_symbol) \
V(LONG_CONS_SYMBOL_TYPE, ConsString::kSize, long_cons_symbol) \
@ -338,12 +338,12 @@ enum PropertyNormalizationMode {
V(LONG_EXTERNAL_ASCII_SYMBOL_TYPE, \
ExternalAsciiString::kSize, \
long_external_ascii_symbol) \
V(SHORT_STRING_TYPE, SeqTwoByteString::kHeaderSize, short_string) \
V(MEDIUM_STRING_TYPE, SeqTwoByteString::kHeaderSize, medium_string) \
V(LONG_STRING_TYPE, SeqTwoByteString::kHeaderSize, long_string) \
V(SHORT_ASCII_STRING_TYPE, SeqAsciiString::kHeaderSize, short_ascii_string) \
V(MEDIUM_ASCII_STRING_TYPE, SeqAsciiString::kHeaderSize, medium_ascii_string)\
V(LONG_ASCII_STRING_TYPE, SeqAsciiString::kHeaderSize, long_ascii_string) \
V(SHORT_STRING_TYPE, SeqTwoByteString::kAlignedSize, short_string) \
V(MEDIUM_STRING_TYPE, SeqTwoByteString::kAlignedSize, medium_string) \
V(LONG_STRING_TYPE, SeqTwoByteString::kAlignedSize, long_string) \
V(SHORT_ASCII_STRING_TYPE, SeqAsciiString::kAlignedSize, short_ascii_string) \
V(MEDIUM_ASCII_STRING_TYPE, SeqAsciiString::kAlignedSize, medium_ascii_string)\
V(LONG_ASCII_STRING_TYPE, SeqAsciiString::kAlignedSize, long_ascii_string) \
V(SHORT_CONS_STRING_TYPE, ConsString::kSize, short_cons_string) \
V(MEDIUM_CONS_STRING_TYPE, ConsString::kSize, medium_cons_string) \
V(LONG_CONS_STRING_TYPE, ConsString::kSize, long_cons_string) \
@ -1553,6 +1553,7 @@ class Array: public HeapObject {
// Layout descriptor.
static const int kLengthOffset = HeapObject::kHeaderSize;
static const int kHeaderSize = kLengthOffset + kIntSize;
static const int kAlignedSize = POINTER_SIZE_ALIGN(kHeaderSize);
private:
DISALLOW_IMPLICIT_CONSTRUCTORS(Array);
@ -3373,6 +3374,7 @@ class String: public HeapObject {
// Layout description.
static const int kLengthOffset = HeapObject::kHeaderSize;
static const int kSize = kLengthOffset + kIntSize;
// Notice: kSize is not pointer-size aligned if pointers are 64-bit.
// Limits on sizes of different types of strings.
static const int kMaxShortStringSize = 63;
@ -3526,6 +3528,7 @@ class SeqAsciiString: public SeqString {
// Layout description.
static const int kHeaderSize = String::kSize;
static const int kAlignedSize = POINTER_SIZE_ALIGN(kHeaderSize);
// Support for StringInputBuffer.
inline void SeqAsciiStringReadBlockIntoBuffer(ReadBlockBuffer* buffer,
@ -3571,6 +3574,7 @@ class SeqTwoByteString: public SeqString {
// Layout description.
static const int kHeaderSize = String::kSize;
static const int kAlignedSize = POINTER_SIZE_ALIGN(kHeaderSize);
// Support for StringInputBuffer.
inline void SeqTwoByteStringReadBlockIntoBuffer(ReadBlockBuffer* buffer,

View File

@ -5428,7 +5428,7 @@ static Object* Runtime_DebugBreak(Arguments args) {
// Helper functions for wrapping and unwrapping stack frame ids.
static Smi* WrapFrameId(StackFrame::Id id) {
ASSERT(IsAligned(OffsetFrom(id), 4));
ASSERT(IsAligned(OffsetFrom(id), static_cast<intptr_t>(4)));
return Smi::FromInt(id >> 2);
}

View File

@ -92,8 +92,10 @@ Address Page::AllocationTop() {
// Clears this page's remembered set. On 64-bit hosts RSets are
// currently disabled (see V8_HOST_ARCH_64_BIT), so this compiles to a
// no-op there.
void Page::ClearRSet() {
#ifndef V8_HOST_ARCH_64_BIT
// This method can be called in all rset states.
memset(RSetStart(), 0, kRSetEndOffset - kRSetStartOffset);
#endif  // V8_HOST_ARCH_64_BIT
}
@ -194,7 +196,7 @@ bool MemoryAllocator::IsPageInSpace(Page* p, PagedSpace* space) {
Page* MemoryAllocator::GetNextPage(Page* p) {
ASSERT(p->is_valid());
int raw_addr = p->opaque_header & ~Page::kPageAlignmentMask;
intptr_t raw_addr = p->opaque_header & ~Page::kPageAlignmentMask;
return Page::FromAddress(AddressFrom<Address>(raw_addr));
}
@ -207,7 +209,7 @@ int MemoryAllocator::GetChunkId(Page* p) {
void MemoryAllocator::SetNextPage(Page* prev, Page* next) {
ASSERT(prev->is_valid());
int chunk_id = prev->opaque_header & Page::kPageAlignmentMask;
int chunk_id = GetChunkId(prev);
ASSERT_PAGE_ALIGNED(next->address());
prev->opaque_header = OffsetFrom(next->address()) | chunk_id;
}

View File

@ -98,6 +98,7 @@ class AllocationInfo;
// its page offset by 32. Therefore, the object area in a page starts at the
// 256th byte (8K/32). Bytes 0 to 255 do not need the remembered set, so that
// the first two words (64 bits) in a page can be used for other purposes.
// TODO(X64): This description only represents the 32-bit layout.
//
// The mark-compact collector transforms a map pointer into a page index and a
// page offset. The map space can have up to 1024 pages, and 8M bytes (1024 *
@ -213,7 +214,7 @@ class Page {
static const int kPageSize = 1 << kPageSizeBits;
// Page size mask.
static const int kPageAlignmentMask = (1 << kPageSizeBits) - 1;
static const intptr_t kPageAlignmentMask = (1 << kPageSizeBits) - 1;
// The end offset of the remembered set in a page
// (heaps are aligned to pointer size).
@ -242,7 +243,7 @@ class Page {
// in the current page. If a page is in the large object space, the first
// word *may* (if the page start and large object chunk start are the
// same) contain the address of the next large object chunk.
int opaque_header;
intptr_t opaque_header;
// If the page is not in the large object space, the low-order bit of the
// second word is set. If the page is in the large object space, the

View File

@ -54,7 +54,7 @@ static inline int ArithmeticShiftRight(int x, int s) {
// This allows conversion of Addresses and integral types into
// 0-relative int offsets.
template <typename T>
static inline int OffsetFrom(T x) {
static inline intptr_t OffsetFrom(T x) {
return x - static_cast<T>(0);
}
@ -63,7 +63,7 @@ static inline int OffsetFrom(T x) {
// This allows conversion of 0-relative int offsets into Addresses and
// integral types.
template <typename T>
static inline T AddressFrom(int x) {
static inline T AddressFrom(intptr_t x) {
return static_cast<T>(0) + x;
}

View File

@ -261,6 +261,8 @@ void ThreadManager::EagerlyArchiveThread() {
ThreadState* state = lazily_archived_thread_state_;
state->LinkInto(ThreadState::IN_USE_LIST);
char* to = state->data();
// Ensure that data containing GC roots are archived first, and handle them
// in ThreadManager::Iterate(ObjectVisitor*).
to = HandleScopeImplementer::ArchiveThread(to);
to = Top::ArchiveThread(to);
#ifdef ENABLE_DEBUGGER_SUPPORT