New attempt to make the allocation routines 64-bit clean.

This one has been approved by the 64-bit compiler in MSVC
2005, so I hope it also passes the 2008 version.

The --max-new-space-size option is now in kBytes.
The --max-old-space-size option is now in MBytes.
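
For illustration, here is a minimal sketch (not V8 code; the KB/MB constants
and the conversion mirror the ConfigureHeapDefault hunk below) of how the new
flag units map to byte sizes:

    #include <stdint.h>
    #include <stdio.h>

    static const int KB = 1024;
    static const int MB = KB * KB;

    int main() {
      // Hypothetical flag values; in V8 these come from FLAG_max_new_space_size
      // and FLAG_max_old_space_size.
      int max_new_space_size_kb = 256;   // --max-new-space-size, now in kBytes
      int max_old_space_size_mb = 512;   // --max-old-space-size, now in MBytes

      // Mirrors ConfigureHeap(FLAG_max_new_space_size * (KB / 2),
      //                       FLAG_max_old_space_size * MB): the new-space flag
      // covers both semispaces, so each semispace gets half of it.
      intptr_t max_semispace_bytes =
          static_cast<intptr_t>(max_new_space_size_kb) * (KB / 2);
      intptr_t max_old_generation_bytes =
          static_cast<intptr_t>(max_old_space_size_mb) * MB;

      printf("max semispace: %ld, max old generation: %ld\n",
             static_cast<long>(max_semispace_bytes),
             static_cast<long>(max_old_generation_bytes));
      return 0;
    }

The mjsunit tests at the end of this commit change their flag comments
accordingly (--max-new-space-size=262144 bytes becomes
--max-new-space-size=256 kBytes).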

Some issues remain with 64-bit heaps and the counters. See
http://code.google.com/p/v8/issues/detail?id=887
Review URL: http://codereview.chromium.org/3573005

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@5559 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
Author: erik.corry@gmail.com
Date:   2010-09-30 07:22:53 +00:00
Parent: 9e618ff460
Commit: d46fb9d454
28 changed files with 322 additions and 232 deletions

View File

@ -134,27 +134,27 @@ void i::V8::FatalProcessOutOfMemory(const char* location, bool take_snapshot) {
heap_stats.new_space_size = &new_space_size;
int new_space_capacity;
heap_stats.new_space_capacity = &new_space_capacity;
int old_pointer_space_size;
intptr_t old_pointer_space_size;
heap_stats.old_pointer_space_size = &old_pointer_space_size;
int old_pointer_space_capacity;
intptr_t old_pointer_space_capacity;
heap_stats.old_pointer_space_capacity = &old_pointer_space_capacity;
int old_data_space_size;
intptr_t old_data_space_size;
heap_stats.old_data_space_size = &old_data_space_size;
int old_data_space_capacity;
intptr_t old_data_space_capacity;
heap_stats.old_data_space_capacity = &old_data_space_capacity;
int code_space_size;
intptr_t code_space_size;
heap_stats.code_space_size = &code_space_size;
int code_space_capacity;
intptr_t code_space_capacity;
heap_stats.code_space_capacity = &code_space_capacity;
int map_space_size;
intptr_t map_space_size;
heap_stats.map_space_size = &map_space_size;
int map_space_capacity;
intptr_t map_space_capacity;
heap_stats.map_space_capacity = &map_space_capacity;
int cell_space_size;
intptr_t cell_space_size;
heap_stats.cell_space_size = &cell_space_size;
int cell_space_capacity;
intptr_t cell_space_capacity;
heap_stats.cell_space_capacity = &cell_space_capacity;
int lo_space_size;
intptr_t lo_space_size;
heap_stats.lo_space_size = &lo_space_size;
int global_handle_count;
heap_stats.global_handle_count = &global_handle_count;
@ -166,9 +166,9 @@ void i::V8::FatalProcessOutOfMemory(const char* location, bool take_snapshot) {
heap_stats.near_death_global_handle_count = &near_death_global_handle_count;
int destroyed_global_handle_count;
heap_stats.destroyed_global_handle_count = &destroyed_global_handle_count;
int memory_allocator_size;
intptr_t memory_allocator_size;
heap_stats.memory_allocator_size = &memory_allocator_size;
int memory_allocator_capacity;
intptr_t memory_allocator_capacity;
heap_stats.memory_allocator_capacity = &memory_allocator_capacity;
int objects_per_type[LAST_TYPE + 1] = {0};
heap_stats.objects_per_type = objects_per_type;

View File

@ -294,7 +294,7 @@ void Debugger::Debug() {
} else if (GetVFPSingleValue(arg1, &svalue)) {
PrintF("%s: %f \n", arg1, svalue);
} else if (GetVFPDoubleValue(arg1, &dvalue)) {
PrintF("%s: %lf \n", arg1, dvalue);
PrintF("%s: %f \n", arg1, dvalue);
} else {
PrintF("%s unrecognized\n", arg1);
}
@ -349,7 +349,8 @@ void Debugger::Debug() {
end = cur + words;
while (cur < end) {
PrintF(" 0x%08x: 0x%08x %10d\n", cur, *cur, *cur);
PrintF(" 0x%08x: 0x%08x %10d\n",
reinterpret_cast<intptr_t>(cur), *cur, *cur);
cur++;
}
} else if (strcmp(cmd, "disasm") == 0) {
@ -382,7 +383,8 @@ void Debugger::Debug() {
while (cur < end) {
dasm.InstructionDecode(buffer, cur);
PrintF(" 0x%08x %s\n", cur, buffer.start());
PrintF(" 0x%08x %s\n",
reinterpret_cast<intptr_t>(cur), buffer.start());
cur += Instr::kInstrSize;
}
} else if (strcmp(cmd, "gdb") == 0) {
@ -1061,7 +1063,7 @@ uintptr_t Simulator::StackLimit() const {
// Unsupported instructions use Format to print an error and stop execution.
void Simulator::Format(Instr* instr, const char* format) {
PrintF("Simulator found unsupported instruction:\n 0x%08x: %s\n",
instr, format);
reinterpret_cast<intptr_t>(instr), format);
UNIMPLEMENTED();
}
@ -2650,7 +2652,7 @@ void Simulator::InstructionDecode(Instr* instr) {
v8::internal::EmbeddedVector<char, 256> buffer;
dasm.InstructionDecode(buffer,
reinterpret_cast<byte*>(instr));
PrintF(" 0x%08x %s\n", instr, buffer.start());
PrintF(" 0x%08x %s\n", reinterpret_cast<intptr_t>(instr), buffer.start());
}
if (instr->ConditionField() == special_condition) {
DecodeUnconditional(instr);

View File

@ -465,7 +465,7 @@ const char* RelocInfo::RelocModeName(RelocInfo::Mode rmode) {
void RelocInfo::Print() {
PrintF("%p %s", pc_, RelocModeName(rmode_));
if (IsComment(rmode_)) {
PrintF(" (%s)", data_);
PrintF(" (%s)", reinterpret_cast<char*>(data_));
} else if (rmode_ == EMBEDDED_OBJECT) {
PrintF(" (");
target_object()->ShortPrint();
@ -479,7 +479,7 @@ void RelocInfo::Print() {
Code* code = Code::GetCodeFromTargetAddress(target_address());
PrintF(" (%s) (%p)", Code::Kind2String(code->kind()), target_address());
} else if (IsPosition(rmode_)) {
PrintF(" (%d)", data());
PrintF(" (%" V8_PTR_PREFIX "d)", data());
}
PrintF("\n");

View File

@ -90,7 +90,7 @@ Handle<Object> Context::Lookup(Handle<String> name, ContextLookupFlags flags,
do {
if (FLAG_trace_contexts) {
PrintF(" - looking in context %p", *context);
PrintF(" - looking in context %p", reinterpret_cast<void*>(*context));
if (context->IsGlobalContext()) PrintF(" (global context)");
PrintF("\n");
}
@ -110,7 +110,8 @@ Handle<Object> Context::Lookup(Handle<String> name, ContextLookupFlags flags,
if (*attributes != ABSENT) {
// property found
if (FLAG_trace_contexts) {
PrintF("=> found property in context object %p\n", *extension);
PrintF("=> found property in context object %p\n",
reinterpret_cast<void*>(*extension));
}
return extension;
}
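
Many of the %p fixes in this commit (here, and in global-handles.cc,
objects-debug.cc and heap.cc below) add an explicit cast to void*. The reason
is that %p is only specified for void*, so once printf-style format checking
is enabled for PrintF (see the utils.h hunk further down) any other pointer
type triggers a warning. A minimal sketch, with an illustrative stand-in type:

    #include <stdio.h>

    struct Context { int field; };  // stand-in for a V8 heap object type

    void TraceContext(Context* context) {
      // printf("%p", context) would pass a Context*, which -Wformat flags
      // because %p is specified to take a void*.
      printf(" - looking in context %p\n", reinterpret_cast<void*>(context));
    }

    int main() {
      Context c = { 0 };
      TraceContext(&c);
      return 0;
    }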

View File

@ -42,7 +42,7 @@ void BitVector::Print() {
if (Contains(i)) {
if (!first) PrintF(",");
first = false;
PrintF("%d");
PrintF("%d", i);
}
}
PrintF("}");

View File

@ -44,7 +44,10 @@ namespace internal {
void Disassembler::Dump(FILE* f, byte* begin, byte* end) {
for (byte* pc = begin; pc < end; pc++) {
if (f == NULL) {
PrintF("%" V8PRIxPTR " %4" V8PRIdPTR " %02x\n", pc, pc - begin, *pc);
PrintF("%" V8PRIxPTR " %4" V8PRIdPTR " %02x\n",
reinterpret_cast<intptr_t>(pc),
pc - begin,
*pc);
} else {
fprintf(f, "%" V8PRIxPTR " %4" V8PRIdPTR " %02x\n",
reinterpret_cast<uintptr_t>(pc), pc - begin, *pc);

View File

@ -181,8 +181,8 @@ DEFINE_bool(always_inline_smi_code, false,
"always inline smi code in non-opt code")
// heap.cc
DEFINE_int(max_new_space_size, 0, "max size of the new generation")
DEFINE_int(max_old_space_size, 0, "max size of the old generation")
DEFINE_int(max_new_space_size, 0, "max size of the new generation (in kBytes)")
DEFINE_int(max_old_space_size, 0, "max size of the old generation (in Mbytes)")
DEFINE_bool(gc_global, false, "always perform global GCs")
DEFINE_int(gc_interval, -1, "garbage collect after <n> allocations")
DEFINE_bool(trace_gc, false,

View File

@ -486,7 +486,7 @@ void GlobalHandles::PrintStats() {
}
PrintF("Global Handle Statistics:\n");
PrintF(" allocated memory = %dB\n", sizeof(Node) * total);
PrintF(" allocated memory = %" V8_PTR_PREFIX "dB\n", sizeof(Node) * total);
PrintF(" # weak = %d\n", weak);
PrintF(" # pending = %d\n", pending);
PrintF(" # near_death = %d\n", near_death);
@ -497,8 +497,10 @@ void GlobalHandles::PrintStats() {
void GlobalHandles::Print() {
PrintF("Global handles:\n");
for (Node* current = head_; current != NULL; current = current->next()) {
PrintF(" handle %p to %p (weak=%d)\n", current->handle().location(),
*current->handle(), current->state_ == Node::WEAK);
PrintF(" handle %p to %p (weak=%d)\n",
reinterpret_cast<void*>(current->handle().location()),
reinterpret_cast<void*>(*current->handle()),
current->state_ == Node::WEAK);
}
}

View File

@ -36,7 +36,7 @@ namespace v8 {
namespace internal {
void Heap::UpdateOldSpaceLimits() {
int old_gen_size = PromotedSpaceSize();
intptr_t old_gen_size = PromotedSpaceSize();
old_gen_promotion_limit_ =
old_gen_size + Max(kMinimumPromotionLimit, old_gen_size / 3);
old_gen_allocation_limit_ =
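
As a worked example of the promotion-limit formula above
(kMinimumPromotionLimit is 2 MB per the heap.h hunk below; the
allocation-limit line is truncated in this view and not reproduced here):

    #include <stdint.h>
    #include <stdio.h>

    static const intptr_t MB = 1024 * 1024;
    static const intptr_t kMinimumPromotionLimit = 2 * MB;

    static intptr_t Max(intptr_t a, intptr_t b) { return a > b ? a : b; }

    int main() {
      // Hypothetical promoted-space size after a collection.
      intptr_t old_gen_size = 192 * MB;
      intptr_t limit = old_gen_size + Max(kMinimumPromotionLimit, old_gen_size / 3);
      printf("old_gen_promotion_limit_: %ld MB\n",
             static_cast<long>(limit / MB));  // prints 256
      return 0;
    }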

View File

@ -63,8 +63,8 @@ MapSpace* Heap::map_space_ = NULL;
CellSpace* Heap::cell_space_ = NULL;
LargeObjectSpace* Heap::lo_space_ = NULL;
int Heap::old_gen_promotion_limit_ = kMinimumPromotionLimit;
int Heap::old_gen_allocation_limit_ = kMinimumAllocationLimit;
intptr_t Heap::old_gen_promotion_limit_ = kMinimumPromotionLimit;
intptr_t Heap::old_gen_allocation_limit_ = kMinimumAllocationLimit;
int Heap::old_gen_exhausted_ = false;
@ -75,19 +75,19 @@ int Heap::amount_of_external_allocated_memory_at_last_global_gc_ = 0;
// a multiple of Page::kPageSize.
#if defined(ANDROID)
int Heap::max_semispace_size_ = 2*MB;
int Heap::max_old_generation_size_ = 192*MB;
intptr_t Heap::max_old_generation_size_ = 192*MB;
int Heap::initial_semispace_size_ = 128*KB;
size_t Heap::code_range_size_ = 0;
intptr_t Heap::code_range_size_ = 0;
#elif defined(V8_TARGET_ARCH_X64)
int Heap::max_semispace_size_ = 16*MB;
int Heap::max_old_generation_size_ = 1*GB;
intptr_t Heap::max_old_generation_size_ = 1*GB;
int Heap::initial_semispace_size_ = 1*MB;
size_t Heap::code_range_size_ = 512*MB;
intptr_t Heap::code_range_size_ = 512*MB;
#else
int Heap::max_semispace_size_ = 8*MB;
int Heap::max_old_generation_size_ = 512*MB;
intptr_t Heap::max_old_generation_size_ = 512*MB;
int Heap::initial_semispace_size_ = 512*KB;
size_t Heap::code_range_size_ = 0;
intptr_t Heap::code_range_size_ = 0;
#endif
// The snapshot semispace size will be the default semispace size if
@ -108,7 +108,7 @@ HeapObjectCallback Heap::gc_safe_size_of_old_object_ = NULL;
// Will be 4 * reserved_semispace_size_ to ensure that young
// generation can be aligned to its size.
int Heap::survived_since_last_expansion_ = 0;
int Heap::external_allocation_limit_ = 0;
intptr_t Heap::external_allocation_limit_ = 0;
Heap::HeapState Heap::gc_state_ = NOT_IN_GC;
@ -137,13 +137,13 @@ int Heap::allocation_timeout_ = 0;
bool Heap::disallow_allocation_failure_ = false;
#endif // DEBUG
int GCTracer::alive_after_last_gc_ = 0;
intptr_t GCTracer::alive_after_last_gc_ = 0;
double GCTracer::last_gc_end_timestamp_ = 0.0;
int GCTracer::max_gc_pause_ = 0;
int GCTracer::max_alive_after_gc_ = 0;
intptr_t GCTracer::max_alive_after_gc_ = 0;
int GCTracer::min_in_mutator_ = kMaxInt;
int Heap::Capacity() {
intptr_t Heap::Capacity() {
if (!HasBeenSetup()) return 0;
return new_space_.Capacity() +
@ -155,7 +155,7 @@ int Heap::Capacity() {
}
int Heap::CommittedMemory() {
intptr_t Heap::CommittedMemory() {
if (!HasBeenSetup()) return 0;
return new_space_.CommittedMemory() +
@ -168,7 +168,7 @@ int Heap::CommittedMemory() {
}
int Heap::Available() {
intptr_t Heap::Available() {
if (!HasBeenSetup()) return 0;
return new_space_.Available() +
@ -289,33 +289,46 @@ void Heap::ReportStatisticsBeforeGC() {
#if defined(ENABLE_LOGGING_AND_PROFILING)
void Heap::PrintShortHeapStatistics() {
if (!FLAG_trace_gc_verbose) return;
PrintF("Memory allocator, used: %8d, available: %8d\n",
PrintF("Memory allocator, used: %8" V8_PTR_PREFIX "d"
", available: %8" V8_PTR_PREFIX "d\n",
MemoryAllocator::Size(),
MemoryAllocator::Available());
PrintF("New space, used: %8d, available: %8d\n",
PrintF("New space, used: %8" V8_PTR_PREFIX "d"
", available: %8" V8_PTR_PREFIX "d\n",
Heap::new_space_.Size(),
new_space_.Available());
PrintF("Old pointers, used: %8d, available: %8d, waste: %8d\n",
PrintF("Old pointers, used: %8" V8_PTR_PREFIX "d"
", available: %8" V8_PTR_PREFIX "d"
", waste: %8" V8_PTR_PREFIX "d\n",
old_pointer_space_->Size(),
old_pointer_space_->Available(),
old_pointer_space_->Waste());
PrintF("Old data space, used: %8d, available: %8d, waste: %8d\n",
PrintF("Old data space, used: %8" V8_PTR_PREFIX "d"
", available: %8" V8_PTR_PREFIX "d"
", waste: %8" V8_PTR_PREFIX "d\n",
old_data_space_->Size(),
old_data_space_->Available(),
old_data_space_->Waste());
PrintF("Code space, used: %8d, available: %8d, waste: %8d\n",
PrintF("Code space, used: %8" V8_PTR_PREFIX "d"
", available: %8" V8_PTR_PREFIX "d"
", waste: %8" V8_PTR_PREFIX "d\n",
code_space_->Size(),
code_space_->Available(),
code_space_->Waste());
PrintF("Map space, used: %8d, available: %8d, waste: %8d\n",
PrintF("Map space, used: %8" V8_PTR_PREFIX "d"
", available: %8" V8_PTR_PREFIX "d"
", waste: %8" V8_PTR_PREFIX "d\n",
map_space_->Size(),
map_space_->Available(),
map_space_->Waste());
PrintF("Cell space, used: %8d, available: %8d, waste: %8d\n",
PrintF("Cell space, used: %8" V8_PTR_PREFIX "d"
", available: %8" V8_PTR_PREFIX "d"
", waste: %8" V8_PTR_PREFIX "d\n",
cell_space_->Size(),
cell_space_->Available(),
cell_space_->Waste());
PrintF("Large object space, used: %8d, avaialble: %8d\n",
PrintF("Large object space, used: %8" V8_PTR_PREFIX "d"
", available: %8" V8_PTR_PREFIX "d\n",
lo_space_->Size(),
lo_space_->Available());
}
@ -364,8 +377,8 @@ void Heap::GarbageCollectionPrologue() {
#endif
}
int Heap::SizeOfObjects() {
int total = 0;
intptr_t Heap::SizeOfObjects() {
intptr_t total = 0;
AllSpaces spaces;
for (Space* space = spaces.next(); space != NULL; space = spaces.next()) {
total += space->Size();
@ -388,7 +401,7 @@ void Heap::GarbageCollectionEpilogue() {
if (FLAG_code_stats) ReportCodeStatistics("After GC");
#endif
Counters::alive_after_last_gc.Set(SizeOfObjects());
Counters::alive_after_last_gc.Set(static_cast<int>(SizeOfObjects()));
Counters::symbol_table_capacity.Set(symbol_table()->Capacity());
Counters::number_of_symbols.Set(symbol_table()->NumberOfElements());
@ -690,7 +703,7 @@ void Heap::PerformGarbageCollection(GarbageCollector collector,
EnsureFromSpaceIsCommitted();
int start_new_space_size = Heap::new_space()->Size();
int start_new_space_size = Heap::new_space()->SizeAsInt();
if (collector == MARK_COMPACTOR) {
// Perform mark-sweep with optional compaction.
@ -962,7 +975,7 @@ void Heap::Scavenge() {
DescriptorLookupCache::Clear();
// Used for updating survived_since_last_expansion_ at function end.
int survived_watermark = PromotedSpaceSize();
intptr_t survived_watermark = PromotedSpaceSize();
CheckNewSpaceExpansionCriteria();
@ -1032,8 +1045,8 @@ void Heap::Scavenge() {
new_space_.set_age_mark(new_space_.top());
// Update how much has survived scavenge.
IncrementYoungSurvivorsCounter(
(PromotedSpaceSize() - survived_watermark) + new_space_.Size());
IncrementYoungSurvivorsCounter(static_cast<int>(
(PromotedSpaceSize() - survived_watermark) + new_space_.Size()));
LOG(ResourceEvent("scavenge", "end"));
@ -3496,8 +3509,10 @@ void Heap::ReportHeapStatistics(const char* title) {
PrintF(">>>>>> =============== %s (%d) =============== >>>>>>\n",
title, gc_count_);
PrintF("mark-compact GC : %d\n", mc_count_);
PrintF("old_gen_promotion_limit_ %d\n", old_gen_promotion_limit_);
PrintF("old_gen_allocation_limit_ %d\n", old_gen_allocation_limit_);
PrintF("old_gen_promotion_limit_ %" V8_PTR_PREFIX "d\n",
old_gen_promotion_limit_);
PrintF("old_gen_allocation_limit_ %" V8_PTR_PREFIX "d\n",
old_gen_allocation_limit_);
PrintF("\n");
PrintF("Number of handles : %d\n", HandleScope::NumberOfHandles());
@ -4069,15 +4084,16 @@ bool Heap::ConfigureHeap(int max_semispace_size, int max_old_gen_size) {
bool Heap::ConfigureHeapDefault() {
return ConfigureHeap(FLAG_max_new_space_size / 2, FLAG_max_old_space_size);
return ConfigureHeap(
FLAG_max_new_space_size * (KB / 2), FLAG_max_old_space_size * MB);
}
void Heap::RecordStats(HeapStats* stats, bool take_snapshot) {
*stats->start_marker = HeapStats::kStartMarker;
*stats->end_marker = HeapStats::kEndMarker;
*stats->new_space_size = new_space_.Size();
*stats->new_space_capacity = new_space_.Capacity();
*stats->new_space_size = new_space_.SizeAsInt();
*stats->new_space_capacity = static_cast<int>(new_space_.Capacity());
*stats->old_pointer_space_size = old_pointer_space_->Size();
*stats->old_pointer_space_capacity = old_pointer_space_->Capacity();
*stats->old_data_space_size = old_data_space_->Size();
@ -4111,7 +4127,7 @@ void Heap::RecordStats(HeapStats* stats, bool take_snapshot) {
}
int Heap::PromotedSpaceSize() {
intptr_t Heap::PromotedSpaceSize() {
return old_pointer_space_->Size()
+ old_data_space_->Size()
+ code_space_->Size()
@ -4222,8 +4238,8 @@ bool Heap::Setup(bool create_heap_objects) {
if (!CreateInitialObjects()) return false;
}
LOG(IntEvent("heap-capacity", Capacity()));
LOG(IntEvent("heap-available", Available()));
LOG(IntPtrTEvent("heap-capacity", Capacity()));
LOG(IntPtrTEvent("heap-available", Available()));
#ifdef ENABLE_LOGGING_AND_PROFILING
// This should be called only after initial objects have been created.
@ -4257,7 +4273,8 @@ void Heap::TearDown() {
PrintF("mark_compact_count=%d ", mc_count_);
PrintF("max_gc_pause=%d ", GCTracer::get_max_gc_pause());
PrintF("min_in_mutator=%d ", GCTracer::get_min_in_mutator());
PrintF("max_alive_after_gc=%d ", GCTracer::get_max_alive_after_gc());
PrintF("max_alive_after_gc=%" V8_PTR_PREFIX "d ",
GCTracer::get_max_alive_after_gc());
PrintF("\n\n");
}
@ -4383,7 +4400,9 @@ class PrintHandleVisitor: public ObjectVisitor {
public:
void VisitPointers(Object** start, Object** end) {
for (Object** p = start; p < end; p++)
PrintF(" handle %p to %p\n", p, *p);
PrintF(" handle %p to %p\n",
reinterpret_cast<void*>(p),
reinterpret_cast<void*>(*p));
}
};
@ -4736,8 +4755,8 @@ void Heap::TracePathToGlobal() {
#endif
static int CountTotalHolesSize() {
int holes_size = 0;
static intptr_t CountTotalHolesSize() {
intptr_t holes_size = 0;
OldSpaces spaces;
for (OldSpace* space = spaces.next();
space != NULL;
@ -4835,13 +4854,14 @@ GCTracer::~GCTracer() {
PrintF("sweepns=%d ", static_cast<int>(scopes_[Scope::MC_SWEEP_NEWSPACE]));
PrintF("compact=%d ", static_cast<int>(scopes_[Scope::MC_COMPACT]));
PrintF("total_size_before=%d ", start_size_);
PrintF("total_size_after=%d ", Heap::SizeOfObjects());
PrintF("holes_size_before=%d ", in_free_list_or_wasted_before_gc_);
PrintF("holes_size_after=%d ", CountTotalHolesSize());
PrintF("total_size_before=%" V8_PTR_PREFIX "d ", start_size_);
PrintF("total_size_after=%" V8_PTR_PREFIX "d ", Heap::SizeOfObjects());
PrintF("holes_size_before=%" V8_PTR_PREFIX "d ",
in_free_list_or_wasted_before_gc_);
PrintF("holes_size_after=%" V8_PTR_PREFIX "d ", CountTotalHolesSize());
PrintF("allocated=%d ", allocated_since_last_gc_);
PrintF("promoted=%d ", promoted_objects_size_);
PrintF("allocated=%" V8_PTR_PREFIX "d ", allocated_since_last_gc_);
PrintF("promoted=%" V8_PTR_PREFIX "d ", promoted_objects_size_);
PrintF("\n");
}
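
The counters mentioned in the commit message are still int-based, so Set calls
such as Counters::alive_after_last_gc.Set(static_cast<int>(SizeOfObjects()))
above simply truncate once a 64-bit heap grows past INT_MAX. A sketch of that
truncation next to a saturating alternative (the alternative is illustrative
only, not what this patch does):

    #include <limits.h>
    #include <stdint.h>
    #include <stdio.h>

    // What the patch does: plain narrowing when feeding an int counter.
    static int TruncateToInt(int64_t value) {
      return static_cast<int>(value);  // implementation-defined once > INT_MAX
    }

    // Illustrative alternative: clamp instead of wrapping.
    static int SaturateToInt(int64_t value) {
      if (value > INT_MAX) return INT_MAX;
      if (value < INT_MIN) return INT_MIN;
      return static_cast<int>(value);
    }

    int main() {
      int64_t size = static_cast<int64_t>(3) << 30;  // 3 GB heap, 64-bit only
      printf("truncated: %d, saturated: %d\n",
             TruncateToInt(size), SaturateToInt(size));
      return 0;
    }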

View File

@ -245,31 +245,31 @@ class Heap : public AllStatic {
// semi space. The young generation consists of two semi spaces and
// we reserve twice the amount needed for those in order to ensure
// that new space can be aligned to its size.
static int MaxReserved() {
static intptr_t MaxReserved() {
return 4 * reserved_semispace_size_ + max_old_generation_size_;
}
static int MaxSemiSpaceSize() { return max_semispace_size_; }
static int ReservedSemiSpaceSize() { return reserved_semispace_size_; }
static int InitialSemiSpaceSize() { return initial_semispace_size_; }
static int MaxOldGenerationSize() { return max_old_generation_size_; }
static intptr_t MaxOldGenerationSize() { return max_old_generation_size_; }
// Returns the capacity of the heap in bytes w/o growing. Heap grows when
// more spaces are needed until it reaches the limit.
static int Capacity();
static intptr_t Capacity();
// Returns the amount of memory currently committed for the heap.
static int CommittedMemory();
static intptr_t CommittedMemory();
// Returns the available bytes in space w/o growing.
// Heap doesn't guarantee that it can allocate an object that requires
// all available bytes. Check MaxHeapObjectSize() instead.
static int Available();
static intptr_t Available();
// Returns the maximum object size in paged space.
static inline int MaxObjectSizeInPagedSpace();
// Returns of size of all objects residing in the heap.
static int SizeOfObjects();
static intptr_t SizeOfObjects();
// Return the starting address and a mask for the new space. And-masking an
// address with the mask will result in the start address of the new space
@ -1069,8 +1069,8 @@ class Heap : public AllStatic {
static int reserved_semispace_size_;
static int max_semispace_size_;
static int initial_semispace_size_;
static int max_old_generation_size_;
static size_t code_range_size_;
static intptr_t max_old_generation_size_;
static intptr_t code_range_size_;
// For keeping track of how much data has survived
// scavenge since last new space expansion.
@ -1098,7 +1098,7 @@ class Heap : public AllStatic {
static HeapState gc_state_;
// Returns the size of object residing in non new spaces.
static int PromotedSpaceSize();
static intptr_t PromotedSpaceSize();
// Returns the amount of external memory registered since last global gc.
static int PromotedExternalMemorySize();
@ -1133,16 +1133,16 @@ class Heap : public AllStatic {
// Limit that triggers a global GC on the next (normally caused) GC. This
// is checked when we have already decided to do a GC to help determine
// which collector to invoke.
static int old_gen_promotion_limit_;
static intptr_t old_gen_promotion_limit_;
// Limit that triggers a global GC as soon as is reasonable. This is
// checked before expanding a paged space in the old generation and on
// every allocation in large object space.
static int old_gen_allocation_limit_;
static intptr_t old_gen_allocation_limit_;
// Limit on the amount of externally allocated memory allowed
// between global GCs. If reached a global GC is forced.
static int external_allocation_limit_;
static intptr_t external_allocation_limit_;
// The amount of external memory registered through the API kept alive
// by global handles
@ -1231,8 +1231,8 @@ class Heap : public AllStatic {
GCTracer* tracer,
CollectionPolicy collectionPolicy);
static const int kMinimumPromotionLimit = 2 * MB;
static const int kMinimumAllocationLimit = 8 * MB;
static const intptr_t kMinimumPromotionLimit = 2 * MB;
static const intptr_t kMinimumAllocationLimit = 8 * MB;
inline static void UpdateOldSpaceLimits();
@ -1385,24 +1385,24 @@ class HeapStats {
int* start_marker; // 0
int* new_space_size; // 1
int* new_space_capacity; // 2
int* old_pointer_space_size; // 3
int* old_pointer_space_capacity; // 4
int* old_data_space_size; // 5
int* old_data_space_capacity; // 6
int* code_space_size; // 7
int* code_space_capacity; // 8
int* map_space_size; // 9
int* map_space_capacity; // 10
int* cell_space_size; // 11
int* cell_space_capacity; // 12
int* lo_space_size; // 13
intptr_t* old_pointer_space_size; // 3
intptr_t* old_pointer_space_capacity; // 4
intptr_t* old_data_space_size; // 5
intptr_t* old_data_space_capacity; // 6
intptr_t* code_space_size; // 7
intptr_t* code_space_capacity; // 8
intptr_t* map_space_size; // 9
intptr_t* map_space_capacity; // 10
intptr_t* cell_space_size; // 11
intptr_t* cell_space_capacity; // 12
intptr_t* lo_space_size; // 13
int* global_handle_count; // 14
int* weak_global_handle_count; // 15
int* pending_global_handle_count; // 16
int* near_death_global_handle_count; // 17
int* destroyed_global_handle_count; // 18
int* memory_allocator_size; // 19
int* memory_allocator_capacity; // 20
intptr_t* memory_allocator_size; // 19
intptr_t* memory_allocator_capacity; // 20
int* objects_per_type; // 21
int* size_per_type; // 22
int* os_error; // 23
@ -1837,7 +1837,7 @@ class GCTracer BASE_EMBEDDED {
static int get_max_gc_pause() { return max_gc_pause_; }
// Returns maximum size of objects alive after GC.
static int get_max_alive_after_gc() { return max_alive_after_gc_; }
static intptr_t get_max_alive_after_gc() { return max_alive_after_gc_; }
// Returns minimal interval between two subsequent collections.
static int get_min_in_mutator() { return min_in_mutator_; }
@ -1852,7 +1852,7 @@ class GCTracer BASE_EMBEDDED {
}
double start_time_; // Timestamp set in the constructor.
int start_size_; // Size of objects in heap set in constructor.
intptr_t start_size_; // Size of objects in heap set in constructor.
GarbageCollector collector_; // Type of collector.
// A count (including this one, eg, the first collection is 1) of the
@ -1884,30 +1884,30 @@ class GCTracer BASE_EMBEDDED {
// Total amount of space either wasted or contained in one of free lists
// before the current GC.
int in_free_list_or_wasted_before_gc_;
intptr_t in_free_list_or_wasted_before_gc_;
// Difference between space used in the heap at the beginning of the current
// collection and the end of the previous collection.
int allocated_since_last_gc_;
intptr_t allocated_since_last_gc_;
// Amount of time spent in mutator that is time elapsed between end of the
// previous collection and the beginning of the current one.
double spent_in_mutator_;
// Size of objects promoted during the current collection.
int promoted_objects_size_;
intptr_t promoted_objects_size_;
// Maximum GC pause.
static int max_gc_pause_;
// Maximum size of objects alive after GC.
static int max_alive_after_gc_;
static intptr_t max_alive_after_gc_;
// Minimal interval between two subsequent collections.
static int min_in_mutator_;
// Size of objects alive after last GC.
static int alive_after_last_gc_;
static intptr_t alive_after_last_gc_;
static double last_gc_end_timestamp_;
};

View File

@ -393,6 +393,13 @@ void Logger::IntEvent(const char* name, int value) {
}
void Logger::IntPtrTEvent(const char* name, intptr_t value) {
#ifdef ENABLE_LOGGING_AND_PROFILING
if (FLAG_log) UncheckedIntPtrTEvent(name, value);
#endif
}
#ifdef ENABLE_LOGGING_AND_PROFILING
void Logger::UncheckedIntEvent(const char* name, int value) {
if (!Log::IsEnabled()) return;
@ -403,6 +410,16 @@ void Logger::UncheckedIntEvent(const char* name, int value) {
#endif
#ifdef ENABLE_LOGGING_AND_PROFILING
void Logger::UncheckedIntPtrTEvent(const char* name, intptr_t value) {
if (!Log::IsEnabled()) return;
LogMessageBuilder msg;
msg.Append("%s,%" V8_PTR_PREFIX "d\n", name, value);
msg.WriteToLogFile();
}
#endif
void Logger::HandleEvent(const char* name, Object** location) {
#ifdef ENABLE_LOGGING_AND_PROFILING
if (!Log::IsEnabled() || !FLAG_log_handles) return;
@ -1005,11 +1022,12 @@ void Logger::HeapSampleBeginEvent(const char* space, const char* kind) {
void Logger::HeapSampleStats(const char* space, const char* kind,
int capacity, int used) {
intptr_t capacity, intptr_t used) {
#ifdef ENABLE_LOGGING_AND_PROFILING
if (!Log::IsEnabled() || !FLAG_log_gc) return;
LogMessageBuilder msg;
msg.Append("heap-sample-stats,\"%s\",\"%s\",%d,%d\n",
msg.Append("heap-sample-stats,\"%s\",\"%s\","
"%" V8_PTR_PREFIX "d,%" V8_PTR_PREFIX "d\n",
space, kind, capacity, used);
msg.WriteToLogFile();
#endif

View File

@ -159,6 +159,7 @@ class Logger {
// Emits an event with an int value -> (name, value).
static void IntEvent(const char* name, int value);
static void IntPtrTEvent(const char* name, intptr_t value);
// Emits an event with an handle value -> (name, location).
static void HandleEvent(const char* name, Object** location);
@ -237,7 +238,7 @@ class Logger {
static void HeapSampleJSProducerEvent(const char* constructor,
Address* stack);
static void HeapSampleStats(const char* space, const char* kind,
int capacity, int used);
intptr_t capacity, intptr_t used);
static void SharedLibraryEvent(const char* library_path,
uintptr_t start,
@ -326,6 +327,7 @@ class Logger {
// Logs an IntEvent regardless of whether FLAG_log is true.
static void UncheckedIntEvent(const char* name, int value);
static void UncheckedIntPtrTEvent(const char* name, intptr_t value);
// Stops logging and profiling in case of insufficient resources.
static void StopLoggingAndProfiling();

View File

@ -167,8 +167,8 @@ void MarkCompactCollector::Finish() {
// reclaiming the waste and free list blocks).
static const int kFragmentationLimit = 15; // Percent.
static const int kFragmentationAllowed = 1 * MB; // Absolute.
int old_gen_recoverable = 0;
int old_gen_used = 0;
intptr_t old_gen_recoverable = 0;
intptr_t old_gen_used = 0;
OldSpaces spaces;
for (OldSpace* space = spaces.next(); space != NULL; space = spaces.next()) {
@ -2008,8 +2008,10 @@ class MapCompact {
#ifdef DEBUG
if (FLAG_gc_verbose) {
PrintF("update %p : %p -> %p\n", obj->address(),
map, new_map);
PrintF("update %p : %p -> %p\n",
obj->address(),
reinterpret_cast<void*>(map),
reinterpret_cast<void*>(new_map));
}
#endif
}
@ -2068,8 +2070,8 @@ void MarkCompactCollector::SweepSpaces() {
&UpdatePointerToNewGen,
Heap::WATERMARK_SHOULD_BE_VALID);
int live_maps_size = Heap::map_space()->Size();
int live_maps = live_maps_size / Map::kSize;
intptr_t live_maps_size = Heap::map_space()->Size();
int live_maps = static_cast<int>(live_maps_size / Map::kSize);
ASSERT(live_map_objects_size_ == live_maps_size);
if (Heap::map_space()->NeedsCompaction(live_maps)) {

View File

@ -89,7 +89,7 @@ void Failure::FailureVerify() {
void HeapObject::PrintHeader(const char* id) {
PrintF("%p: [%s]\n", this, id);
PrintF("%p: [%s]\n", reinterpret_cast<void*>(this), id);
}
@ -522,9 +522,9 @@ void JSObject::PrintElements() {
void JSObject::JSObjectPrint() {
PrintF("%p: [JSObject]\n", this);
PrintF(" - map = %p\n", map());
PrintF(" - prototype = %p\n", GetPrototype());
PrintF("%p: [JSObject]\n", reinterpret_cast<void*>(this));
PrintF(" - map = %p\n", reinterpret_cast<void*>(map()));
PrintF(" - prototype = %p\n", reinterpret_cast<void*>(GetPrototype()));
PrintF(" {\n");
PrintProperties();
PrintElements();
@ -744,7 +744,7 @@ void String::StringVerify() {
void JSFunction::JSFunctionPrint() {
HeapObject::PrintHeader("Function");
PrintF(" - map = 0x%p\n", map());
PrintF(" - map = 0x%p\n", reinterpret_cast<void*>(map()));
PrintF(" - initial_map = ");
if (has_initial_map()) {
initial_map()->ShortPrint();
@ -1224,9 +1224,9 @@ void BreakPointInfo::BreakPointInfoVerify() {
void BreakPointInfo::BreakPointInfoPrint() {
HeapObject::PrintHeader("BreakPointInfo");
PrintF("\n - code_position: %d", code_position());
PrintF("\n - source_position: %d", source_position());
PrintF("\n - statement_position: %d", statement_position());
PrintF("\n - code_position: %d", code_position()->value());
PrintF("\n - source_position: %d", source_position()->value());
PrintF("\n - statement_position: %d", statement_position()->value());
PrintF("\n - break_point_objects: ");
break_point_objects()->ShortPrint();
}

View File

@ -1,4 +1,4 @@
// Copyright 2006-2008 the V8 project authors. All rights reserved.
// Copyright 2006-2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@ -153,9 +153,10 @@ class ScriptDataImpl : public ScriptData {
ScriptDataImpl(const char* backing_store, int length)
: store_(reinterpret_cast<unsigned*>(const_cast<char*>(backing_store)),
length / sizeof(unsigned)),
length / static_cast<int>(sizeof(unsigned))),
owns_store_(false) {
ASSERT_EQ(0, reinterpret_cast<intptr_t>(backing_store) % sizeof(unsigned));
ASSERT_EQ(0, static_cast<int>(
reinterpret_cast<intptr_t>(backing_store) % sizeof(unsigned)));
}
// Read strings written by ParserRecorder::WriteString.

View File

@ -47,8 +47,15 @@ RegExpMacroAssemblerTracer::~RegExpMacroAssemblerTracer() {
}
// This is used for printing out debugging information. It makes an integer
// that is closely related to the address of an object.
static int LabelToInt(Label* label) {
return static_cast<int>(reinterpret_cast<intptr_t>(label));
}
void RegExpMacroAssemblerTracer::Bind(Label* label) {
PrintF("label[%08x]: (Bind)\n", label, label);
PrintF("label[%08x]: (Bind)\n", LabelToInt(label));
assembler_->Bind(label);
}
@ -60,7 +67,7 @@ void RegExpMacroAssemblerTracer::AdvanceCurrentPosition(int by) {
void RegExpMacroAssemblerTracer::CheckGreedyLoop(Label* label) {
PrintF(" CheckGreedyLoop(label[%08x]);\n\n", label);
PrintF(" CheckGreedyLoop(label[%08x]);\n\n", LabelToInt(label));
assembler_->CheckGreedyLoop(label);
}
@ -84,14 +91,13 @@ void RegExpMacroAssemblerTracer::Backtrack() {
void RegExpMacroAssemblerTracer::GoTo(Label* label) {
PrintF(" GoTo(label[%08x]);\n\n", label);
PrintF(" GoTo(label[%08x]);\n\n", LabelToInt(label));
assembler_->GoTo(label);
}
void RegExpMacroAssemblerTracer::PushBacktrack(Label* label) {
PrintF(" PushBacktrack(label[%08x]);\n",
label);
PrintF(" PushBacktrack(label[%08x]);\n", LabelToInt(label));
assembler_->PushBacktrack(label);
}
@ -176,7 +182,7 @@ void RegExpMacroAssemblerTracer::LoadCurrentCharacter(int cp_offset,
const char* check_msg = check_bounds ? "" : " (unchecked)";
PrintF(" LoadCurrentCharacter(cp_offset=%d, label[%08x]%s (%d chars));\n",
cp_offset,
on_end_of_input,
LabelToInt(on_end_of_input),
check_msg,
characters);
assembler_->LoadCurrentCharacter(cp_offset,
@ -187,39 +193,43 @@ void RegExpMacroAssemblerTracer::LoadCurrentCharacter(int cp_offset,
void RegExpMacroAssemblerTracer::CheckCharacterLT(uc16 limit, Label* on_less) {
PrintF(" CheckCharacterLT(c='u%04x', label[%08x]);\n", limit, on_less);
PrintF(" CheckCharacterLT(c='u%04x', label[%08x]);\n",
limit, LabelToInt(on_less));
assembler_->CheckCharacterLT(limit, on_less);
}
void RegExpMacroAssemblerTracer::CheckCharacterGT(uc16 limit,
Label* on_greater) {
PrintF(" CheckCharacterGT(c='u%04x', label[%08x]);\n", limit, on_greater);
PrintF(" CheckCharacterGT(c='u%04x', label[%08x]);\n",
limit, LabelToInt(on_greater));
assembler_->CheckCharacterGT(limit, on_greater);
}
void RegExpMacroAssemblerTracer::CheckCharacter(uint32_t c, Label* on_equal) {
PrintF(" CheckCharacter(c='u%04x', label[%08x]);\n", c, on_equal);
PrintF(" CheckCharacter(c='u%04x', label[%08x]);\n",
c, LabelToInt(on_equal));
assembler_->CheckCharacter(c, on_equal);
}
void RegExpMacroAssemblerTracer::CheckAtStart(Label* on_at_start) {
PrintF(" CheckAtStart(label[%08x]);\n", on_at_start);
PrintF(" CheckAtStart(label[%08x]);\n", LabelToInt(on_at_start));
assembler_->CheckAtStart(on_at_start);
}
void RegExpMacroAssemblerTracer::CheckNotAtStart(Label* on_not_at_start) {
PrintF(" CheckNotAtStart(label[%08x]);\n", on_not_at_start);
PrintF(" CheckNotAtStart(label[%08x]);\n", LabelToInt(on_not_at_start));
assembler_->CheckNotAtStart(on_not_at_start);
}
void RegExpMacroAssemblerTracer::CheckNotCharacter(uint32_t c,
Label* on_not_equal) {
PrintF(" CheckNotCharacter(c='u%04x', label[%08x]);\n", c, on_not_equal);
PrintF(" CheckNotCharacter(c='u%04x', label[%08x]);\n",
c, LabelToInt(on_not_equal));
assembler_->CheckNotCharacter(c, on_not_equal);
}
@ -231,7 +241,7 @@ void RegExpMacroAssemblerTracer::CheckCharacterAfterAnd(
PrintF(" CheckCharacterAfterAnd(c='u%04x', mask=0x%04x, label[%08x]);\n",
c,
mask,
on_equal);
LabelToInt(on_equal));
assembler_->CheckCharacterAfterAnd(c, mask, on_equal);
}
@ -243,7 +253,7 @@ void RegExpMacroAssemblerTracer::CheckNotCharacterAfterAnd(
PrintF(" CheckNotCharacterAfterAnd(c='u%04x', mask=0x%04x, label[%08x]);\n",
c,
mask,
on_not_equal);
LabelToInt(on_not_equal));
assembler_->CheckNotCharacterAfterAnd(c, mask, on_not_equal);
}
@ -258,7 +268,7 @@ void RegExpMacroAssemblerTracer::CheckNotCharacterAfterMinusAnd(
c,
minus,
mask,
on_not_equal);
LabelToInt(on_not_equal));
assembler_->CheckNotCharacterAfterMinusAnd(c, minus, mask, on_not_equal);
}
@ -266,7 +276,7 @@ void RegExpMacroAssemblerTracer::CheckNotCharacterAfterMinusAnd(
void RegExpMacroAssemblerTracer::CheckNotBackReference(int start_reg,
Label* on_no_match) {
PrintF(" CheckNotBackReference(register=%d, label[%08x]);\n", start_reg,
on_no_match);
LabelToInt(on_no_match));
assembler_->CheckNotBackReference(start_reg, on_no_match);
}
@ -275,7 +285,7 @@ void RegExpMacroAssemblerTracer::CheckNotBackReferenceIgnoreCase(
int start_reg,
Label* on_no_match) {
PrintF(" CheckNotBackReferenceIgnoreCase(register=%d, label[%08x]);\n",
start_reg, on_no_match);
start_reg, LabelToInt(on_no_match));
assembler_->CheckNotBackReferenceIgnoreCase(start_reg, on_no_match);
}
@ -286,7 +296,7 @@ void RegExpMacroAssemblerTracer::CheckNotRegistersEqual(int reg1,
PrintF(" CheckNotRegistersEqual(reg1=%d, reg2=%d, label[%08x]);\n",
reg1,
reg2,
on_not_equal);
LabelToInt(on_not_equal));
assembler_->CheckNotRegistersEqual(reg1, reg2, on_not_equal);
}
@ -300,7 +310,8 @@ void RegExpMacroAssemblerTracer::CheckCharacters(Vector<const uc16> str,
for (int i = 0; i < str.length(); i++) {
PrintF("u%04x", str[i]);
}
PrintF("\", cp_offset=%d, label[%08x])\n", cp_offset, on_failure);
PrintF("\", cp_offset=%d, label[%08x])\n",
cp_offset, LabelToInt(on_failure));
assembler_->CheckCharacters(str, cp_offset, on_failure, check_end_of_string);
}
@ -312,7 +323,7 @@ bool RegExpMacroAssemblerTracer::CheckSpecialCharacterClass(
on_no_match);
PrintF(" CheckSpecialCharacterClass(type='%c', label[%08x]): %s;\n",
type,
on_no_match,
LabelToInt(on_no_match),
supported ? "true" : "false");
return supported;
}
@ -321,7 +332,7 @@ bool RegExpMacroAssemblerTracer::CheckSpecialCharacterClass(
void RegExpMacroAssemblerTracer::IfRegisterLT(int register_index,
int comparand, Label* if_lt) {
PrintF(" IfRegisterLT(register=%d, number=%d, label[%08x]);\n",
register_index, comparand, if_lt);
register_index, comparand, LabelToInt(if_lt));
assembler_->IfRegisterLT(register_index, comparand, if_lt);
}
@ -329,7 +340,7 @@ void RegExpMacroAssemblerTracer::IfRegisterLT(int register_index,
void RegExpMacroAssemblerTracer::IfRegisterEqPos(int register_index,
Label* if_eq) {
PrintF(" IfRegisterEqPos(register=%d, label[%08x]);\n",
register_index, if_eq);
register_index, LabelToInt(if_eq));
assembler_->IfRegisterEqPos(register_index, if_eq);
}
@ -337,7 +348,7 @@ void RegExpMacroAssemblerTracer::IfRegisterEqPos(int register_index,
void RegExpMacroAssemblerTracer::IfRegisterGE(int register_index,
int comparand, Label* if_ge) {
PrintF(" IfRegisterGE(register=%d, number=%d, label[%08x]);\n",
register_index, comparand, if_ge);
register_index, comparand, LabelToInt(if_ge));
assembler_->IfRegisterGE(register_index, comparand, if_ge);
}

View File

@ -6745,7 +6745,7 @@ static void PrintObject(Object* obj) {
} else if (obj->IsFalse()) {
PrintF("<false>");
} else {
PrintF("%p", obj);
PrintF("%p", reinterpret_cast<void*>(obj));
}
}

View File

@ -270,9 +270,9 @@ void CodeRange::TearDown() {
// -----------------------------------------------------------------------------
// MemoryAllocator
//
int MemoryAllocator::capacity_ = 0;
int MemoryAllocator::size_ = 0;
int MemoryAllocator::size_executable_ = 0;
intptr_t MemoryAllocator::capacity_ = 0;
intptr_t MemoryAllocator::size_ = 0;
intptr_t MemoryAllocator::size_executable_ = 0;
List<MemoryAllocator::MemoryAllocationCallbackRegistration>
MemoryAllocator::memory_allocation_callbacks_;
@ -302,7 +302,7 @@ int MemoryAllocator::Pop() {
}
bool MemoryAllocator::Setup(int capacity) {
bool MemoryAllocator::Setup(intptr_t capacity) {
capacity_ = RoundUp(capacity, Page::kPageSize);
// Over-estimate the size of chunks_ array. It assumes the expansion of old
@ -314,7 +314,8 @@ bool MemoryAllocator::Setup(int capacity) {
//
// Reserve two chunk ids for semispaces, one for map space, one for old
// space, and one for code space.
max_nof_chunks_ = (capacity_ / (kChunkSize - Page::kPageSize)) + 5;
max_nof_chunks_ =
static_cast<int>((capacity_ / (kChunkSize - Page::kPageSize))) + 5;
if (max_nof_chunks_ > kMaxNofChunks) return false;
size_ = 0;
@ -691,7 +692,9 @@ Page* MemoryAllocator::FindLastPageInSameChunk(Page* p) {
#ifdef DEBUG
void MemoryAllocator::ReportStatistics() {
float pct = static_cast<float>(capacity_ - size_) / capacity_;
PrintF(" capacity: %d, used: %d, available: %%%d\n\n",
PrintF(" capacity: %" V8_PTR_PREFIX "d"
", used: %" V8_PTR_PREFIX "d"
", available: %%%d\n\n",
capacity_, size_, static_cast<int>(pct*100));
}
#endif
@ -769,7 +772,7 @@ Page* MemoryAllocator::RelinkPagesInChunk(int chunk_id,
// -----------------------------------------------------------------------------
// PagedSpace implementation
PagedSpace::PagedSpace(int max_capacity,
PagedSpace::PagedSpace(intptr_t max_capacity,
AllocationSpace id,
Executability executable)
: Space(id, executable) {
@ -797,8 +800,9 @@ bool PagedSpace::Setup(Address start, size_t size) {
Page::kPageSize * pages_in_chunk,
this, &num_pages);
} else {
int requested_pages = Min(MemoryAllocator::kPagesPerChunk,
max_capacity_ / Page::kObjectAreaSize);
int requested_pages =
Min(MemoryAllocator::kPagesPerChunk,
static_cast<int>(max_capacity_ / Page::kObjectAreaSize));
first_page_ =
MemoryAllocator::AllocatePages(requested_pages, &num_pages, this);
if (!first_page_->is_valid()) return false;
@ -984,7 +988,8 @@ bool PagedSpace::Expand(Page* last_page) {
// Last page must be valid and its next page is invalid.
ASSERT(last_page->is_valid() && !last_page->next_page()->is_valid());
int available_pages = (max_capacity_ - Capacity()) / Page::kObjectAreaSize;
int available_pages =
static_cast<int>((max_capacity_ - Capacity()) / Page::kObjectAreaSize);
if (available_pages <= 0) return false;
int desired_pages = Min(available_pages, MemoryAllocator::kPagesPerChunk);
@ -1264,7 +1269,7 @@ void NewSpace::Grow() {
void NewSpace::Shrink() {
int new_capacity = Max(InitialCapacity(), 2 * Size());
int new_capacity = Max(InitialCapacity(), 2 * SizeAsInt());
int rounded_new_capacity =
RoundUp(new_capacity, static_cast<int>(OS::AllocateAlignment()));
if (rounded_new_capacity < Capacity() &&
@ -1643,7 +1648,8 @@ void NewSpace::ReportStatistics() {
#ifdef DEBUG
if (FLAG_heap_stats) {
float pct = static_cast<float>(Available()) / Capacity();
PrintF(" capacity: %d, available: %d, %%%d\n",
PrintF(" capacity: %" V8_PTR_PREFIX "d"
", available: %" V8_PTR_PREFIX "d, %%%d\n",
Capacity(), Available(), static_cast<int>(pct*100));
PrintF("\n Object Histogram:\n");
for (int i = 0; i <= LAST_TYPE; i++) {
@ -2401,8 +2407,10 @@ void PagedSpace::CollectCodeStatistics() {
void OldSpace::ReportStatistics() {
int pct = Available() * 100 / Capacity();
PrintF(" capacity: %d, waste: %d, available: %d, %%%d\n",
int pct = static_cast<int>(Available() * 100 / Capacity());
PrintF(" capacity: %" V8_PTR_PREFIX "d"
", waste: %" V8_PTR_PREFIX "d"
", available: %" V8_PTR_PREFIX "d, %%%d\n",
Capacity(), Waste(), Available(), pct);
ClearHistograms();
@ -2558,8 +2566,10 @@ void FixedSpace::DeallocateBlock(Address start,
#ifdef DEBUG
void FixedSpace::ReportStatistics() {
int pct = Available() * 100 / Capacity();
PrintF(" capacity: %d, waste: %d, available: %d, %%%d\n",
int pct = static_cast<int>(Available() * 100 / Capacity());
PrintF(" capacity: %" V8_PTR_PREFIX "d"
", waste: %" V8_PTR_PREFIX "d"
", available: %" V8_PTR_PREFIX "d, %%%d\n",
Capacity(), Waste(), Available(), pct);
ClearHistograms();
@ -3011,7 +3021,7 @@ void LargeObjectSpace::Print() {
void LargeObjectSpace::ReportStatistics() {
PrintF(" size: %d\n", size_);
PrintF(" size: %" V8_PTR_PREFIX "d\n", size_);
int num_objects = 0;
ClearHistograms();
LargeObjectIterator it(this);

View File

@ -371,7 +371,7 @@ class Space : public Malloced {
// Identity used in error reporting.
AllocationSpace identity() { return id_; }
virtual int Size() = 0;
virtual intptr_t Size() = 0;
#ifdef ENABLE_HEAP_PROTECTION
// Protect/unprotect the space by marking it read-only/writable.
@ -491,7 +491,7 @@ class MemoryAllocator : public AllStatic {
public:
// Initializes its internal bookkeeping structures.
// Max capacity of the total space.
static bool Setup(int max_capacity);
static bool Setup(intptr_t max_capacity);
// Deletes valid chunks.
static void TearDown();
@ -582,16 +582,18 @@ class MemoryAllocator : public AllStatic {
MemoryAllocationCallback callback);
// Returns the maximum available bytes of heaps.
static int Available() { return capacity_ < size_ ? 0 : capacity_ - size_; }
static intptr_t Available() {
return capacity_ < size_ ? 0 : capacity_ - size_;
}
// Returns allocated spaces in bytes.
static int Size() { return size_; }
static intptr_t Size() { return size_; }
// Returns allocated executable spaces in bytes.
static int SizeExecutable() { return size_executable_; }
static intptr_t SizeExecutable() { return size_executable_; }
// Returns maximum available bytes that the old space can have.
static int MaxAvailable() {
static intptr_t MaxAvailable() {
return (Available() / Page::kPageSize) * Page::kObjectAreaSize;
}
@ -649,12 +651,12 @@ class MemoryAllocator : public AllStatic {
private:
// Maximum space size in bytes.
static int capacity_;
static intptr_t capacity_;
// Allocated space size in bytes.
static int size_;
static intptr_t size_;
// Allocated executable space size in bytes.
static int size_executable_;
static intptr_t size_executable_;
struct MemoryAllocationCallbackRegistration {
MemoryAllocationCallbackRegistration(MemoryAllocationCallback callback,
@ -927,10 +929,10 @@ class AllocationStats BASE_EMBEDDED {
}
// Accessors for the allocation statistics.
int Capacity() { return capacity_; }
int Available() { return available_; }
int Size() { return size_; }
int Waste() { return waste_; }
intptr_t Capacity() { return capacity_; }
intptr_t Available() { return available_; }
intptr_t Size() { return size_; }
intptr_t Waste() { return waste_; }
// Grow the space by adding available bytes.
void ExpandSpace(int size_in_bytes) {
@ -945,13 +947,13 @@ class AllocationStats BASE_EMBEDDED {
}
// Allocate from available bytes (available -> size).
void AllocateBytes(int size_in_bytes) {
void AllocateBytes(intptr_t size_in_bytes) {
available_ -= size_in_bytes;
size_ += size_in_bytes;
}
// Free allocated bytes, making them available (size -> available).
void DeallocateBytes(int size_in_bytes) {
void DeallocateBytes(intptr_t size_in_bytes) {
size_ -= size_in_bytes;
available_ += size_in_bytes;
}
@ -964,23 +966,25 @@ class AllocationStats BASE_EMBEDDED {
// Consider the wasted bytes to be allocated, as they contain filler
// objects (waste -> size).
void FillWastedBytes(int size_in_bytes) {
void FillWastedBytes(intptr_t size_in_bytes) {
waste_ -= size_in_bytes;
size_ += size_in_bytes;
}
private:
int capacity_;
int available_;
int size_;
int waste_;
intptr_t capacity_;
intptr_t available_;
intptr_t size_;
intptr_t waste_;
};
class PagedSpace : public Space {
public:
// Creates a space with a maximum capacity, and an id.
PagedSpace(int max_capacity, AllocationSpace id, Executability executable);
PagedSpace(intptr_t max_capacity,
AllocationSpace id,
Executability executable);
virtual ~PagedSpace() {}
@ -1031,21 +1035,21 @@ class PagedSpace : public Space {
}
// Current capacity without growing (Size() + Available() + Waste()).
int Capacity() { return accounting_stats_.Capacity(); }
intptr_t Capacity() { return accounting_stats_.Capacity(); }
// Total amount of memory committed for this space. For paged
// spaces this equals the capacity.
int CommittedMemory() { return Capacity(); }
intptr_t CommittedMemory() { return Capacity(); }
// Available bytes without growing.
int Available() { return accounting_stats_.Available(); }
intptr_t Available() { return accounting_stats_.Available(); }
// Allocated bytes in this space.
virtual int Size() { return accounting_stats_.Size(); }
virtual intptr_t Size() { return accounting_stats_.Size(); }
// Wasted bytes due to fragmentation and not recoverable until the
// next GC of this space.
int Waste() { return accounting_stats_.Waste(); }
intptr_t Waste() { return accounting_stats_.Waste(); }
// Returns the address of the first object in this space.
Address bottom() { return first_page_->ObjectAreaStart(); }
@ -1137,7 +1141,7 @@ class PagedSpace : public Space {
protected:
// Maximum capacity of this space.
int max_capacity_;
intptr_t max_capacity_;
// Accounting information for this space.
AllocationStats accounting_stats_;
@ -1328,7 +1332,7 @@ class SemiSpace : public Space {
// If we don't have these here then SemiSpace will be abstract. However
// they should never be called.
virtual int Size() {
virtual intptr_t Size() {
UNREACHABLE();
return 0;
}
@ -1471,22 +1475,26 @@ class NewSpace : public Space {
}
// Return the allocated bytes in the active semispace.
virtual int Size() { return static_cast<int>(top() - bottom()); }
virtual intptr_t Size() { return static_cast<int>(top() - bottom()); }
// The same, but returning an int. We have to have the one that returns
// intptr_t because it is inherited, but if we know we are dealing with the
// new space, which can't get as big as the other spaces then this is useful:
int SizeAsInt() { return static_cast<int>(Size()); }
// Return the current capacity of a semispace.
int Capacity() {
intptr_t Capacity() {
ASSERT(to_space_.Capacity() == from_space_.Capacity());
return to_space_.Capacity();
}
// Return the total amount of memory committed for new space.
int CommittedMemory() {
intptr_t CommittedMemory() {
if (from_space_.is_committed()) return 2 * Capacity();
return Capacity();
}
// Return the available bytes without growing in the active semispace.
int Available() { return Capacity() - Size(); }
intptr_t Available() { return Capacity() - Size(); }
// Return the maximum capacity of a semispace.
int MaximumCapacity() {
@ -1681,7 +1689,7 @@ class OldSpaceFreeList BASE_EMBEDDED {
void Reset();
// Return the number of bytes available on the free list.
int available() { return available_; }
intptr_t available() { return available_; }
// Place a node on the free list. The block of size 'size_in_bytes'
// starting at 'start' is placed on the free list. The return value is the
@ -1783,7 +1791,7 @@ class FixedSizeFreeList BASE_EMBEDDED {
void Reset();
// Return the number of bytes available on the free list.
int available() { return available_; }
intptr_t available() { return available_; }
// Place a node on the free list. The block starting at 'start' (assumed to
// have size object_size_) is placed on the free list. Bookkeeping
@ -1797,7 +1805,7 @@ class FixedSizeFreeList BASE_EMBEDDED {
private:
// Available bytes on the free list.
int available_;
intptr_t available_;
// The head of the free list.
Address head_;
@ -1823,7 +1831,7 @@ class OldSpace : public PagedSpace {
public:
// Creates an old space object with a given maximum capacity.
// The constructor does not allocate pages from OS.
explicit OldSpace(int max_capacity,
explicit OldSpace(intptr_t max_capacity,
AllocationSpace id,
Executability executable)
: PagedSpace(max_capacity, id, executable), free_list_(id) {
@ -1832,7 +1840,7 @@ class OldSpace : public PagedSpace {
// The bytes available on the free list (ie, not above the linear allocation
// pointer).
int AvailableFree() { return free_list_.available(); }
intptr_t AvailableFree() { return free_list_.available(); }
// The limit of allocation for a page in this space.
virtual Address PageAllocationLimit(Page* page) {
@ -1893,7 +1901,7 @@ class OldSpace : public PagedSpace {
class FixedSpace : public PagedSpace {
public:
FixedSpace(int max_capacity,
FixedSpace(intptr_t max_capacity,
AllocationSpace id,
int object_size_in_bytes,
const char* name)
@ -1968,7 +1976,7 @@ class FixedSpace : public PagedSpace {
class MapSpace : public FixedSpace {
public:
// Creates a map space object with a maximum capacity.
MapSpace(int max_capacity, int max_map_space_pages, AllocationSpace id)
MapSpace(intptr_t max_capacity, int max_map_space_pages, AllocationSpace id)
: FixedSpace(max_capacity, id, Map::kSize, "map"),
max_map_space_pages_(max_map_space_pages) {
ASSERT(max_map_space_pages < kMaxMapPageIndex);
@ -2073,7 +2081,7 @@ class MapSpace : public FixedSpace {
class CellSpace : public FixedSpace {
public:
// Creates a property cell space object with a maximum capacity.
CellSpace(int max_capacity, AllocationSpace id)
CellSpace(intptr_t max_capacity, AllocationSpace id)
: FixedSpace(max_capacity, id, JSGlobalPropertyCell::kSize, "cell") {}
protected:
@ -2129,7 +2137,7 @@ class LargeObjectChunk {
// Given a chunk size, returns the object size it can accommodate. Used by
// LargeObjectSpace::Available.
static int ObjectSizeFor(int chunk_size) {
static intptr_t ObjectSizeFor(intptr_t chunk_size) {
if (chunk_size <= (Page::kPageSize + Page::kObjectStartOffset)) return 0;
return chunk_size - Page::kPageSize - Page::kObjectStartOffset;
}
@ -2165,11 +2173,11 @@ class LargeObjectSpace : public Space {
Object* AllocateRawFixedArray(int size_in_bytes);
// Available bytes for objects in this space.
int Available() {
intptr_t Available() {
return LargeObjectChunk::ObjectSizeFor(MemoryAllocator::Available());
}
virtual int Size() {
virtual intptr_t Size() {
return size_;
}
@ -2223,7 +2231,7 @@ class LargeObjectSpace : public Space {
private:
// The head of the linked list of large object chunks.
LargeObjectChunk* first_chunk_;
int size_; // allocated bytes
intptr_t size_; // allocated bytes
int page_count_; // number of chunks
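
The SizeAsInt helper added to NewSpace above leans on the fact that new space
is bounded far below INT_MAX, so the narrowing cast cannot lose bits. A
minimal sketch of the pattern (names are illustrative, and the assert is added
here for clarity; the V8 version just casts):

    #include <assert.h>
    #include <limits.h>
    #include <stdint.h>

    class SpaceLike {
     public:
      explicit SpaceLike(intptr_t size) : size_(size) {}
      virtual ~SpaceLike() {}

      // Inherited interface: sizes are intptr_t so the large spaces fit.
      virtual intptr_t Size() { return size_; }

      // Convenience for callers that know this space stays small, like the
      // new space: check the invariant, then narrow.
      int SizeAsInt() {
        assert(Size() <= INT_MAX);
        return static_cast<int>(Size());
      }

     private:
      intptr_t size_;
    };

    int main() {
      SpaceLike new_space(8 * 1024 * 1024);  // 8 MB, comfortably below INT_MAX
      return new_space.SizeAsInt() == 8 * 1024 * 1024 ? 0 : 1;
    }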

View File

@ -222,11 +222,21 @@ uint32_t ComputeIntegerHash(uint32_t key);
// ----------------------------------------------------------------------------
// I/O support.
// Our version of printf(). Avoids compilation errors that we get
// with standard printf when attempting to print pointers, etc.
// (the errors are due to the extra compilation flags, which we
// want elsewhere).
void PrintF(const char* format, ...);
#if __GNUC__ >= 4
// On gcc we can ask the compiler to check the types of %d-style format
// specifiers and their associated arguments. TODO(erikcorry) fix this
// so it works on MacOSX.
#if defined(__MACH__) && defined(__APPLE__)
#define PRINTF_CHECKING
#else // MacOsX.
#define PRINTF_CHECKING __attribute__ ((format (printf, 1, 2)))
#endif
#else
#define PRINTF_CHECKING
#endif
// Our version of printf().
void PRINTF_CHECKING PrintF(const char* format, ...);
// Our version of fflush.
void Flush();
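
A self-contained sketch of the format-checking attribute this hunk turns on
for PrintF (the wrapper below is illustrative, not V8's PrintF; as the comment
in the hunk notes, the check is skipped on Mac OS X):

    #include <stdarg.h>
    #include <stdio.h>

    #if defined(__GNUC__) && __GNUC__ >= 4
    #define PRINTF_CHECKING __attribute__((format(printf, 1, 2)))
    #else
    #define PRINTF_CHECKING
    #endif

    // The attribute goes on the declaration; gcc then checks callers' format
    // strings against their arguments, which is what surfaces the
    // int-versus-intptr_t mismatches fixed throughout this commit.
    void MyPrintF(const char* format, ...) PRINTF_CHECKING;

    void MyPrintF(const char* format, ...) {
      va_list args;
      va_start(args, format);
      vfprintf(stdout, format, args);
      va_end(args);
    }

    int main() {
      long size = 1L << 20;
      // MyPrintF("size: %d\n", size);   // gcc -Wformat warns: %d vs long
      MyPrintF("size: %ld\n", size);     // matching length modifier, no warning
      return 0;
    }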

View File

@ -43,7 +43,7 @@ static Object* AllocateAfterFailures() {
NewSpace* new_space = Heap::new_space();
static const int kNewSpaceFillerSize = ByteArray::SizeFor(0);
while (new_space->Available() > kNewSpaceFillerSize) {
int available_before = new_space->Available();
int available_before = static_cast<int>(new_space->Available());
CHECK(!Heap::AllocateByteArray(0)->IsFailure());
if (available_before == new_space->Available()) {
// It seems that we are avoiding new space allocations when

View File

@ -1485,9 +1485,9 @@ THREADED_TEST(InternalFieldsNativePointers) {
char* data = new char[100];
void* aligned = data;
CHECK_EQ(0, reinterpret_cast<uintptr_t>(aligned) & 0x1);
CHECK_EQ(0, static_cast<int>(reinterpret_cast<uintptr_t>(aligned) & 0x1));
void* unaligned = data + 1;
CHECK_EQ(1, reinterpret_cast<uintptr_t>(unaligned) & 0x1);
CHECK_EQ(1, static_cast<int>(reinterpret_cast<uintptr_t>(unaligned) & 0x1));
// Check reading and writing aligned pointers.
obj->SetPointerInInternalField(0, aligned);
@ -1517,9 +1517,9 @@ THREADED_TEST(InternalFieldsNativePointersAndExternal) {
char* data = new char[100];
void* aligned = data;
CHECK_EQ(0, reinterpret_cast<uintptr_t>(aligned) & 0x1);
CHECK_EQ(0, static_cast<int>(reinterpret_cast<uintptr_t>(aligned) & 0x1));
void* unaligned = data + 1;
CHECK_EQ(1, reinterpret_cast<uintptr_t>(unaligned) & 0x1);
CHECK_EQ(1, static_cast<int>(reinterpret_cast<uintptr_t>(unaligned) & 0x1));
obj->SetPointerInInternalField(0, aligned);
i::Heap::CollectAllGarbage(false);

View File

@ -245,8 +245,8 @@ static v8::Handle<Value> construct_call(const v8::Arguments& args) {
args.This()->Set(v8_str("low_bits"), v8_num(low_bits >> 1));
#elif defined(V8_HOST_ARCH_64_BIT)
uint64_t fp = reinterpret_cast<uint64_t>(calling_frame->fp());
int32_t low_bits = fp & 0xffffffff;
int32_t high_bits = fp >> 32;
int32_t low_bits = static_cast<int32_t>(fp & 0xffffffff);
int32_t high_bits = static_cast<int32_t>(fp >> 32);
args.This()->Set(v8_str("low_bits"), v8_num(low_bits));
args.This()->Set(v8_str("high_bits"), v8_num(high_bits));
#else

View File

@ -221,7 +221,7 @@ TEST(LargeObjectSpace) {
CHECK(lo->Contains(ho));
while (true) {
int available = lo->Available();
intptr_t available = lo->Available();
obj = lo->AllocateRaw(lo_size);
if (obj->IsFailure()) break;
HeapObject::cast(obj)->set_map(faked_map);

View File

@ -25,7 +25,7 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Flags: --max-new-space-size=262144
// Flags: --max-new-space-size=256
// Check that a mod where the stub code hits a failure in heap number

View File

@ -25,7 +25,7 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Flags: --max-new-space-size=262144
// Flags: --max-new-space-size=256
function zero() {
var x = 0.5;

View File

@ -25,7 +25,7 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Flags: --max-new-space-size=262144
// Flags: --max-new-space-size=256
function zero() {
var x = 0.5;