Even without --trace-gc dump the last few GC messages on OOM
If we crash V8 due to out-of-memory, we print the last 3 GCs on stdout as we crash. The last 3 GCs are also recorded on the stack so that they become part of the minidump.

R=hpayer@chromium.org
BUG=

Review URL: https://codereview.chromium.org/1159513003
Cr-Commit-Position: refs/heads/master@{#28704}
This commit is contained in:
parent 30ef6b7a69
commit af7e07358a
@@ -213,6 +213,10 @@ void i::FatalProcessOutOfMemory(const char* location) {
// When V8 cannot allocated memory FatalProcessOutOfMemory is called.
// The default fatal error handler is called and execution is stopped.
void i::V8::FatalProcessOutOfMemory(const char* location, bool take_snapshot) {
  i::Isolate* isolate = i::Isolate::Current();
  char last_few_messages[Heap::kTraceRingBufferSize + 1];
  memset(last_few_messages, 0, Heap::kTraceRingBufferSize + 1);

  i::HeapStats heap_stats;
  int start_marker;
  heap_stats.start_marker = &start_marker;
@@ -254,13 +258,17 @@ void i::V8::FatalProcessOutOfMemory(const char* location, bool take_snapshot) {
  heap_stats.size_per_type = size_per_type;
  int os_error;
  heap_stats.os_error = &os_error;
  heap_stats.last_few_messages = last_few_messages;
  int end_marker;
  heap_stats.end_marker = &end_marker;
  i::Isolate* isolate = i::Isolate::Current();
  if (isolate->heap()->HasBeenSetUp()) {
    // BUG(1718): Don't use the take_snapshot since we don't support
    // HeapIterator here without doing a special GC.
    isolate->heap()->RecordStats(&heap_stats, false);
    char* first_newline = strchr(last_few_messages, '\n');
    if (first_newline == NULL || first_newline[1] == '\0')
      first_newline = last_few_messages;
    PrintF("\n<--- Last few GCs --->\n%s\n", first_newline);
  }
  Utils::ApiCheck(false, location, "Allocation failed - process out of memory");
  // If the fatal error handler returns, we stop execution.
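The strchr step above exists because, once the trace ring buffer has wrapped, the bytes before the first '\n' are usually the tail of a line that was partially overwritten, so printing starts at that newline rather than at the raw beginning. A minimal standalone sketch of just that selection step, with a hypothetical helper name and made-up buffer contents (not part of the commit):

#include <cstdio>
#include <cstring>

// Given text read back from a trace ring buffer, choose where to start
// printing. Anything before the first '\n' may be the cut-off tail of an
// overwritten line, so skip ahead to that newline unless nothing useful
// would remain.
const char* SelectPrintableRegion(const char* messages) {
  const char* first_newline = strchr(messages, '\n');
  if (first_newline == NULL || first_newline[1] == '\0') return messages;
  return first_newline;
}

int main() {
  // Made-up contents: a truncated fragment followed by one complete line.
  const char raw[] =
      "p 12.3 ms.\n 100 ms: Scavenge 1.0 (2.0) -> 0.5 (2.0) MB, 1.0 ms.\n";
  printf("\n<--- Last few GCs --->\n%s\n", SelectPrintableRegion(raw));
  return 0;
}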
@@ -167,11 +167,9 @@ void GCTracer::Start(GarbageCollector collector, const char* gc_reason,
void GCTracer::Stop(GarbageCollector collector) {
  start_counter_--;
  if (start_counter_ != 0) {
    if (FLAG_trace_gc) {
      PrintF("[Finished reentrant %s during %s.]\n",
             collector == SCAVENGER ? "Scavenge" : "Mark-sweep",
             current_.TypeName(false));
    }
    Output("[Finished reentrant %s during %s.]\n",
           collector == SCAVENGER ? "Scavenge" : "Mark-sweep",
           current_.TypeName(false));
    return;
  }
@@ -238,8 +236,6 @@ void GCTracer::Stop(GarbageCollector collector) {

  // TODO(ernstm): move the code below out of GCTracer.

  if (!FLAG_trace_gc && !FLAG_print_cumulative_gc_stat) return;

  double duration = current_.end_time - current_.start_time;
  double spent_in_mutator = Max(current_.start_time - previous_.end_time, 0.0);
@@ -249,12 +245,12 @@ void GCTracer::Stop(GarbageCollector collector) {
  if (current_.type == Event::SCAVENGER && FLAG_trace_gc_ignore_scavenger)
    return;

  if (FLAG_trace_gc) {
    if (FLAG_trace_gc_nvp)
      PrintNVP();
    else
      Print();
  if (FLAG_trace_gc_nvp)
    PrintNVP();
  else
    Print();

  if (FLAG_trace_gc) {
    heap_->PrintShortHeapStatistics();
  }
}
@@ -329,11 +325,33 @@ void GCTracer::AddIncrementalMarkingStep(double duration, intptr_t bytes) {
}


void GCTracer::Print() const {
  PrintIsolate(heap_->isolate(), "%8.0f ms: ",
               heap_->isolate()->time_millis_since_init());
void GCTracer::Output(const char* format, ...) const {
  if (FLAG_trace_gc) {
    va_list arguments;
    va_start(arguments, format);
    base::OS::VPrint(format, arguments);
    va_end(arguments);
  }

  PrintF("%s %.1f (%.1f) -> %.1f (%.1f) MB, ", current_.TypeName(false),
  const int kBufferSize = 256;
  char raw_buffer[kBufferSize];
  Vector<char> buffer(raw_buffer, kBufferSize);
  va_list arguments2;
  va_start(arguments2, format);
  VSNPrintF(buffer, format, arguments2);
  va_end(arguments2);

  heap_->AddToRingBuffer(buffer.start());
}


void GCTracer::Print() const {
  if (FLAG_trace_gc) {
    PrintIsolate(heap_->isolate(), "");
  }
  Output("%8.0f ms: ", heap_->isolate()->time_millis_since_init());

  Output("%s %.1f (%.1f) -> %.1f (%.1f) MB, ", current_.TypeName(false),
         static_cast<double>(current_.start_object_size) / MB,
         static_cast<double>(current_.start_memory_size) / MB,
         static_cast<double>(current_.end_object_size) / MB,
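Output() consumes its variadic arguments twice: once to print to stdout (only when --trace-gc is set) and once to format the same text into a stack buffer that is appended to the heap's ring buffer. A va_list cannot be reused after it has been consumed, which is why the function starts a second one (arguments2). A self-contained sketch of that dual-consumption pattern using only the C standard library, with a hypothetical function name (not V8's API):

#include <cstdarg>
#include <cstddef>
#include <cstdio>

// Print a formatted line and also capture it into a caller-provided buffer,
// walking the variadic arguments twice with two independent va_lists.
void PrintAndCapture(char* capture, size_t capture_size, const char* format, ...) {
  va_list print_args;
  va_start(print_args, format);
  vprintf(format, print_args);                              // first pass: stdout
  va_end(print_args);

  va_list capture_args;
  va_start(capture_args, format);
  vsnprintf(capture, capture_size, format, capture_args);   // second pass: memory
  va_end(capture_args);
}

int main() {
  char line[256];
  PrintAndCapture(line, sizeof(line), "%8.0f ms: %s\n", 1234.0, "Scavenge");
  // 'line' now holds the same text that was printed; a caller could append
  // it to a ring buffer for later crash dumps.
  fputs(line, stderr);
  return 0;
}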
@@ -343,16 +361,16 @@ void GCTracer::Print() const {
  if (external_time > 0) PrintF("%d / ", external_time);

  double duration = current_.end_time - current_.start_time;
  PrintF("%.1f ms", duration);
  Output("%.1f ms", duration);
  if (current_.type == Event::SCAVENGER) {
    if (current_.incremental_marking_steps > 0) {
      PrintF(" (+ %.1f ms in %d steps since last GC)",
      Output(" (+ %.1f ms in %d steps since last GC)",
             current_.incremental_marking_duration,
             current_.incremental_marking_steps);
    }
  } else {
    if (current_.incremental_marking_steps > 0) {
      PrintF(
      Output(
          " (+ %.1f ms in %d steps since start of marking, "
          "biggest step %.1f ms)",
          current_.incremental_marking_duration,
@@ -362,14 +380,14 @@ void GCTracer::Print() const {
  }

  if (current_.gc_reason != NULL) {
    PrintF(" [%s]", current_.gc_reason);
    Output(" [%s]", current_.gc_reason);
  }

  if (current_.collector_reason != NULL) {
    PrintF(" [%s]", current_.collector_reason);
    Output(" [%s]", current_.collector_reason);
  }

  PrintF(".\n");
  Output(".\n");
}
@@ -422,6 +422,10 @@ class GCTracer {
  // TODO(ernstm): Move to Heap.
  void Print() const;

  // Prints a line and also adds it to the heap's ring buffer so that
  // it can be included in later crash dumps.
  void Output(const char* format, ...) const;

  // Compute the mean duration of the events in the given ring buffer.
  double MeanDuration(const EventBuffer& events) const;
@@ -146,6 +146,8 @@ Heap::Heap()
      old_generation_size_at_last_gc_(0),
      gcs_since_last_deopt_(0),
      allocation_sites_scratchpad_length_(0),
      ring_buffer_full_(false),
      ring_buffer_end_(0),
      promotion_queue_(this),
      configured_(false),
      external_string_table_(this),
@@ -5300,6 +5302,30 @@ bool Heap::ConfigureHeap(int max_semi_space_size, int max_old_space_size,
}


void Heap::AddToRingBuffer(const char* string) {
  size_t first_part =
      Min(strlen(string), kTraceRingBufferSize - ring_buffer_end_);
  memcpy(trace_ring_buffer_ + ring_buffer_end_, string, first_part);
  ring_buffer_end_ += first_part;
  if (first_part < strlen(string)) {
    ring_buffer_full_ = true;
    size_t second_part = strlen(string) - first_part;
    memcpy(trace_ring_buffer_, string + first_part, second_part);
    ring_buffer_end_ = second_part;
  }
}


void Heap::GetFromRingBuffer(char* buffer) {
  size_t copied = 0;
  if (ring_buffer_full_) {
    copied = kTraceRingBufferSize - ring_buffer_end_;
    memcpy(buffer, trace_ring_buffer_ + ring_buffer_end_, copied);
  }
  memcpy(buffer + copied, trace_ring_buffer_, ring_buffer_end_);
}


bool Heap::ConfigureHeapDefault() { return ConfigureHeap(0, 0, 0, 0); }
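AddToRingBuffer() and GetFromRingBuffer() form a fixed-size character ring buffer: a write that runs past the end wraps around to the front, and a read reassembles the bytes in chronological order by starting at ring_buffer_end_ once the buffer has filled. A self-contained sketch of the same wrap-around logic with hypothetical names and a deliberately tiny buffer so the wrap is easy to see (not the V8 classes):

#include <algorithm>
#include <cstdio>
#include <cstring>

// Illustrative names and sizes only; V8 uses kTraceRingBufferSize == 512.
constexpr size_t kRingSize = 16;
char g_ring[kRingSize];
bool g_full = false;
size_t g_end = 0;  // next write position

// Append a string, wrapping to the front when it runs off the end.
// Assumes each string fits within the buffer, as in the V8 code where
// trace lines are capped at 256 characters.
void RingAdd(const char* s) {
  size_t len = strlen(s);
  size_t first_part = std::min(len, kRingSize - g_end);
  memcpy(g_ring + g_end, s, first_part);
  g_end += first_part;
  if (first_part < len) {  // ran off the end: wrap to the front
    g_full = true;
    size_t second_part = len - first_part;
    memcpy(g_ring, s + first_part, second_part);
    g_end = second_part;
  }
}

// Copy the contents out oldest-first. 'out' must hold kRingSize + 1 bytes;
// once the buffer has wrapped, the oldest surviving byte sits at g_end.
void RingGet(char* out) {
  size_t copied = 0;
  if (g_full) {
    copied = kRingSize - g_end;
    memcpy(out, g_ring + g_end, copied);
  }
  memcpy(out + copied, g_ring, g_end);
  out[copied + g_end] = '\0';
}

int main() {
  RingAdd("[GC 1]\n");
  RingAdd("[GC 2]\n");
  RingAdd("[GC 3]\n");  // third line forces a wrap with kRingSize == 16
  char out[kRingSize + 1];
  RingGet(out);
  printf("%s", out);    // oldest text is cut off mid-line: "]\n[GC 2]\n[GC 3]\n"
  return 0;
}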
@@ -5332,6 +5358,8 @@ void Heap::RecordStats(HeapStats* stats, bool take_snapshot) {
      stats->size_per_type[type] += obj->Size();
    }
  }
  if (stats->last_few_messages != NULL)
    GetFromRingBuffer(stats->last_few_messages);
}
@@ -1156,6 +1156,8 @@ class Heap {
  static const int kMaxExecutableSizeHugeMemoryDevice =
      256 * kPointerMultiplier;

  static const int kTraceRingBufferSize = 512;

  // Calculates the allocation limit based on a given growing factor and a
  // given old generation size.
  intptr_t CalculateOldGenerationAllocationLimit(double factor,
@@ -2182,6 +2184,9 @@ class Heap {
  inline void UpdateAllocationsHash(uint32_t value);
  inline void PrintAlloctionsHash();

  void AddToRingBuffer(const char* string);
  void GetFromRingBuffer(char* buffer);

  // Object counts and used memory by InstanceType
  size_t object_counts_[OBJECT_STATS_COUNT];
  size_t object_counts_last_time_[OBJECT_STATS_COUNT];
@@ -2249,6 +2254,13 @@ class Heap {
  static const int kAllocationSiteScratchpadSize = 256;
  int allocation_sites_scratchpad_length_;

  char trace_ring_buffer_[kTraceRingBufferSize];
  // If it's not full then the data is from 0 to ring_buffer_end_. If it's
  // full then the data is from ring_buffer_end_ to the end of the buffer and
  // from 0 to ring_buffer_end_.
  bool ring_buffer_full_;
  size_t ring_buffer_end_;

  static const int kMaxMarkCompactsInIdleRound = 7;
  static const int kIdleScavengeThreshold = 5;
@@ -2320,7 +2332,8 @@ class HeapStats {
  int* objects_per_type;            // 17
  int* size_per_type;               // 18
  int* os_error;                    // 19
  int* end_marker;                  // 20
  char* last_few_messages;          // 20
  int* end_marker;                  // 21
};