Propagate DeoptInfo to cpu-profiler
1) Deoptimizer::Reason was replaced with Deoptimizer::DeoptInfo, since it also carries the raw position and the old name clashed with the DeoptReason enum.
2) A c_entry_fp assignment was added to EntryGenerator::Generate so that sp can be calculated and the stack of the deopting function recorded; as a side effect this also makes the test stable.
3) A new kind of CodeEvent (CODE_DEOPT) was added to the cpu-profiler.
4) A GetDeoptInfo method was extracted from PrintDeoptLocation so it can be reused by the cpu-profiler.

BUG=452067
LOG=n
Review URL: https://codereview.chromium.org/910773002
Cr-Commit-Position: refs/heads/master@{#26545}
Parent: 0ca9bef37f
Commit: 86cae1633c
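Conceptual summary of the data flow this commit introduces, as a simplified, standalone C++ sketch. The types below (DeoptInfoSketch, CodeEntrySketch, ProfilerSketch) are illustrative stand-ins for V8's Deoptimizer::DeoptInfo, CodeDeoptEventRecord and CodeEntry, not the real classes; see the actual hunks below for the real code.

// Simplified, standalone sketch of the flow added by this commit: the
// deoptimizer builds a DeoptInfo-style record (reason + raw position) and the
// profiler attaches it to the entry that matches the deopting code object.
// All types here are illustrative stand-ins, not V8's actual classes.
#include <cstdint>
#include <cstdio>
#include <map>

struct DeoptInfoSketch {      // stands in for Deoptimizer::DeoptInfo
  int raw_position;           // raw source position taken from RelocInfo
  const char* mnemonic;       // LInstruction mnemonic (may be nullptr)
  const char* deopt_reason;   // human-readable reason text
};

struct CodeEntrySketch {      // stands in for the new CodeEntry fields
  const char* deopt_reason = "";
  int deopt_location = 0;
};

class ProfilerSketch {        // stands in for CpuProfiler + CodeDeoptEventRecord
 public:
  void RegisterCode(std::uintptr_t start) { entries_[start] = CodeEntrySketch(); }

  // Mirrors CodeDeoptEventRecord::UpdateCodeMap: find the entry by the code
  // object's start address and record the deopt reason and location on it.
  void CodeDeoptEvent(std::uintptr_t code_start, const DeoptInfoSketch& info) {
    auto it = entries_.find(code_start);
    if (it == entries_.end()) return;   // unknown code object: ignore
    it->second.deopt_reason = info.deopt_reason;
    it->second.deopt_location = info.raw_position;
  }

  void Print() const {
    for (const auto& kv : entries_) {
      std::printf("code@0x%lx deoptimized at %d: %s\n",
                  static_cast<unsigned long>(kv.first),
                  kv.second.deopt_location, kv.second.deopt_reason);
    }
  }

 private:
  std::map<std::uintptr_t, CodeEntrySketch> entries_;
};

int main() {
  ProfilerSketch profiler;
  profiler.RegisterCode(0x1000);
  // In V8 this record would come from Deoptimizer::GetDeoptInfo (see the
  // deoptimizer hunk below); the values here are made up for the example.
  DeoptInfoSketch info{42, nullptr, "not a heap number"};
  profiler.CodeDeoptEvent(0x1000, info);
  profiler.Print();
  return 0;
}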
@@ -165,6 +165,9 @@ void Deoptimizer::EntryGenerator::Generate() {
   // handle this a bit differently.
   __ stm(db_w, sp, restored_regs | sp.bit() | lr.bit() | pc.bit());
 
+  __ mov(ip, Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate())));
+  __ str(fp, MemOperand(ip));
+
   const int kSavedRegistersAreaSize =
       (kNumberOfRegisters * kPointerSize) + kDoubleRegsSize;
@@ -336,7 +336,7 @@ bool LCodeGen::GenerateJumpTable() {
 
       DCHECK_EQ(jump_table_[0].bailout_type, table_entry->bailout_type);
       Address entry = table_entry->address;
-      DeoptComment(table_entry->reason);
+      DeoptComment(table_entry->deopt_info);
 
       // Second-level deopt table entries are contiguous and small, so instead
       // of loading the full, absolute address of each one, load an immediate
@@ -893,17 +893,17 @@ void LCodeGen::DeoptimizeIf(Condition condition, LInstruction* instr,
     __ stop("trap_on_deopt", condition);
   }
 
-  Deoptimizer::Reason reason(instr->hydrogen_value()->position().raw(),
-                             instr->Mnemonic(), deopt_reason);
+  Deoptimizer::DeoptInfo deopt_info(instr->hydrogen_value()->position().raw(),
+                                    instr->Mnemonic(), deopt_reason);
   DCHECK(info()->IsStub() || frame_is_built_);
   // Go through jump table if we need to handle condition, build frame, or
   // restore caller doubles.
   if (condition == al && frame_is_built_ &&
       !info()->saves_caller_doubles()) {
-    DeoptComment(reason);
+    DeoptComment(deopt_info);
     __ Call(entry, RelocInfo::RUNTIME_ENTRY);
   } else {
-    Deoptimizer::JumpTableEntry table_entry(entry, reason, bailout_type,
+    Deoptimizer::JumpTableEntry table_entry(entry, deopt_info, bailout_type,
                                             !frame_is_built_);
     // We often have several deopts to the same entry, reuse the last
     // jump entry if this is the case.
@@ -132,6 +132,9 @@ void Deoptimizer::EntryGenerator::Generate() {
   saved_registers.Combine(fp);
   __ PushCPURegList(saved_registers);
 
+  __ Mov(x3, Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate())));
+  __ Str(fp, MemOperand(x3));
+
   const int kSavedRegistersAreaSize =
       (saved_registers.Count() * kXRegSize) +
       (saved_fp_registers.Count() * kDRegSize);
@@ -856,7 +856,7 @@ bool LCodeGen::GenerateJumpTable() {
     __ Bind(&table_entry->label);
 
     Address entry = table_entry->address;
-    DeoptComment(table_entry->reason);
+    DeoptComment(table_entry->deopt_info);
 
     // Second-level deopt table entries are contiguous and small, so instead
     // of loading the full, absolute address of each one, load the base
@@ -1057,18 +1057,18 @@ void LCodeGen::DeoptimizeBranch(
     __ Bind(&dont_trap);
   }
 
-  Deoptimizer::Reason reason(instr->hydrogen_value()->position().raw(),
-                             instr->Mnemonic(), deopt_reason);
+  Deoptimizer::DeoptInfo deopt_info(instr->hydrogen_value()->position().raw(),
+                                    instr->Mnemonic(), deopt_reason);
   DCHECK(info()->IsStub() || frame_is_built_);
   // Go through jump table if we need to build frame, or restore caller doubles.
   if (branch_type == always &&
       frame_is_built_ && !info()->saves_caller_doubles()) {
-    DeoptComment(reason);
+    DeoptComment(deopt_info);
     __ Call(entry, RelocInfo::RUNTIME_ENTRY);
   } else {
     Deoptimizer::JumpTableEntry* table_entry =
-        new (zone()) Deoptimizer::JumpTableEntry(entry, reason, bailout_type,
-                                                 !frame_is_built_);
+        new (zone()) Deoptimizer::JumpTableEntry(
+            entry, deopt_info, bailout_type, !frame_is_built_);
     // We often have several deopts to the same entry, reuse the last
     // jump entry if this is the case.
     if (jump_table_.is_empty() ||
@@ -36,6 +36,15 @@ void CodeDisableOptEventRecord::UpdateCodeMap(CodeMap* code_map) {
 }
 
 
+void CodeDeoptEventRecord::UpdateCodeMap(CodeMap* code_map) {
+  CodeEntry* entry = code_map->FindEntry(start);
+  if (entry != NULL) {
+    entry->set_deopt_reason(deopt_reason);
+    entry->set_deopt_location(raw_position);
+  }
+}
+
+
 void SharedFunctionInfoMoveEventRecord::UpdateCodeMap(CodeMap* code_map) {
   code_map->MoveCode(from, to);
 }
@@ -7,6 +7,7 @@
 #include "src/cpu-profiler-inl.h"
 
 #include "src/compiler.h"
+#include "src/deoptimizer.h"
 #include "src/frames-inl.h"
 #include "src/hashmap.h"
 #include "src/log-inl.h"
@@ -38,6 +39,19 @@ void ProfilerEventsProcessor::Enqueue(const CodeEventsContainer& event) {
 }
 
 
+void ProfilerEventsProcessor::AddDeoptStack(Isolate* isolate, Address from,
+                                            int fp_to_sp_delta) {
+  TickSampleEventRecord record(last_code_event_id_);
+  RegisterState regs;
+  Address fp = isolate->c_entry_fp(isolate->thread_local_top());
+  regs.sp = fp - fp_to_sp_delta;
+  regs.fp = fp;
+  regs.pc = from;
+  record.sample.Init(isolate, regs, TickSample::kSkipCEntryFrame);
+  ticks_from_vm_buffer_.Enqueue(record);
+}
+
+
 void ProfilerEventsProcessor::AddCurrentStack(Isolate* isolate) {
   TickSampleEventRecord record(last_code_event_id_);
   RegisterState regs;
@@ -329,6 +343,19 @@ void CpuProfiler::CodeDisableOptEvent(Code* code, SharedFunctionInfo* shared) {
 }
 
 
+void CpuProfiler::CodeDeoptEvent(Code* code, int bailout_id, Address pc,
+                                 int fp_to_sp_delta) {
+  CodeEventsContainer evt_rec(CodeEventRecord::CODE_DEOPT);
+  CodeDeoptEventRecord* rec = &evt_rec.CodeDeoptEventRecord_;
+  Deoptimizer::DeoptInfo info = Deoptimizer::GetDeoptInfo(code, bailout_id);
+  rec->start = code->address();
+  rec->deopt_reason = Deoptimizer::GetDeoptReason(info.deopt_reason);
+  rec->raw_position = info.raw_position;
+  processor_->Enqueue(evt_rec);
+  processor_->AddDeoptStack(isolate_, pc, fp_to_sp_delta);
+}
+
+
 void CpuProfiler::CodeDeleteEvent(Address from) {
 }
 
@@ -23,12 +23,13 @@ class CpuProfile;
 class CpuProfilesCollection;
 class ProfileGenerator;
 
-#define CODE_EVENTS_TYPE_LIST(V)                         \
-  V(CODE_CREATION, CodeCreateEventRecord)                \
-  V(CODE_MOVE, CodeMoveEventRecord)                      \
-  V(CODE_DISABLE_OPT, CodeDisableOptEventRecord)         \
-  V(SHARED_FUNC_MOVE, SharedFunctionInfoMoveEventRecord) \
-  V(REPORT_BUILTIN, ReportBuiltinEventRecord)
+#define CODE_EVENTS_TYPE_LIST(V)                         \
+  V(CODE_CREATION, CodeCreateEventRecord)                \
+  V(CODE_MOVE, CodeMoveEventRecord)                      \
+  V(CODE_DISABLE_OPT, CodeDisableOptEventRecord)         \
+  V(CODE_DEOPT, CodeDeoptEventRecord)                    \
+  V(SHARED_FUNC_MOVE, SharedFunctionInfoMoveEventRecord) \
+  V(REPORT_BUILTIN, ReportBuiltinEventRecord)
 
 
 class CodeEventRecord {
@@ -75,6 +76,16 @@ class CodeDisableOptEventRecord : public CodeEventRecord {
 };
 
 
+class CodeDeoptEventRecord : public CodeEventRecord {
+ public:
+  Address start;
+  const char* deopt_reason;
+  int raw_position;
+
+  INLINE(void UpdateCodeMap(CodeMap* code_map));
+};
+
+
 class SharedFunctionInfoMoveEventRecord : public CodeEventRecord {
  public:
   Address from;
@@ -137,6 +148,7 @@ class ProfilerEventsProcessor : public base::Thread {
 
   // Puts current stack into tick sample events buffer.
   void AddCurrentStack(Isolate* isolate);
+  void AddDeoptStack(Isolate* isolate, Address from, int fp_to_sp_delta);
 
   // Tick sample events are filled directly in the buffer of the circular
   // queue (because the structure is of fixed width, but usually not all
@@ -233,6 +245,8 @@ class CpuProfiler : public CodeEventListener {
   virtual void CodeMovingGCEvent() {}
   virtual void CodeMoveEvent(Address from, Address to);
   virtual void CodeDisableOptEvent(Code* code, SharedFunctionInfo* shared);
+  virtual void CodeDeoptEvent(Code* code, int bailout_id, Address pc,
+                              int fp_to_sp_delta);
   virtual void CodeDeleteEvent(Address from);
   virtual void GetterCallbackEvent(Name* name, Address entry_point);
   virtual void RegExpCodeCreateEvent(Code* code, String* source);
@@ -592,7 +592,6 @@ Deoptimizer::Deoptimizer(Isolate* isolate,
     }
   }
   compiled_code_ = FindOptimizedCode(function, optimized_code);
-
 #if DEBUG
   DCHECK(compiled_code_ != NULL);
   if (type == EAGER || type == SOFT || type == LAZY) {
@@ -609,6 +608,10 @@ Deoptimizer::Deoptimizer(Isolate* isolate,
   CHECK(AllowHeapAllocation::IsAllowed());
   disallow_heap_allocation_ = new DisallowHeapAllocation();
 #endif  // DEBUG
+  if (compiled_code_->kind() == Code::OPTIMIZED_FUNCTION) {
+    PROFILE(isolate_, CodeDeoptEvent(compiled_code_, bailout_id_, from_,
+                                     fp_to_sp_delta_));
+  }
   unsigned size = ComputeInputFrameSize();
   input_ = new(size) FrameDescription(size, function);
   input_->SetFrameType(frame_type);
@@ -747,11 +750,6 @@ int Deoptimizer::GetDeoptimizedCodeCount(Isolate* isolate) {
 // We rely on this function not causing a GC. It is called from generated code
 // without having a real stack frame in place.
 void Deoptimizer::DoComputeOutputFrames() {
-  // Print some helpful diagnostic information.
-  if (FLAG_log_timer_events &&
-      compiled_code_->kind() == Code::OPTIMIZED_FUNCTION) {
-    LOG(isolate(), CodeDeoptEvent(compiled_code_));
-  }
   base::ElapsedTimer timer;
 
   // Determine basic deoptimization information. The optimized frame is
@@ -3637,4 +3635,36 @@ const char* Deoptimizer::GetDeoptReason(DeoptReason deopt_reason) {
 #undef DEOPT_MESSAGES_TEXTS
   return deopt_messages_[deopt_reason];
 }
+
+
+Deoptimizer::DeoptInfo Deoptimizer::GetDeoptInfo(Code* code, int bailout_id) {
+  int last_position = 0;
+  Isolate* isolate = code->GetIsolate();
+  Deoptimizer::DeoptReason last_reason = Deoptimizer::kNoReason;
+  int mask = RelocInfo::ModeMask(RelocInfo::DEOPT_REASON) |
+             RelocInfo::ModeMask(RelocInfo::POSITION) |
+             RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);
+  for (RelocIterator it(code, mask); !it.done(); it.next()) {
+    RelocInfo* info = it.rinfo();
+    if (info->rmode() == RelocInfo::POSITION) {
+      last_position = static_cast<int>(info->data());
+    } else if (info->rmode() == RelocInfo::DEOPT_REASON) {
+      last_reason = static_cast<Deoptimizer::DeoptReason>(info->data());
+    } else if (last_reason != Deoptimizer::kNoReason) {
+      if ((bailout_id ==
+           Deoptimizer::GetDeoptimizationId(isolate, info->target_address(),
+                                            Deoptimizer::EAGER)) ||
+          (bailout_id ==
+           Deoptimizer::GetDeoptimizationId(isolate, info->target_address(),
+                                            Deoptimizer::SOFT)) ||
+          (bailout_id ==
+           Deoptimizer::GetDeoptimizationId(isolate, info->target_address(),
+                                            Deoptimizer::LAZY))) {
+        CHECK(RelocInfo::IsRuntimeEntry(info->rmode()));
+        return DeoptInfo(last_position, NULL, last_reason);
+      }
+    }
+  }
+  return DeoptInfo(0, NULL, Deoptimizer::kNoReason);
+}
 } }  // namespace v8::internal
@@ -184,41 +184,43 @@ class Deoptimizer : public Malloced {
 
   static const char* GetDeoptReason(DeoptReason deopt_reason);
 
-  struct Reason {
-    Reason(int r, const char* m, DeoptReason d)
+  struct DeoptInfo {
+    DeoptInfo(int r, const char* m, DeoptReason d)
         : raw_position(r), mnemonic(m), deopt_reason(d) {}
 
-    bool operator==(const Reason& other) const {
+    bool operator==(const DeoptInfo& other) const {
       return raw_position == other.raw_position &&
              CStringEquals(mnemonic, other.mnemonic) &&
             deopt_reason == other.deopt_reason;
     }
 
-    bool operator!=(const Reason& other) const { return !(*this == other); }
+    bool operator!=(const DeoptInfo& other) const { return !(*this == other); }
 
     int raw_position;
     const char* mnemonic;
     DeoptReason deopt_reason;
   };
 
+  static DeoptInfo GetDeoptInfo(Code* code, int bailout_id);
+
   struct JumpTableEntry : public ZoneObject {
-    inline JumpTableEntry(Address entry, const Reason& the_reason,
+    inline JumpTableEntry(Address entry, const DeoptInfo& deopt_info,
                           Deoptimizer::BailoutType type, bool frame)
         : label(),
           address(entry),
-          reason(the_reason),
+          deopt_info(deopt_info),
           bailout_type(type),
           needs_frame(frame) {}
 
     bool IsEquivalentTo(const JumpTableEntry& other) const {
       return address == other.address && bailout_type == other.bailout_type &&
              needs_frame == other.needs_frame &&
-             (!FLAG_trace_deopt || reason == other.reason);
+             (!FLAG_trace_deopt || deopt_info == other.deopt_info);
     }
 
     Label label;
     Address address;
-    Reason reason;
+    DeoptInfo deopt_info;
     Deoptimizer::BailoutType bailout_type;
     bool needs_frame;
   };
@@ -244,6 +244,9 @@ void Deoptimizer::EntryGenerator::Generate() {
 
   __ pushad();
 
+  ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress, isolate());
+  __ mov(Operand::StaticVariable(c_entry_fp_address), ebp);
+
   const int kSavedRegistersAreaSize = kNumberOfRegisters * kPointerSize +
                                       kDoubleRegsSize;
 
@@ -384,7 +384,7 @@ bool LCodeGen::GenerateJumpTable() {
     Deoptimizer::JumpTableEntry* table_entry = &jump_table_[i];
     __ bind(&table_entry->label);
     Address entry = table_entry->address;
-    DeoptComment(table_entry->reason);
+    DeoptComment(table_entry->deopt_info);
     if (table_entry->needs_frame) {
       DCHECK(!info()->saves_caller_doubles());
       __ push(Immediate(ExternalReference::ForDeoptEntry(entry)));
@@ -861,14 +861,14 @@ void LCodeGen::DeoptimizeIf(Condition cc, LInstruction* instr,
     __ bind(&done);
   }
 
-  Deoptimizer::Reason reason(instr->hydrogen_value()->position().raw(),
-                             instr->Mnemonic(), deopt_reason);
+  Deoptimizer::DeoptInfo deopt_info(instr->hydrogen_value()->position().raw(),
+                                    instr->Mnemonic(), deopt_reason);
   DCHECK(info()->IsStub() || frame_is_built_);
   if (cc == no_condition && frame_is_built_) {
-    DeoptComment(reason);
+    DeoptComment(deopt_info);
     __ call(entry, RelocInfo::RUNTIME_ENTRY);
   } else {
-    Deoptimizer::JumpTableEntry table_entry(entry, reason, bailout_type,
+    Deoptimizer::JumpTableEntry table_entry(entry, deopt_info, bailout_type,
                                             !frame_is_built_);
     // We often have several deopts to the same entry, reuse the last
     // jump entry if this is the case.
@@ -152,8 +152,8 @@ void LCodeGenBase::Comment(const char* format, ...) {
 }
 
 
-void LCodeGenBase::DeoptComment(const Deoptimizer::Reason& reason) {
-  masm()->RecordDeoptReason(reason.deopt_reason, reason.raw_position);
+void LCodeGenBase::DeoptComment(const Deoptimizer::DeoptInfo& deopt_info) {
+  masm()->RecordDeoptReason(deopt_info.deopt_reason, deopt_info.raw_position);
 }
 
 
@@ -35,7 +35,7 @@ class LCodeGenBase BASE_EMBEDDED {
   HGraph* graph() const;
 
   void FPRINTF_CHECKING Comment(const char* format, ...);
-  void DeoptComment(const Deoptimizer::Reason& reason);
+  void DeoptComment(const Deoptimizer::DeoptInfo& deopt_info);
 
   bool GenerateBody();
   virtual void GenerateBodyInstructionPre(LInstruction* instr) {}
@@ -911,9 +911,10 @@ void Logger::SharedLibraryEvent(const std::string& library_path,
 }
 
 
-void Logger::CodeDeoptEvent(Code* code) {
-  if (!log_->IsEnabled()) return;
-  DCHECK(FLAG_log_internal_timer_events);
+void Logger::CodeDeoptEvent(Code* code, int bailout_id, Address from,
+                            int fp_to_sp_delta) {
+  PROFILER_LOG(CodeDeoptEvent(code, bailout_id, from, fp_to_sp_delta));
+  if (!log_->IsEnabled() || !FLAG_log_internal_timer_events) return;
   Log::MessageBuilder msg(log_);
   int since_epoch = static_cast<int>(timer_.Elapsed().InMicroseconds());
   msg.Append("code-deopt,%ld,%d", since_epoch, code->CodeSize());
@@ -292,7 +292,8 @@ class Logger {
                           uintptr_t start,
                           uintptr_t end);
 
-  void CodeDeoptEvent(Code* code);
+  void CodeDeoptEvent(Code* code, int bailout_id, Address from,
+                      int fp_to_sp_delta);
   void CurrentTimeEvent();
 
   void TimerEvent(StartEnd se, const char* name);
@@ -162,6 +162,9 @@ void Deoptimizer::EntryGenerator::Generate() {
     }
   }
 
+  __ li(a2, Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate())));
+  __ sw(fp, MemOperand(a2));
+
   const int kSavedRegistersAreaSize =
       (kNumberOfRegisters * kPointerSize) + kDoubleRegsSize;
 
@@ -337,7 +337,7 @@ bool LCodeGen::GenerateJumpTable() {
 
       DCHECK(table_entry->bailout_type == jump_table_[0].bailout_type);
       Address entry = table_entry->address;
-      DeoptComment(table_entry->reason);
+      DeoptComment(table_entry->deopt_info);
 
       // Second-level deopt table entries are contiguous and small, so instead
       // of loading the full, absolute address of each one, load an immediate
@@ -856,17 +856,17 @@ void LCodeGen::DeoptimizeIf(Condition condition, LInstruction* instr,
     __ bind(&skip);
   }
 
-  Deoptimizer::Reason reason(instr->hydrogen_value()->position().raw(),
-                             instr->Mnemonic(), deopt_reason);
+  Deoptimizer::DeoptInfo deopt_info(instr->hydrogen_value()->position().raw(),
+                                    instr->Mnemonic(), deopt_reason);
   DCHECK(info()->IsStub() || frame_is_built_);
   // Go through jump table if we need to handle condition, build frame, or
   // restore caller doubles.
   if (condition == al && frame_is_built_ &&
       !info()->saves_caller_doubles()) {
-    DeoptComment(reason);
+    DeoptComment(deopt_info);
     __ Call(entry, RelocInfo::RUNTIME_ENTRY, condition, src1, src2);
   } else {
-    Deoptimizer::JumpTableEntry table_entry(entry, reason, bailout_type,
+    Deoptimizer::JumpTableEntry table_entry(entry, deopt_info, bailout_type,
                                             !frame_is_built_);
     // We often have several deopts to the same entry, reuse the last
     // jump entry if this is the case.
@@ -309,7 +309,7 @@ bool LCodeGen::GenerateJumpTable() {
     Deoptimizer::JumpTableEntry* table_entry = &jump_table_[i];
     __ bind(&table_entry->label);
     Address entry = table_entry->address;
-    DeoptComment(table_entry->reason);
+    DeoptComment(table_entry->deopt_info);
     __ li(t9, Operand(ExternalReference::ForDeoptEntry(entry)));
     if (table_entry->needs_frame) {
       DCHECK(!info()->saves_caller_doubles());
@@ -806,17 +806,17 @@ void LCodeGen::DeoptimizeIf(Condition condition, LInstruction* instr,
     __ bind(&skip);
   }
 
-  Deoptimizer::Reason reason(instr->hydrogen_value()->position().raw(),
-                             instr->Mnemonic(), deopt_reason);
+  Deoptimizer::DeoptInfo deopt_info(instr->hydrogen_value()->position().raw(),
+                                    instr->Mnemonic(), deopt_reason);
   DCHECK(info()->IsStub() || frame_is_built_);
   // Go through jump table if we need to handle condition, build frame, or
   // restore caller doubles.
   if (condition == al && frame_is_built_ &&
       !info()->saves_caller_doubles()) {
-    DeoptComment(reason);
+    DeoptComment(deopt_info);
     __ Call(entry, RelocInfo::RUNTIME_ENTRY, condition, src1, src2);
   } else {
-    Deoptimizer::JumpTableEntry table_entry(entry, reason, bailout_type,
+    Deoptimizer::JumpTableEntry table_entry(entry, deopt_info, bailout_type,
                                             !frame_is_built_);
     // We often have several deopts to the same entry, reuse the last
     // jump entry if this is the case.
@@ -11084,30 +11084,10 @@ Code* Code::GetCodeAgeStub(Isolate* isolate, Age age, MarkingParity parity) {
 
 
 void Code::PrintDeoptLocation(FILE* out, int bailout_id) {
-  int last_position = 0;
-  Deoptimizer::DeoptReason last_reason = Deoptimizer::kNoReason;
-  int mask = RelocInfo::ModeMask(RelocInfo::DEOPT_REASON) |
-             RelocInfo::ModeMask(RelocInfo::POSITION) |
-             RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);
-  for (RelocIterator it(this, mask); !it.done(); it.next()) {
-    RelocInfo* info = it.rinfo();
-    if (info->rmode() == RelocInfo::POSITION) {
-      last_position = static_cast<int>(info->data());
-    } else if (info->rmode() == RelocInfo::DEOPT_REASON) {
-      last_reason = static_cast<Deoptimizer::DeoptReason>(info->data());
-    } else if (last_reason != Deoptimizer::kNoReason) {
-      if ((bailout_id == Deoptimizer::GetDeoptimizationId(
-                             GetIsolate(), info->target_address(), Deoptimizer::EAGER)) ||
-          (bailout_id == Deoptimizer::GetDeoptimizationId(
-                             GetIsolate(), info->target_address(), Deoptimizer::SOFT)) ||
-          (bailout_id == Deoptimizer::GetDeoptimizationId(
-                             GetIsolate(), info->target_address(), Deoptimizer::LAZY))) {
-        CHECK(RelocInfo::IsRuntimeEntry(info->rmode()));
-        PrintF(out, " ;;; deoptimize at %d: %s\n", last_position,
-               Deoptimizer::GetDeoptReason(last_reason));
-        return;
-      }
-    }
-  }
+  Deoptimizer::DeoptInfo info = Deoptimizer::GetDeoptInfo(this, bailout_id);
+  if (info.deopt_reason != Deoptimizer::kNoReason || info.raw_position != 0) {
+    PrintF(out, " ;;; deoptimize at %d: %s\n", info.raw_position,
+           Deoptimizer::GetDeoptReason(info.deopt_reason));
+  }
 }
 
 
@@ -327,7 +327,7 @@ bool LCodeGen::GenerateJumpTable() {
 
     DCHECK_EQ(jump_table_[0].bailout_type, table_entry->bailout_type);
    Address entry = table_entry->address;
-    DeoptComment(table_entry->reason);
+    DeoptComment(table_entry->deopt_info);
 
     // Second-level deopt table entries are contiguous and small, so instead
     // of loading the full, absolute address of each one, load an immediate
@@ -812,16 +812,16 @@ void LCodeGen::DeoptimizeIf(Condition cond, LInstruction* instr,
     __ stop("trap_on_deopt", cond, kDefaultStopCode, cr);
   }
 
-  Deoptimizer::Reason reason(instr->hydrogen_value()->position().raw(),
-                             instr->Mnemonic(), deopt_reason);
+  Deoptimizer::DeoptInfo deopt_info(instr->hydrogen_value()->position().raw(),
+                                    instr->Mnemonic(), deopt_reason);
   DCHECK(info()->IsStub() || frame_is_built_);
   // Go through jump table if we need to handle condition, build frame, or
   // restore caller doubles.
   if (cond == al && frame_is_built_ && !info()->saves_caller_doubles()) {
-    DeoptComment(reason);
+    DeoptComment(deopt_info);
     __ Call(entry, RelocInfo::RUNTIME_ENTRY);
   } else {
-    Deoptimizer::JumpTableEntry table_entry(entry, reason, bailout_type,
+    Deoptimizer::JumpTableEntry table_entry(entry, deopt_info, bailout_type,
                                             !frame_is_built_);
     // We often have several deopts to the same entry, reuse the last
     // jump entry if this is the case.
@@ -25,6 +25,8 @@ CodeEntry::CodeEntry(Logger::LogEventsAndTags tag, const char* name,
       script_id_(v8::UnboundScript::kNoScriptId),
       no_frame_ranges_(NULL),
       bailout_reason_(kEmptyBailoutReason),
+      deopt_reason_(kEmptyBailoutReason),
+      deopt_location_(0),
       line_info_(line_info),
       instruction_start_(instruction_start) {}
 
@@ -90,6 +90,10 @@ class CodeEntry {
   void set_bailout_reason(const char* bailout_reason) {
     bailout_reason_ = bailout_reason;
   }
+  void set_deopt_reason(const char* deopt_reason) {
+    deopt_reason_ = deopt_reason;
+  }
+  void set_deopt_location(int location) { deopt_location_ = location; }
   const char* bailout_reason() const { return bailout_reason_; }
 
   static inline bool is_js_function_tag(Logger::LogEventsAndTags tag);
@@ -130,6 +134,8 @@ class CodeEntry {
   int script_id_;
   List<OffsetRange>* no_frame_ranges_;
   const char* bailout_reason_;
+  const char* deopt_reason_;
+  int deopt_location_;
   JITLineInfoTable* line_info_;
   Address instruction_start_;
 
@@ -160,6 +160,8 @@ void Deoptimizer::EntryGenerator::Generate() {
   const int kSavedRegistersAreaSize = kNumberOfRegisters * kRegisterSize +
                                       kDoubleRegsSize;
 
+  __ Store(ExternalReference(Isolate::kCEntryFPAddress, isolate()), rbp);
+
   // We use this to keep the value of the fifth argument temporarily.
   // Unfortunately we can't store it directly in r8 (used for passing
   // this on linux), since it is another parameter passing register on windows.
@@ -304,7 +304,7 @@ bool LCodeGen::GenerateJumpTable() {
     Deoptimizer::JumpTableEntry* table_entry = &jump_table_[i];
     __ bind(&table_entry->label);
     Address entry = table_entry->address;
-    DeoptComment(table_entry->reason);
+    DeoptComment(table_entry->deopt_info);
     if (table_entry->needs_frame) {
       DCHECK(!info()->saves_caller_doubles());
       __ Move(kScratchRegister, ExternalReference::ForDeoptEntry(entry));
@@ -768,17 +768,17 @@ void LCodeGen::DeoptimizeIf(Condition cc, LInstruction* instr,
     __ bind(&done);
   }
 
-  Deoptimizer::Reason reason(instr->hydrogen_value()->position().raw(),
-                             instr->Mnemonic(), deopt_reason);
+  Deoptimizer::DeoptInfo deopt_info(instr->hydrogen_value()->position().raw(),
+                                    instr->Mnemonic(), deopt_reason);
   DCHECK(info()->IsStub() || frame_is_built_);
   // Go through jump table if we need to handle condition, build frame, or
   // restore caller doubles.
   if (cc == no_condition && frame_is_built_ &&
       !info()->saves_caller_doubles()) {
-    DeoptComment(reason);
+    DeoptComment(deopt_info);
     __ call(entry, RelocInfo::RUNTIME_ENTRY);
   } else {
-    Deoptimizer::JumpTableEntry table_entry(entry, reason, bailout_type,
+    Deoptimizer::JumpTableEntry table_entry(entry, deopt_info, bailout_type,
                                             !frame_is_built_);
     // We often have several deopts to the same entry, reuse the last
     // jump entry if this is the case.
@@ -381,7 +381,7 @@ bool LCodeGen::GenerateJumpTable() {
     Deoptimizer::JumpTableEntry* table_entry = &jump_table_[i];
     __ bind(&table_entry->label);
     Address entry = table_entry->address;
-    DeoptComment(table_entry->reason);
+    DeoptComment(table_entry->deopt_info);
     if (table_entry->needs_frame) {
       DCHECK(!info()->saves_caller_doubles());
       __ push(Immediate(ExternalReference::ForDeoptEntry(entry)));
@@ -1143,14 +1143,14 @@ void LCodeGen::DeoptimizeIf(Condition cc, LInstruction* instr,
     __ bind(&done);
   }
 
-  Deoptimizer::Reason reason(instr->hydrogen_value()->position().raw(),
-                             instr->Mnemonic(), deopt_reason);
+  Deoptimizer::DeoptInfo deopt_info(instr->hydrogen_value()->position().raw(),
+                                    instr->Mnemonic(), deopt_reason);
   DCHECK(info()->IsStub() || frame_is_built_);
   if (cc == no_condition && frame_is_built_) {
-    DeoptComment(reason);
+    DeoptComment(deopt_info);
     __ call(entry, RelocInfo::RUNTIME_ENTRY);
   } else {
-    Deoptimizer::JumpTableEntry table_entry(entry, reason, bailout_type,
+    Deoptimizer::JumpTableEntry table_entry(entry, deopt_info, bailout_type,
                                             !frame_is_built_);
     // We often have several deopts to the same entry, reuse the last
     // jump entry if this is the case.
@@ -493,6 +493,17 @@ static void CheckSimpleBranch(v8::Isolate* isolate,
 }
 
 
+static const v8::CpuProfileNode* GetSimpleBranch(v8::Isolate* isolate,
+                                                 const v8::CpuProfileNode* node,
+                                                 const char* names[],
+                                                 int length) {
+  for (int i = 0; i < length; i++) {
+    node = GetChild(isolate, node, names[i]);
+  }
+  return node;
+}
+
+
 static const char* cpu_profiler_test_source = "function loop(timeout) {\n"
 "  this.mmm = 0;\n"
 "  var start = Date.now();\n"
||||
@ -1706,3 +1717,48 @@ TEST(DontStopOnFinishedProfileDelete) {
|
||||
outer_profile = NULL;
|
||||
CHECK_EQ(0, iprofiler->GetProfilesCount());
|
||||
}
|
||||
|
||||
|
||||
static const char* collect_deopt_events_test_source =
|
||||
"function opt_function(value) {\n"
|
||||
" return value / 10;\n"
|
||||
"}\n"
|
||||
"\n"
|
||||
"function test(value) {\n"
|
||||
" return opt_function(value);\n"
|
||||
"}\n"
|
||||
"\n"
|
||||
"startProfiling();\n"
|
||||
"\n"
|
||||
"for (var i = 0; i < 10; ++i) test(10);\n"
|
||||
"\n"
|
||||
"%OptimizeFunctionOnNextCall(opt_function)\n"
|
||||
"\n"
|
||||
"for (var i = 0; i < 10; ++i) test(10);\n"
|
||||
"\n"
|
||||
"for (var i = 0; i < 10; ++i) test(undefined);\n"
|
||||
"\n"
|
||||
"stopProfiling();\n"
|
||||
"\n";
|
||||
|
||||
|
||||
TEST(CollectDeoptEvents) {
|
||||
if (!CcTest::i_isolate()->use_crankshaft() || i::FLAG_always_opt) return;
|
||||
i::FLAG_allow_natives_syntax = true;
|
||||
v8::HandleScope scope(CcTest::isolate());
|
||||
v8::Local<v8::Context> env = CcTest::NewContext(PROFILER_EXTENSION);
|
||||
v8::Context::Scope context_scope(env);
|
||||
v8::Isolate* isolate = env->GetIsolate();
|
||||
v8::CpuProfiler* profiler = isolate->GetCpuProfiler();
|
||||
i::CpuProfiler* iprofiler = reinterpret_cast<i::CpuProfiler*>(profiler);
|
||||
|
||||
v8::Script::Compile(v8_str(collect_deopt_events_test_source))->Run();
|
||||
i::CpuProfile* iprofile = iprofiler->GetProfile(0);
|
||||
iprofile->Print();
|
||||
v8::CpuProfile* profile = reinterpret_cast<v8::CpuProfile*>(iprofile);
|
||||
const char* branch[] = {"", "test", "opt_function"};
|
||||
const v8::CpuProfileNode* opt_function = GetSimpleBranch(
|
||||
env->GetIsolate(), profile->GetTopDownRoot(), branch, arraysize(branch));
|
||||
CHECK(opt_function);
|
||||
iprofiler->DeleteProfile(iprofile);
|
||||
}
|
||||
|