From b65b6d735f3b26fb6d6f9fb60aacca7557bc0f6a Mon Sep 17 00:00:00 2001 From: "danno@chromium.org" Date: Tue, 14 May 2013 11:45:33 +0000 Subject: [PATCH] Ensure that soft-deopts don't count against opt_count This makes sure that Crankshaft doesn't disable optimization too early on hot functions that still contain unexecuted code without type information. R=jkummerow@chromium.org Review URL: https://codereview.chromium.org/14738009 git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@14658 ce2b1a6d-e550-0410-aec6-3dcde31c8c00 --- src/arm/builtins-arm.cc | 5 ++ src/arm/deoptimizer-arm.cc | 17 ++++--- src/arm/lithium-codegen-arm.cc | 42 ++++++++++----- src/arm/lithium-codegen-arm.h | 18 ++----- src/builtins.h | 3 ++ src/deoptimizer.cc | 58 +++++++++++---------- src/deoptimizer.h | 87 +++++++++++++++++++------------- src/disassembler.cc | 9 +++- src/ia32/builtins-ia32.cc | 5 ++ src/ia32/deoptimizer-ia32.cc | 15 ++++-- src/ia32/lithium-codegen-ia32.cc | 45 ++++++++++++----- src/ia32/lithium-codegen-ia32.h | 18 ++----- src/objects.cc | 15 +++--- src/runtime.cc | 3 +- src/x64/builtins-x64.cc | 5 ++ src/x64/deoptimizer-x64.cc | 16 ++++-- src/x64/lithium-codegen-x64.cc | 42 ++++++++++----- src/x64/lithium-codegen-x64.h | 19 ++----- 18 files changed, 256 insertions(+), 166 deletions(-) diff --git a/src/arm/builtins-arm.cc b/src/arm/builtins-arm.cc index 3cc2797e94..121f3c2bc9 100644 --- a/src/arm/builtins-arm.cc +++ b/src/arm/builtins-arm.cc @@ -1340,6 +1340,11 @@ void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) { } +void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) { + Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT); +} + + void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) { Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY); } diff --git a/src/arm/deoptimizer-arm.cc b/src/arm/deoptimizer-arm.cc index c2ae5a01ea..5a0a04fcbf 100644 --- a/src/arm/deoptimizer-arm.cc +++ b/src/arm/deoptimizer-arm.cc @@ 
-544,9 +544,14 @@ void Deoptimizer::DoComputeJSFrame(TranslationIterator* iterator, // Set the continuation for the topmost frame. if (is_topmost && bailout_type_ != DEBUGGER) { Builtins* builtins = isolate_->builtins(); - Code* continuation = (bailout_type_ == EAGER) - ? builtins->builtin(Builtins::kNotifyDeoptimized) - : builtins->builtin(Builtins::kNotifyLazyDeoptimized); + Code* continuation = builtins->builtin(Builtins::kNotifyDeoptimized); + if (bailout_type_ == LAZY) { + continuation = builtins->builtin(Builtins::kNotifyLazyDeoptimized); + } else if (bailout_type_ == SOFT) { + continuation = builtins->builtin(Builtins::kNotifySoftDeoptimized); + } else { + ASSERT(bailout_type_ == EAGER); + } output_frame->SetContinuation( reinterpret_cast(continuation->entry())); } @@ -639,7 +644,7 @@ void Deoptimizer::EntryGenerator::Generate() { // Get the address of the location in the code object if possible (r3) (return // address for lazy deoptimization) and compute the fp-to-sp delta in // register r4. - if (type() == EAGER) { + if (type() == EAGER || type() == SOFT) { __ mov(r3, Operand::Zero()); // Correct one word for bailout id. __ add(r4, sp, Operand(kSavedRegistersAreaSize + (1 * kPointerSize))); @@ -694,7 +699,7 @@ void Deoptimizer::EntryGenerator::Generate() { // Remove the bailout id, eventually return address, and the saved registers // from the stack. - if (type() == EAGER || type() == OSR) { + if (type() == EAGER || type() == SOFT || type() == OSR) { __ add(sp, sp, Operand(kSavedRegistersAreaSize + (1 * kPointerSize))); } else { __ add(sp, sp, Operand(kSavedRegistersAreaSize + (2 * kPointerSize))); @@ -813,7 +818,7 @@ void Deoptimizer::TableEntryGenerator::GeneratePrologue() { for (int i = 0; i < count(); i++) { int start = masm()->pc_offset(); USE(start); - if (type() == EAGER) { + if (type() == EAGER || type() == SOFT) { __ nop(); } else { // Emulate ia32 like call by pushing return address to stack. 
diff --git a/src/arm/lithium-codegen-arm.cc b/src/arm/lithium-codegen-arm.cc index 627bc607e7..4f5ab57b78 100644 --- a/src/arm/lithium-codegen-arm.cc +++ b/src/arm/lithium-codegen-arm.cc @@ -360,9 +360,7 @@ bool LCodeGen::GenerateDeoptJumpTable() { for (int i = 0; i < deopt_jump_table_.length(); i++) { __ bind(&deopt_jump_table_[i].label); Address entry = deopt_jump_table_[i].address; - bool is_lazy_deopt = deopt_jump_table_[i].is_lazy_deopt; - Deoptimizer::BailoutType type = - is_lazy_deopt ? Deoptimizer::LAZY : Deoptimizer::EAGER; + Deoptimizer::BailoutType type = deopt_jump_table_[i].bailout_type; int id = Deoptimizer::GetDeoptimizationId(isolate(), entry, type); if (id == Deoptimizer::kNotDeoptimizationEntry) { Comment(";;; jump table entry %d.", i); @@ -371,7 +369,7 @@ bool LCodeGen::GenerateDeoptJumpTable() { } if (deopt_jump_table_[i].needs_frame) { __ mov(ip, Operand(ExternalReference::ForDeoptEntry(entry))); - if (is_lazy_deopt) { + if (type == Deoptimizer::LAZY) { if (needs_frame_is_call.is_bound()) { __ b(&needs_frame_is_call); } else { @@ -404,7 +402,7 @@ bool LCodeGen::GenerateDeoptJumpTable() { } } } else { - if (is_lazy_deopt) { + if (type == Deoptimizer::LAZY) { __ mov(lr, Operand(pc), LeaveCC, al); __ mov(pc, Operand(ExternalReference::ForDeoptEntry(entry))); } else { @@ -833,14 +831,13 @@ void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment, } -void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) { +void LCodeGen::DeoptimizeIf(Condition cc, + LEnvironment* environment, + Deoptimizer::BailoutType bailout_type) { RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt); ASSERT(environment->HasBeenRegistered()); int id = environment->deoptimization_index(); ASSERT(info()->IsOptimizing() || info()->IsStub()); - Deoptimizer::BailoutType bailout_type = info()->IsStub() - ? 
Deoptimizer::LAZY - : Deoptimizer::EAGER; Address entry = Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type); if (entry == NULL) { @@ -873,9 +870,11 @@ void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) { // jump entry if this is the case. if (deopt_jump_table_.is_empty() || (deopt_jump_table_.last().address != entry) || - (deopt_jump_table_.last().is_lazy_deopt != needs_lazy_deopt) || + (deopt_jump_table_.last().bailout_type != bailout_type) || (deopt_jump_table_.last().needs_frame != !frame_is_built_)) { - JumpTableEntry table_entry(entry, !frame_is_built_, needs_lazy_deopt); + Deoptimizer::JumpTableEntry table_entry(entry, + bailout_type, + !frame_is_built_); deopt_jump_table_.Add(table_entry, zone()); } __ b(cc, &deopt_jump_table_.last().label); @@ -883,6 +882,21 @@ void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) { } +void LCodeGen::DeoptimizeIf(Condition cc, + LEnvironment* environment) { + Deoptimizer::BailoutType bailout_type = info()->IsStub() + ? 
Deoptimizer::LAZY + : Deoptimizer::EAGER; + DeoptimizeIf(cc, environment, bailout_type); +} + + +void LCodeGen::SoftDeoptimize(LEnvironment* environment) { + ASSERT(!info()->IsStub()); + DeoptimizeIf(al, environment, Deoptimizer::SOFT); +} + + void LCodeGen::RegisterDependentCodeForEmbeddedMaps(Handle code) { ZoneList > maps(1, zone()); int mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT); @@ -5722,7 +5736,11 @@ void LCodeGen::DoLazyBailout(LLazyBailout* instr) { void LCodeGen::DoDeoptimize(LDeoptimize* instr) { - DeoptimizeIf(al, instr->environment()); + if (instr->hydrogen_value()->IsSoftDeoptimize()) { + SoftDeoptimize(instr->environment()); + } else { + DeoptimizeIf(al, instr->environment()); + } } diff --git a/src/arm/lithium-codegen-arm.h b/src/arm/lithium-codegen-arm.h index 294dcf2051..1a34169ebf 100644 --- a/src/arm/lithium-codegen-arm.h +++ b/src/arm/lithium-codegen-arm.h @@ -290,7 +290,11 @@ class LCodeGen BASE_EMBEDDED { void RegisterEnvironmentForDeoptimization(LEnvironment* environment, Safepoint::DeoptMode mode); + void DeoptimizeIf(Condition cc, + LEnvironment* environment, + Deoptimizer::BailoutType bailout_type); void DeoptimizeIf(Condition cc, LEnvironment* environment); + void SoftDeoptimize(LEnvironment* environment); void AddToTranslation(Translation* translation, LOperand* op, @@ -387,18 +391,6 @@ class LCodeGen BASE_EMBEDDED { Register scratch, LEnvironment* environment); - struct JumpTableEntry { - inline JumpTableEntry(Address entry, bool frame, bool is_lazy) - : label(), - address(entry), - needs_frame(frame), - is_lazy_deopt(is_lazy) { } - Label label; - Address address; - bool needs_frame; - bool is_lazy_deopt; - }; - void EnsureSpaceForLazyDeopt(); void DoLoadKeyedExternalArray(LLoadKeyed* instr); void DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr); @@ -416,7 +408,7 @@ class LCodeGen BASE_EMBEDDED { int current_instruction_; const ZoneList* instructions_; ZoneList deoptimizations_; - ZoneList deopt_jump_table_; + ZoneList 
deopt_jump_table_; ZoneList > deoptimization_literals_; ZoneList > prototype_maps_; ZoneList > transition_maps_; diff --git a/src/builtins.h b/src/builtins.h index 6fc17c45c0..58d1a8b147 100644 --- a/src/builtins.h +++ b/src/builtins.h @@ -107,6 +107,8 @@ enum BuiltinExtraArguments { Code::kNoExtraICState) \ V(NotifyDeoptimized, BUILTIN, UNINITIALIZED, \ Code::kNoExtraICState) \ + V(NotifySoftDeoptimized, BUILTIN, UNINITIALIZED, \ + Code::kNoExtraICState) \ V(NotifyLazyDeoptimized, BUILTIN, UNINITIALIZED, \ Code::kNoExtraICState) \ V(NotifyStubFailure, BUILTIN, UNINITIALIZED, \ @@ -380,6 +382,7 @@ class Builtins { static void Generate_LazyCompile(MacroAssembler* masm); static void Generate_LazyRecompile(MacroAssembler* masm); static void Generate_NotifyDeoptimized(MacroAssembler* masm); + static void Generate_NotifySoftDeoptimized(MacroAssembler* masm); static void Generate_NotifyLazyDeoptimized(MacroAssembler* masm); static void Generate_NotifyOSR(MacroAssembler* masm); static void Generate_NotifyStubFailure(MacroAssembler* masm); diff --git a/src/deoptimizer.cc b/src/deoptimizer.cc index 92a2af23e4..1bf27f17b4 100644 --- a/src/deoptimizer.cc +++ b/src/deoptimizer.cc @@ -50,22 +50,23 @@ static MemoryChunk* AllocateCodeChunk(MemoryAllocator* allocator) { DeoptimizerData::DeoptimizerData(MemoryAllocator* allocator) : allocator_(allocator), - eager_deoptimization_entry_code_entries_(-1), - lazy_deoptimization_entry_code_entries_(-1), - eager_deoptimization_entry_code_(AllocateCodeChunk(allocator)), - lazy_deoptimization_entry_code_(AllocateCodeChunk(allocator)), current_(NULL), #ifdef ENABLE_DEBUGGER_SUPPORT deoptimized_frame_info_(NULL), #endif - deoptimizing_code_list_(NULL) { } + deoptimizing_code_list_(NULL) { + for (int i = 0; i < Deoptimizer::kBailoutTypesWithCodeEntry; ++i) { + deopt_entry_code_entries_[i] = -1; + deopt_entry_code_[i] = AllocateCodeChunk(allocator); + } +} DeoptimizerData::~DeoptimizerData() { - 
allocator_->Free(eager_deoptimization_entry_code_); - eager_deoptimization_entry_code_ = NULL; - allocator_->Free(lazy_deoptimization_entry_code_); - lazy_deoptimization_entry_code_ = NULL; + for (int i = 0; i < Deoptimizer::kBailoutTypesWithCodeEntry; ++i) { + allocator_->Free(deopt_entry_code_[i]); + deopt_entry_code_[i] = NULL; + } DeoptimizingCodeListNode* current = deoptimizing_code_list_; while (current != NULL) { @@ -488,6 +489,7 @@ bool Deoptimizer::TraceEnabledFor(BailoutType deopt_type, StackFrame::Type frame_type) { switch (deopt_type) { case EAGER: + case SOFT: case LAZY: case DEBUGGER: return (frame_type == StackFrame::STUB) @@ -504,6 +506,7 @@ bool Deoptimizer::TraceEnabledFor(BailoutType deopt_type, const char* Deoptimizer::MessageFor(BailoutType type) { switch (type) { case EAGER: + case SOFT: case LAZY: return "DEOPT"; case DEBUGGER: @@ -545,6 +548,13 @@ Deoptimizer::Deoptimizer(Isolate* isolate, } if (function != NULL && function->IsOptimized()) { function->shared()->increment_deopt_count(); + if (bailout_type_ == Deoptimizer::SOFT) { + // Soft deopts shouldn't count against the overall re-optimization count + // that can eventually lead to disabling optimization for a function. 
+ int opt_count = function->shared()->opt_count(); + if (opt_count > 0) opt_count--; + function->shared()->set_opt_count(opt_count); + } } compiled_code_ = FindOptimizedCode(function, optimized_code); StackFrame::Type frame_type = function == NULL @@ -562,6 +572,7 @@ Deoptimizer::Deoptimizer(Isolate* isolate, Code* Deoptimizer::FindOptimizedCode(JSFunction* function, Code* optimized_code) { switch (bailout_type_) { + case Deoptimizer::SOFT: case Deoptimizer::EAGER: ASSERT(from_ == NULL); return function->code(); @@ -597,7 +608,9 @@ void Deoptimizer::Trace() { bailout_id_, reinterpret_cast(from_), fp_to_sp_delta_ - (2 * kPointerSize)); - if (bailout_type_ == EAGER) compiled_code_->PrintDeoptLocation(bailout_id_); + if (bailout_type_ == EAGER || bailout_type_ == SOFT) { + compiled_code_->PrintDeoptLocation(bailout_id_); + } } @@ -639,9 +652,8 @@ Address Deoptimizer::GetDeoptimizationEntry(Isolate* isolate, ASSERT(mode == CALCULATE_ENTRY_ADDRESS); } DeoptimizerData* data = isolate->deoptimizer_data(); - MemoryChunk* base = (type == EAGER) - ? data->eager_deoptimization_entry_code_ - : data->lazy_deoptimization_entry_code_; + ASSERT(type < kBailoutTypesWithCodeEntry); + MemoryChunk* base = data->deopt_entry_code_[type]; return base->area_start() + (id * table_entry_size_); } @@ -650,9 +662,7 @@ int Deoptimizer::GetDeoptimizationId(Isolate* isolate, Address addr, BailoutType type) { DeoptimizerData* data = isolate->deoptimizer_data(); - MemoryChunk* base = (type == EAGER) - ? data->eager_deoptimization_entry_code_ - : data->lazy_deoptimization_entry_code_; + MemoryChunk* base = data->deopt_entry_code_[type]; Address start = base->area_start(); if (base == NULL || addr < start || @@ -2206,11 +2216,9 @@ void Deoptimizer::EnsureCodeForDeoptimizationEntry(Isolate* isolate, // cause us to emit relocation information for the external // references. This is fine because the deoptimizer's code section // isn't meant to be serialized at all. 
- ASSERT(type == EAGER || type == LAZY); + ASSERT(type == EAGER || type == SOFT || type == LAZY); DeoptimizerData* data = isolate->deoptimizer_data(); - int entry_count = (type == EAGER) - ? data->eager_deoptimization_entry_code_entries_ - : data->lazy_deoptimization_entry_code_entries_; + int entry_count = data->deopt_entry_code_entries_[type]; if (max_entry_id < entry_count) return; entry_count = Max(entry_count, Deoptimizer::kMinNumberOfEntries); while (max_entry_id >= entry_count) entry_count *= 2; @@ -2223,9 +2231,7 @@ void Deoptimizer::EnsureCodeForDeoptimizationEntry(Isolate* isolate, masm.GetCode(&desc); ASSERT(!RelocInfo::RequiresRelocation(desc)); - MemoryChunk* chunk = (type == EAGER) - ? data->eager_deoptimization_entry_code_ - : data->lazy_deoptimization_entry_code_; + MemoryChunk* chunk = data->deopt_entry_code_[type]; ASSERT(static_cast(Deoptimizer::GetMaxDeoptTableSize()) >= desc.instr_size); chunk->CommitArea(desc.instr_size); @@ -2233,11 +2239,7 @@ void Deoptimizer::EnsureCodeForDeoptimizationEntry(Isolate* isolate, static_cast(desc.instr_size)); CPU::FlushICache(chunk->area_start(), desc.instr_size); - if (type == EAGER) { - data->eager_deoptimization_entry_code_entries_ = entry_count; - } else { - data->lazy_deoptimization_entry_code_entries_ = entry_count; - } + data->deopt_entry_code_entries_[type] = entry_count; } diff --git a/src/deoptimizer.h b/src/deoptimizer.h index edf6c504f1..d4d3c48698 100644 --- a/src/deoptimizer.h +++ b/src/deoptimizer.h @@ -98,53 +98,34 @@ class OptimizedFunctionFilter BASE_EMBEDDED { class Deoptimizer; -class DeoptimizerData { - public: - explicit DeoptimizerData(MemoryAllocator* allocator); - ~DeoptimizerData(); - -#ifdef ENABLE_DEBUGGER_SUPPORT - void Iterate(ObjectVisitor* v); -#endif - - Code* FindDeoptimizingCode(Address addr); - void RemoveDeoptimizingCode(Code* code); - - private: - MemoryAllocator* allocator_; - int eager_deoptimization_entry_code_entries_; - int lazy_deoptimization_entry_code_entries_; - 
MemoryChunk* eager_deoptimization_entry_code_; - MemoryChunk* lazy_deoptimization_entry_code_; - Deoptimizer* current_; - -#ifdef ENABLE_DEBUGGER_SUPPORT - DeoptimizedFrameInfo* deoptimized_frame_info_; -#endif - - // List of deoptimized code which still have references from active stack - // frames. These code objects are needed by the deoptimizer when deoptimizing - // a frame for which the code object for the function function has been - // changed from the code present when deoptimizing was done. - DeoptimizingCodeListNode* deoptimizing_code_list_; - - friend class Deoptimizer; - - DISALLOW_COPY_AND_ASSIGN(DeoptimizerData); -}; - - class Deoptimizer : public Malloced { public: enum BailoutType { EAGER, LAZY, + SOFT, OSR, // This last bailout type is not really a bailout, but used by the // debugger to deoptimize stack frames to allow inspection. DEBUGGER }; + static const int kBailoutTypesWithCodeEntry = SOFT + 1; + + struct JumpTableEntry { + inline JumpTableEntry(Address entry, + Deoptimizer::BailoutType type, + bool frame) + : label(), + address(entry), + bailout_type(type), + needs_frame(frame) { } + Label label; + Address address; + Deoptimizer::BailoutType bailout_type; + bool needs_frame; + }; + static bool TraceEnabledFor(BailoutType deopt_type, StackFrame::Type frame_type); static const char* MessageFor(BailoutType type); @@ -626,6 +607,40 @@ class FrameDescription { }; +class DeoptimizerData { + public: + explicit DeoptimizerData(MemoryAllocator* allocator); + ~DeoptimizerData(); + +#ifdef ENABLE_DEBUGGER_SUPPORT + void Iterate(ObjectVisitor* v); +#endif + + Code* FindDeoptimizingCode(Address addr); + void RemoveDeoptimizingCode(Code* code); + + private: + MemoryAllocator* allocator_; + int deopt_entry_code_entries_[Deoptimizer::kBailoutTypesWithCodeEntry]; + MemoryChunk* deopt_entry_code_[Deoptimizer::kBailoutTypesWithCodeEntry]; + Deoptimizer* current_; + +#ifdef ENABLE_DEBUGGER_SUPPORT + DeoptimizedFrameInfo* deoptimized_frame_info_; +#endif + + // 
List of deoptimized code which still have references from active stack + // frames. These code objects are needed by the deoptimizer when deoptimizing + // a frame for which the code object for the function function has been + // changed from the code present when deoptimizing was done. + DeoptimizingCodeListNode* deoptimizing_code_list_; + + friend class Deoptimizer; + + DISALLOW_COPY_AND_ASSIGN(DeoptimizerData); +}; + + class TranslationBuffer BASE_EMBEDDED { public: explicit TranslationBuffer(Zone* zone) : contents_(256, zone) { } diff --git a/src/disassembler.cc b/src/disassembler.cc index bbb1fb8d8e..e41734babd 100644 --- a/src/disassembler.cc +++ b/src/disassembler.cc @@ -293,7 +293,14 @@ static int DecodeIt(Isolate* isolate, addr, Deoptimizer::LAZY); if (id == Deoptimizer::kNotDeoptimizationEntry) { - out.AddFormatted(" ;; %s", RelocInfo::RelocModeName(rmode)); + id = Deoptimizer::GetDeoptimizationId(isolate, + addr, + Deoptimizer::SOFT); + if (id == Deoptimizer::kNotDeoptimizationEntry) { + out.AddFormatted(" ;; %s", RelocInfo::RelocModeName(rmode)); + } else { + out.AddFormatted(" ;; soft deoptimization bailout %d", id); + } } else { out.AddFormatted(" ;; lazy deoptimization bailout %d", id); } diff --git a/src/ia32/builtins-ia32.cc b/src/ia32/builtins-ia32.cc index c6e10f4b41..2b45d7654d 100644 --- a/src/ia32/builtins-ia32.cc +++ b/src/ia32/builtins-ia32.cc @@ -657,6 +657,11 @@ void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) { } +void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) { + Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT); +} + + void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) { Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY); } diff --git a/src/ia32/deoptimizer-ia32.cc b/src/ia32/deoptimizer-ia32.cc index d1d9254094..21c7ff5dc1 100644 --- a/src/ia32/deoptimizer-ia32.cc +++ b/src/ia32/deoptimizer-ia32.cc @@ -659,9 +659,14 @@ void 
Deoptimizer::DoComputeJSFrame(TranslationIterator* iterator, // Set the continuation for the topmost frame. if (is_topmost && bailout_type_ != DEBUGGER) { Builtins* builtins = isolate_->builtins(); - Code* continuation = (bailout_type_ == EAGER) - ? builtins->builtin(Builtins::kNotifyDeoptimized) - : builtins->builtin(Builtins::kNotifyLazyDeoptimized); + Code* continuation = builtins->builtin(Builtins::kNotifyDeoptimized); + if (bailout_type_ == LAZY) { + continuation = builtins->builtin(Builtins::kNotifyLazyDeoptimized); + } else if (bailout_type_ == SOFT) { + continuation = builtins->builtin(Builtins::kNotifySoftDeoptimized); + } else { + ASSERT(bailout_type_ == EAGER); + } output_frame->SetContinuation( reinterpret_cast(continuation->entry())); } @@ -740,7 +745,7 @@ void Deoptimizer::EntryGenerator::Generate() { // Get the address of the location in the code object if possible // and compute the fp-to-sp delta in register edx. - if (type() == EAGER) { + if (type() == EAGER || type() == SOFT) { __ Set(ecx, Immediate(0)); __ lea(edx, Operand(esp, kSavedRegistersAreaSize + 1 * kPointerSize)); } else { @@ -793,7 +798,7 @@ void Deoptimizer::EntryGenerator::Generate() { __ fnclex(); // Remove the bailout id and the double registers from the stack. - if (type() == EAGER) { + if (type() == EAGER || type() == SOFT) { __ add(esp, Immediate(kDoubleRegsSize + kPointerSize)); } else { __ add(esp, Immediate(kDoubleRegsSize + 2 * kPointerSize)); diff --git a/src/ia32/lithium-codegen-ia32.cc b/src/ia32/lithium-codegen-ia32.cc index 3fbd044df5..7089c76224 100644 --- a/src/ia32/lithium-codegen-ia32.cc +++ b/src/ia32/lithium-codegen-ia32.cc @@ -385,9 +385,7 @@ bool LCodeGen::GenerateJumpTable() { for (int i = 0; i < jump_table_.length(); i++) { __ bind(&jump_table_[i].label); Address entry = jump_table_[i].address; - bool is_lazy_deopt = jump_table_[i].is_lazy_deopt; - Deoptimizer::BailoutType type = - is_lazy_deopt ? 
Deoptimizer::LAZY : Deoptimizer::EAGER; + Deoptimizer::BailoutType type = jump_table_[i].bailout_type; int id = Deoptimizer::GetDeoptimizationId(isolate(), entry, type); if (id == Deoptimizer::kNotDeoptimizationEntry) { Comment(";;; jump table entry %d.", i); @@ -396,7 +394,7 @@ bool LCodeGen::GenerateJumpTable() { } if (jump_table_[i].needs_frame) { __ push(Immediate(ExternalReference::ForDeoptEntry(entry))); - if (is_lazy_deopt) { + if (type == Deoptimizer::LAZY) { if (needs_frame_is_call.is_bound()) { __ jmp(&needs_frame_is_call); } else { @@ -441,7 +439,7 @@ bool LCodeGen::GenerateJumpTable() { } } } else { - if (is_lazy_deopt) { + if (type == Deoptimizer::LAZY) { __ call(entry, RelocInfo::RUNTIME_ENTRY); } else { __ jmp(entry, RelocInfo::RUNTIME_ENTRY); @@ -893,16 +891,15 @@ void LCodeGen::RegisterEnvironmentForDeoptimization( } -void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) { +void LCodeGen::DeoptimizeIf(Condition cc, + LEnvironment* environment, + Deoptimizer::BailoutType bailout_type) { RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt); ASSERT(environment->HasBeenRegistered()); // It's an error to deoptimize with the x87 fp stack in use. ASSERT(x87_stack_depth_ == 0); int id = environment->deoptimization_index(); ASSERT(info()->IsOptimizing() || info()->IsStub()); - Deoptimizer::BailoutType bailout_type = info()->IsStub() - ? 
Deoptimizer::LAZY - : Deoptimizer::EAGER; Address entry = Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type); if (entry == NULL) { @@ -948,9 +945,8 @@ void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) { } ASSERT(info()->IsStub() || frame_is_built_); - bool needs_lazy_deopt = info()->IsStub(); if (cc == no_condition && frame_is_built_) { - if (needs_lazy_deopt) { + if (bailout_type == Deoptimizer::LAZY) { __ call(entry, RelocInfo::RUNTIME_ENTRY); } else { __ jmp(entry, RelocInfo::RUNTIME_ENTRY); @@ -961,8 +957,10 @@ void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) { if (jump_table_.is_empty() || jump_table_.last().address != entry || jump_table_.last().needs_frame != !frame_is_built_ || - jump_table_.last().is_lazy_deopt != needs_lazy_deopt) { - JumpTableEntry table_entry(entry, !frame_is_built_, needs_lazy_deopt); + jump_table_.last().bailout_type != bailout_type) { + Deoptimizer::JumpTableEntry table_entry(entry, + bailout_type, + !frame_is_built_); jump_table_.Add(table_entry, zone()); } if (cc == no_condition) { @@ -974,6 +972,21 @@ void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) { } +void LCodeGen::DeoptimizeIf(Condition cc, + LEnvironment* environment) { + Deoptimizer::BailoutType bailout_type = info()->IsStub() + ? 
Deoptimizer::LAZY + : Deoptimizer::EAGER; + DeoptimizeIf(cc, environment, bailout_type); +} + + +void LCodeGen::SoftDeoptimize(LEnvironment* environment) { + ASSERT(!info()->IsStub()); + DeoptimizeIf(no_condition, environment, Deoptimizer::SOFT); +} + + void LCodeGen::RegisterDependentCodeForEmbeddedMaps(Handle code) { ZoneList > maps(1, zone()); int mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT); @@ -6316,7 +6329,11 @@ void LCodeGen::DoLazyBailout(LLazyBailout* instr) { void LCodeGen::DoDeoptimize(LDeoptimize* instr) { - DeoptimizeIf(no_condition, instr->environment()); + if (instr->hydrogen_value()->IsSoftDeoptimize()) { + SoftDeoptimize(instr->environment()); + } else { + DeoptimizeIf(no_condition, instr->environment()); + } } diff --git a/src/ia32/lithium-codegen-ia32.h b/src/ia32/lithium-codegen-ia32.h index 1fea25bdbf..9f8d4fd363 100644 --- a/src/ia32/lithium-codegen-ia32.h +++ b/src/ia32/lithium-codegen-ia32.h @@ -276,7 +276,11 @@ class LCodeGen BASE_EMBEDDED { void RegisterEnvironmentForDeoptimization(LEnvironment* environment, Safepoint::DeoptMode mode); + void DeoptimizeIf(Condition cc, + LEnvironment* environment, + Deoptimizer::BailoutType bailout_type); void DeoptimizeIf(Condition cc, LEnvironment* environment); + void SoftDeoptimize(LEnvironment* environment); void AddToTranslation(Translation* translation, LOperand* op, @@ -397,23 +401,11 @@ class LCodeGen BASE_EMBEDDED { MacroAssembler* const masm_; CompilationInfo* const info_; - struct JumpTableEntry { - inline JumpTableEntry(Address entry, bool frame, bool is_lazy) - : label(), - address(entry), - needs_frame(frame), - is_lazy_deopt(is_lazy) { } - Label label; - Address address; - bool needs_frame; - bool is_lazy_deopt; - }; - int current_block_; int current_instruction_; const ZoneList* instructions_; ZoneList deoptimizations_; - ZoneList jump_table_; + ZoneList jump_table_; ZoneList > deoptimization_literals_; ZoneList > prototype_maps_; ZoneList > transition_maps_; diff --git 
a/src/objects.cc b/src/objects.cc index 35abf27e6b..53774d788c 100644 --- a/src/objects.cc +++ b/src/objects.cc @@ -10147,12 +10147,15 @@ void Code::PrintDeoptLocation(int bailout_id) { RelocInfo* info = it.rinfo(); if (info->rmode() == RelocInfo::COMMENT) { last_comment = reinterpret_cast(info->data()); - } else if (last_comment != NULL && - bailout_id == Deoptimizer::GetDeoptimizationId( - GetIsolate(), info->target_address(), Deoptimizer::EAGER)) { - CHECK(RelocInfo::IsRuntimeEntry(info->rmode())); - PrintF(" %s\n", last_comment); - return; + } else if (last_comment != NULL) { + if ((bailout_id == Deoptimizer::GetDeoptimizationId( + GetIsolate(), info->target_address(), Deoptimizer::EAGER)) || + (bailout_id == Deoptimizer::GetDeoptimizationId( + GetIsolate(), info->target_address(), Deoptimizer::SOFT))) { + CHECK(RelocInfo::IsRuntimeEntry(info->rmode())); + PrintF(" %s\n", last_comment); + return; + } } } } diff --git a/src/runtime.cc b/src/runtime.cc index d3baaa440c..1875588825 100644 --- a/src/runtime.cc +++ b/src/runtime.cc @@ -7974,7 +7974,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_NotifyDeoptimized) { RUNTIME_ASSERT(frame->function()->IsJSFunction()); Handle function(JSFunction::cast(frame->function()), isolate); Handle optimized_code(function->code()); - RUNTIME_ASSERT(type != Deoptimizer::EAGER || function->IsOptimized()); + RUNTIME_ASSERT((type != Deoptimizer::EAGER && + type != Deoptimizer::SOFT) || function->IsOptimized()); // Avoid doing too much work when running with --always-opt and keep // the optimized code around. 
diff --git a/src/x64/builtins-x64.cc b/src/x64/builtins-x64.cc index e7daa7f9d6..b1b99b473b 100644 --- a/src/x64/builtins-x64.cc +++ b/src/x64/builtins-x64.cc @@ -729,6 +729,11 @@ void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) { } +void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) { + Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT); +} + + void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) { Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY); } diff --git a/src/x64/deoptimizer-x64.cc b/src/x64/deoptimizer-x64.cc index 927496c458..0e2b49ed02 100644 --- a/src/x64/deoptimizer-x64.cc +++ b/src/x64/deoptimizer-x64.cc @@ -530,9 +530,15 @@ void Deoptimizer::DoComputeJSFrame(TranslationIterator* iterator, // Set the continuation for the topmost frame. if (is_topmost && bailout_type_ != DEBUGGER) { - Code* continuation = (bailout_type_ == EAGER) - ? isolate_->builtins()->builtin(Builtins::kNotifyDeoptimized) - : isolate_->builtins()->builtin(Builtins::kNotifyLazyDeoptimized); + Builtins* builtins = isolate_->builtins(); + Code* continuation = builtins->builtin(Builtins::kNotifyDeoptimized); + if (bailout_type_ == LAZY) { + continuation = builtins->builtin(Builtins::kNotifyLazyDeoptimized); + } else if (bailout_type_ == SOFT) { + continuation = builtins->builtin(Builtins::kNotifySoftDeoptimized); + } else { + ASSERT(bailout_type_ == EAGER); + } output_frame->SetContinuation( reinterpret_cast(continuation->entry())); } @@ -618,7 +624,7 @@ void Deoptimizer::EntryGenerator::Generate() { // Get the address of the location in the code object if possible // and compute the fp-to-sp delta in register arg5. - if (type() == EAGER) { + if (type() == EAGER || type() == SOFT) { __ Set(arg_reg_4, 0); __ lea(arg5, Operand(rsp, kSavedRegistersAreaSize + 1 * kPointerSize)); } else { @@ -669,7 +675,7 @@ void Deoptimizer::EntryGenerator::Generate() { } // Remove the bailout id from the stack. 
- if (type() == EAGER) { + if (type() == EAGER || type() == SOFT) { __ addq(rsp, Immediate(kPointerSize)); } else { __ addq(rsp, Immediate(2 * kPointerSize)); diff --git a/src/x64/lithium-codegen-x64.cc b/src/x64/lithium-codegen-x64.cc index 225d2cceb3..cc48382537 100644 --- a/src/x64/lithium-codegen-x64.cc +++ b/src/x64/lithium-codegen-x64.cc @@ -301,9 +301,7 @@ bool LCodeGen::GenerateJumpTable() { for (int i = 0; i < jump_table_.length(); i++) { __ bind(&jump_table_[i].label); Address entry = jump_table_[i].address; - bool is_lazy_deopt = jump_table_[i].is_lazy_deopt; - Deoptimizer::BailoutType type = - is_lazy_deopt ? Deoptimizer::LAZY : Deoptimizer::EAGER; + Deoptimizer::BailoutType type = jump_table_[i].bailout_type; int id = Deoptimizer::GetDeoptimizationId(isolate(), entry, type); if (id == Deoptimizer::kNotDeoptimizationEntry) { Comment(";;; jump table entry %d.", i); @@ -312,7 +310,7 @@ bool LCodeGen::GenerateJumpTable() { } if (jump_table_[i].needs_frame) { __ movq(kScratchRegister, ExternalReference::ForDeoptEntry(entry)); - if (is_lazy_deopt) { + if (type == Deoptimizer::LAZY) { if (needs_frame_is_call.is_bound()) { __ jmp(&needs_frame_is_call); } else { @@ -348,7 +346,7 @@ bool LCodeGen::GenerateJumpTable() { } } } else { - if (is_lazy_deopt) { + if (type == Deoptimizer::LAZY) { __ call(entry, RelocInfo::RUNTIME_ENTRY); } else { __ jmp(entry, RelocInfo::RUNTIME_ENTRY); @@ -719,14 +717,13 @@ void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment, } -void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) { +void LCodeGen::DeoptimizeIf(Condition cc, + LEnvironment* environment, + Deoptimizer::BailoutType bailout_type) { RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt); ASSERT(environment->HasBeenRegistered()); int id = environment->deoptimization_index(); ASSERT(info()->IsOptimizing() || info()->IsStub()); - Deoptimizer::BailoutType bailout_type = info()->IsStub() - ? 
Deoptimizer::LAZY - : Deoptimizer::EAGER; Address entry = Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type); if (entry == NULL) { @@ -759,8 +756,10 @@ void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) { if (jump_table_.is_empty() || jump_table_.last().address != entry || jump_table_.last().needs_frame != !frame_is_built_ || - jump_table_.last().is_lazy_deopt != needs_lazy_deopt) { - JumpTableEntry table_entry(entry, !frame_is_built_, needs_lazy_deopt); + jump_table_.last().bailout_type != bailout_type) { + Deoptimizer::JumpTableEntry table_entry(entry, + bailout_type, + !frame_is_built_); jump_table_.Add(table_entry, zone()); } if (cc == no_condition) { @@ -772,6 +771,21 @@ void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) { } +void LCodeGen::DeoptimizeIf(Condition cc, + LEnvironment* environment) { + Deoptimizer::BailoutType bailout_type = info()->IsStub() + ? Deoptimizer::LAZY + : Deoptimizer::EAGER; + DeoptimizeIf(cc, environment, bailout_type); +} + + +void LCodeGen::SoftDeoptimize(LEnvironment* environment) { + ASSERT(!info()->IsStub()); + DeoptimizeIf(no_condition, environment, Deoptimizer::SOFT); +} + + void LCodeGen::RegisterDependentCodeForEmbeddedMaps(Handle code) { ZoneList > maps(1, zone()); int mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT); @@ -5414,7 +5428,11 @@ void LCodeGen::DoLazyBailout(LLazyBailout* instr) { void LCodeGen::DoDeoptimize(LDeoptimize* instr) { - DeoptimizeIf(no_condition, instr->environment()); + if (instr->hydrogen_value()->IsSoftDeoptimize()) { + SoftDeoptimize(instr->environment()); + } else { + DeoptimizeIf(no_condition, instr->environment()); + } } diff --git a/src/x64/lithium-codegen-x64.h b/src/x64/lithium-codegen-x64.h index aa0ab9c0dd..d0dd90eeb6 100644 --- a/src/x64/lithium-codegen-x64.h +++ b/src/x64/lithium-codegen-x64.h @@ -247,8 +247,11 @@ class LCodeGen BASE_EMBEDDED { int argc); void RegisterEnvironmentForDeoptimization(LEnvironment* environment, 
Safepoint::DeoptMode mode); + void DeoptimizeIf(Condition cc, + LEnvironment* environment, + Deoptimizer::BailoutType bailout_type); void DeoptimizeIf(Condition cc, LEnvironment* environment); - + void SoftDeoptimize(LEnvironment* environment); void AddToTranslation(Translation* translation, LOperand* op, bool is_tagged, @@ -340,18 +343,6 @@ class LCodeGen BASE_EMBEDDED { int* offset, AllocationSiteMode mode); - struct JumpTableEntry { - inline JumpTableEntry(Address entry, bool frame, bool is_lazy) - : label(), - address(entry), - needs_frame(frame), - is_lazy_deopt(is_lazy) { } - Label label; - Address address; - bool needs_frame; - bool is_lazy_deopt; - }; - void EnsureSpaceForLazyDeopt(int space_needed); void DoLoadKeyedExternalArray(LLoadKeyed* instr); void DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr); @@ -369,7 +360,7 @@ class LCodeGen BASE_EMBEDDED { int current_instruction_; const ZoneList* instructions_; ZoneList deoptimizations_; - ZoneList jump_table_; + ZoneList jump_table_; ZoneList > deoptimization_literals_; ZoneList > prototype_maps_; ZoneList > transition_maps_;