From b10d24ff2c685835e203075de4f11e12cd3d33cc Mon Sep 17 00:00:00 2001 From: rmcilroy Date: Fri, 18 Dec 2015 10:34:21 -0800 Subject: [PATCH] [Interpreter] Add basic deoptimization support from TurboFan to Ignition. Adds support for generating deoptimization translations for interpreter stack frames, and building interpreter frames for these translations when a function deopts. Also adds builtins for InterpreterNotifyDeoptimized which resume the function's continuation at the correct point in the interpreter after deopt. MIPS patch contributed by balazs.kilvady@igmtec.com BUG=v8:4280 LOG=N TEST=test-deoptimization.cc with --ignition and --turbo Review URL: https://codereview.chromium.org/1528913003 Cr-Commit-Position: refs/heads/master@{#32971} --- src/arm/builtins-arm.cc | 87 ++++++++++ src/arm64/builtins-arm64.cc | 88 ++++++++++ src/builtins.h | 180 ++++++++++---------- src/compiler/code-generator.cc | 4 +- src/deoptimizer.cc | 290 ++++++++++++++++++++++++++++++++- src/deoptimizer.h | 11 +- src/frames.cc | 43 +++-- src/frames.h | 6 + src/ia32/builtins-ia32.cc | 86 +++++++++- src/mips/builtins-mips.cc | 90 ++++++++++ src/mips64/builtins-mips64.cc | 90 ++++++++++ src/objects.cc | 11 ++ src/x64/builtins-x64.cc | 88 ++++++++++ 13 files changed, 963 insertions(+), 111 deletions(-) diff --git a/src/arm/builtins-arm.cc b/src/arm/builtins-arm.cc index 4d7e176a9b..b0d12c52ca 100644 --- a/src/arm/builtins-arm.cc +++ b/src/arm/builtins-arm.cc @@ -946,6 +946,93 @@ void Builtins::Generate_InterpreterPushArgsAndConstruct(MacroAssembler* masm) { } +static void Generate_InterpreterNotifyDeoptimizedHelper( + MacroAssembler* masm, Deoptimizer::BailoutType type) { + // Enter an internal frame. + { + FrameScope scope(masm, StackFrame::INTERNAL); + __ push(kInterpreterAccumulatorRegister); // Save accumulator register. + + // Pass the deoptimization type to the runtime system. 
+ __ mov(r1, Operand(Smi::FromInt(static_cast(type)))); + __ push(r1); + __ CallRuntime(Runtime::kNotifyDeoptimized, 1); + + __ pop(kInterpreterAccumulatorRegister); // Restore accumulator register. + // Tear down internal frame. + } + + // Drop state (we don't use these for interpreter deopts) and push PC at top + // of stack (to simulate initial call to bytecode handler in interpreter entry + // trampoline). + __ pop(r1); + __ Drop(1); + __ push(r1); + + // Initialize register file register and dispatch table register. + __ add(kInterpreterRegisterFileRegister, fp, + Operand(InterpreterFrameConstants::kRegisterFilePointerFromFp)); + __ LoadRoot(kInterpreterDispatchTableRegister, + Heap::kInterpreterTableRootIndex); + __ add(kInterpreterDispatchTableRegister, kInterpreterDispatchTableRegister, + Operand(FixedArray::kHeaderSize - kHeapObjectTag)); + + // Get the context from the frame. + // TODO(rmcilroy): Update interpreter frame to expect current context at the + // context slot instead of the function context. + __ ldr(kContextRegister, + MemOperand(kInterpreterRegisterFileRegister, + InterpreterFrameConstants::kContextFromRegisterPointer)); + + // Get the bytecode array pointer from the frame. + __ ldr(r1, + MemOperand(kInterpreterRegisterFileRegister, + InterpreterFrameConstants::kFunctionFromRegisterPointer)); + __ ldr(r1, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset)); + __ ldr(kInterpreterBytecodeArrayRegister, + FieldMemOperand(r1, SharedFunctionInfo::kFunctionDataOffset)); + + if (FLAG_debug_code) { + // Check function data field is actually a BytecodeArray object. + __ SmiTst(kInterpreterBytecodeArrayRegister); + __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry); + __ CompareObjectType(kInterpreterBytecodeArrayRegister, r1, no_reg, + BYTECODE_ARRAY_TYPE); + __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry); + } + + // Get the target bytecode offset from the frame. 
+ __ ldr(kInterpreterBytecodeOffsetRegister, + MemOperand( + kInterpreterRegisterFileRegister, + InterpreterFrameConstants::kBytecodeOffsetFromRegisterPointer)); + __ SmiUntag(kInterpreterBytecodeOffsetRegister); + + // Dispatch to the target bytecode. + __ ldrb(r1, MemOperand(kInterpreterBytecodeArrayRegister, + kInterpreterBytecodeOffsetRegister)); + __ ldr(ip, MemOperand(kInterpreterDispatchTableRegister, r1, LSL, + kPointerSizeLog2)); + __ add(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag)); + __ mov(pc, ip); +} + + +void Builtins::Generate_InterpreterNotifyDeoptimized(MacroAssembler* masm) { + Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::EAGER); +} + + +void Builtins::Generate_InterpreterNotifySoftDeoptimized(MacroAssembler* masm) { + Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::SOFT); +} + + +void Builtins::Generate_InterpreterNotifyLazyDeoptimized(MacroAssembler* masm) { + Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::LAZY); +} + + void Builtins::Generate_CompileLazy(MacroAssembler* masm) { CallRuntimePassFunction(masm, Runtime::kCompileLazy); GenerateTailCallToReturnedCode(masm); diff --git a/src/arm64/builtins-arm64.cc b/src/arm64/builtins-arm64.cc index 18de7f8a09..1da38d018b 100644 --- a/src/arm64/builtins-arm64.cc +++ b/src/arm64/builtins-arm64.cc @@ -905,6 +905,94 @@ void Builtins::Generate_InterpreterExitTrampoline(MacroAssembler* masm) { } +static void Generate_InterpreterNotifyDeoptimizedHelper( + MacroAssembler* masm, Deoptimizer::BailoutType type) { + // Enter an internal frame. + { + FrameScope scope(masm, StackFrame::INTERNAL); + __ Push(kInterpreterAccumulatorRegister); // Save accumulator register. + + // Pass the deoptimization type to the runtime system. + __ Mov(x1, Operand(Smi::FromInt(static_cast(type)))); + __ Push(x1); + __ CallRuntime(Runtime::kNotifyDeoptimized, 1); + + __ Pop(kInterpreterAccumulatorRegister); // Restore accumulator register. + // Tear down internal frame. 
+ } + + // Drop state (we don't use these for interpreter deopts) and push PC at top + // of stack (to simulate initial call to bytecode handler in interpreter entry + // trampoline). + __ Pop(x1); + __ Drop(1); + __ Push(x1); + + // Initialize register file register and dispatch table register. + __ Add(kInterpreterRegisterFileRegister, fp, + Operand(InterpreterFrameConstants::kRegisterFilePointerFromFp)); + __ LoadRoot(kInterpreterDispatchTableRegister, + Heap::kInterpreterTableRootIndex); + __ Add(kInterpreterDispatchTableRegister, kInterpreterDispatchTableRegister, + Operand(FixedArray::kHeaderSize - kHeapObjectTag)); + + + // Get the context from the frame. + // TODO(rmcilroy): Update interpreter frame to expect current context at the + // context slot instead of the function context. + __ Ldr(kContextRegister, + MemOperand(kInterpreterRegisterFileRegister, + InterpreterFrameConstants::kContextFromRegisterPointer)); + + // Get the bytecode array pointer from the frame. + __ Ldr(x1, + MemOperand(kInterpreterRegisterFileRegister, + InterpreterFrameConstants::kFunctionFromRegisterPointer)); + __ Ldr(x1, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset)); + __ Ldr(kInterpreterBytecodeArrayRegister, + FieldMemOperand(x1, SharedFunctionInfo::kFunctionDataOffset)); + + if (FLAG_debug_code) { + // Check function data field is actually a BytecodeArray object. + __ AssertNotSmi(kInterpreterBytecodeArrayRegister, + kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry); + __ CompareObjectType(kInterpreterBytecodeArrayRegister, x1, x1, + BYTECODE_ARRAY_TYPE); + __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry); + } + + // Get the target bytecode offset from the frame. + __ Ldr(kInterpreterBytecodeOffsetRegister, + MemOperand( + kInterpreterRegisterFileRegister, + InterpreterFrameConstants::kBytecodeOffsetFromRegisterPointer)); + __ SmiUntag(kInterpreterBytecodeOffsetRegister); + + // Dispatch to the target bytecode. 
+ __ Ldrb(x1, MemOperand(kInterpreterBytecodeArrayRegister, + kInterpreterBytecodeOffsetRegister)); + __ Mov(x1, Operand(x1, LSL, kPointerSizeLog2)); + __ Ldr(ip0, MemOperand(kInterpreterDispatchTableRegister, x1)); + __ Add(ip0, ip0, Operand(Code::kHeaderSize - kHeapObjectTag)); + __ Jump(ip0); +} + + +void Builtins::Generate_InterpreterNotifyDeoptimized(MacroAssembler* masm) { + Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::EAGER); +} + + +void Builtins::Generate_InterpreterNotifySoftDeoptimized(MacroAssembler* masm) { + Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::SOFT); +} + + +void Builtins::Generate_InterpreterNotifyLazyDeoptimized(MacroAssembler* masm) { + Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::LAZY); +} + + void Builtins::Generate_CompileLazy(MacroAssembler* masm) { CallRuntimePassFunction(masm, Runtime::kCompileLazy); GenerateTailCallToReturnedCode(masm); diff --git a/src/builtins.h b/src/builtins.h index f8bb29bf91..e0154e1993 100644 --- a/src/builtins.h +++ b/src/builtins.h @@ -97,93 +97,96 @@ inline bool operator&(BuiltinExtraArguments lhs, BuiltinExtraArguments rhs) { V(RestrictedStrictArgumentsPropertiesThrower, kNone) // Define list of builtins implemented in assembly. 
-#define BUILTIN_LIST_A(V) \ - V(ArgumentsAdaptorTrampoline, BUILTIN, UNINITIALIZED, kNoExtraICState) \ - \ - V(ConstructedNonConstructable, BUILTIN, UNINITIALIZED, kNoExtraICState) \ - \ - V(CallFunction_ReceiverIsNullOrUndefined, BUILTIN, UNINITIALIZED, \ - kNoExtraICState) \ - V(CallFunction_ReceiverIsNotNullOrUndefined, BUILTIN, UNINITIALIZED, \ - kNoExtraICState) \ - V(CallFunction_ReceiverIsAny, BUILTIN, UNINITIALIZED, kNoExtraICState) \ - V(Call_ReceiverIsNullOrUndefined, BUILTIN, UNINITIALIZED, kNoExtraICState) \ - V(Call_ReceiverIsNotNullOrUndefined, BUILTIN, UNINITIALIZED, \ - kNoExtraICState) \ - V(Call_ReceiverIsAny, BUILTIN, UNINITIALIZED, kNoExtraICState) \ - \ - V(ConstructFunction, BUILTIN, UNINITIALIZED, kNoExtraICState) \ - V(ConstructProxy, BUILTIN, UNINITIALIZED, kNoExtraICState) \ - V(Construct, BUILTIN, UNINITIALIZED, kNoExtraICState) \ - \ - V(Apply, BUILTIN, UNINITIALIZED, kNoExtraICState) \ - \ - V(HandleFastApiCall, BUILTIN, UNINITIALIZED, kNoExtraICState) \ - \ - V(InOptimizationQueue, BUILTIN, UNINITIALIZED, kNoExtraICState) \ - V(JSConstructStubGeneric, BUILTIN, UNINITIALIZED, kNoExtraICState) \ - V(JSBuiltinsConstructStub, BUILTIN, UNINITIALIZED, kNoExtraICState) \ - V(JSConstructStubApi, BUILTIN, UNINITIALIZED, kNoExtraICState) \ - V(JSEntryTrampoline, BUILTIN, UNINITIALIZED, kNoExtraICState) \ - V(JSConstructEntryTrampoline, BUILTIN, UNINITIALIZED, kNoExtraICState) \ - V(CompileLazy, BUILTIN, UNINITIALIZED, kNoExtraICState) \ - V(CompileOptimized, BUILTIN, UNINITIALIZED, kNoExtraICState) \ - V(CompileOptimizedConcurrent, BUILTIN, UNINITIALIZED, kNoExtraICState) \ - V(NotifyDeoptimized, BUILTIN, UNINITIALIZED, kNoExtraICState) \ - V(NotifySoftDeoptimized, BUILTIN, UNINITIALIZED, kNoExtraICState) \ - V(NotifyLazyDeoptimized, BUILTIN, UNINITIALIZED, kNoExtraICState) \ - V(NotifyStubFailure, BUILTIN, UNINITIALIZED, kNoExtraICState) \ - V(NotifyStubFailureSaveDoubles, BUILTIN, UNINITIALIZED, kNoExtraICState) \ - \ - 
V(InterpreterEntryTrampoline, BUILTIN, UNINITIALIZED, kNoExtraICState) \ - V(InterpreterExitTrampoline, BUILTIN, UNINITIALIZED, kNoExtraICState) \ - V(InterpreterPushArgsAndCall, BUILTIN, UNINITIALIZED, kNoExtraICState) \ - V(InterpreterPushArgsAndConstruct, BUILTIN, UNINITIALIZED, kNoExtraICState) \ - \ - V(LoadIC_Miss, BUILTIN, UNINITIALIZED, kNoExtraICState) \ - V(KeyedLoadIC_Miss, BUILTIN, UNINITIALIZED, kNoExtraICState) \ - V(StoreIC_Miss, BUILTIN, UNINITIALIZED, kNoExtraICState) \ - V(KeyedStoreIC_Miss, BUILTIN, UNINITIALIZED, kNoExtraICState) \ - V(LoadIC_Getter_ForDeopt, LOAD_IC, MONOMORPHIC, kNoExtraICState) \ - V(KeyedLoadIC_Megamorphic, KEYED_LOAD_IC, MEGAMORPHIC, kNoExtraICState) \ - \ - V(KeyedLoadIC_Megamorphic_Strong, KEYED_LOAD_IC, MEGAMORPHIC, \ - LoadICState::kStrongModeState) \ - \ - V(StoreIC_Setter_ForDeopt, STORE_IC, MONOMORPHIC, \ - StoreICState::kStrictModeState) \ - \ - V(KeyedStoreIC_Initialize, KEYED_STORE_IC, UNINITIALIZED, kNoExtraICState) \ - V(KeyedStoreIC_PreMonomorphic, KEYED_STORE_IC, PREMONOMORPHIC, \ - kNoExtraICState) \ - V(KeyedStoreIC_Megamorphic, KEYED_STORE_IC, MEGAMORPHIC, kNoExtraICState) \ - \ - V(KeyedStoreIC_Initialize_Strict, KEYED_STORE_IC, UNINITIALIZED, \ - StoreICState::kStrictModeState) \ - V(KeyedStoreIC_PreMonomorphic_Strict, KEYED_STORE_IC, PREMONOMORPHIC, \ - StoreICState::kStrictModeState) \ - V(KeyedStoreIC_Megamorphic_Strict, KEYED_STORE_IC, MEGAMORPHIC, \ - StoreICState::kStrictModeState) \ - \ - V(FunctionCall, BUILTIN, UNINITIALIZED, kNoExtraICState) \ - V(FunctionApply, BUILTIN, UNINITIALIZED, kNoExtraICState) \ - V(ReflectApply, BUILTIN, UNINITIALIZED, kNoExtraICState) \ - V(ReflectConstruct, BUILTIN, UNINITIALIZED, kNoExtraICState) \ - \ - V(InternalArrayCode, BUILTIN, UNINITIALIZED, kNoExtraICState) \ - V(ArrayCode, BUILTIN, UNINITIALIZED, kNoExtraICState) \ - \ - V(StringConstructor, BUILTIN, UNINITIALIZED, kNoExtraICState) \ - V(StringConstructor_ConstructStub, BUILTIN, UNINITIALIZED, 
kNoExtraICState) \ - \ - V(OnStackReplacement, BUILTIN, UNINITIALIZED, kNoExtraICState) \ - V(InterruptCheck, BUILTIN, UNINITIALIZED, kNoExtraICState) \ - V(OsrAfterStackCheck, BUILTIN, UNINITIALIZED, kNoExtraICState) \ - V(StackCheck, BUILTIN, UNINITIALIZED, kNoExtraICState) \ - \ - V(MarkCodeAsToBeExecutedOnce, BUILTIN, UNINITIALIZED, kNoExtraICState) \ - V(MarkCodeAsExecutedOnce, BUILTIN, UNINITIALIZED, kNoExtraICState) \ - V(MarkCodeAsExecutedTwice, BUILTIN, UNINITIALIZED, kNoExtraICState) \ +#define BUILTIN_LIST_A(V) \ + V(ArgumentsAdaptorTrampoline, BUILTIN, UNINITIALIZED, kNoExtraICState) \ + \ + V(ConstructedNonConstructable, BUILTIN, UNINITIALIZED, kNoExtraICState) \ + \ + V(CallFunction_ReceiverIsNullOrUndefined, BUILTIN, UNINITIALIZED, \ + kNoExtraICState) \ + V(CallFunction_ReceiverIsNotNullOrUndefined, BUILTIN, UNINITIALIZED, \ + kNoExtraICState) \ + V(CallFunction_ReceiverIsAny, BUILTIN, UNINITIALIZED, kNoExtraICState) \ + V(Call_ReceiverIsNullOrUndefined, BUILTIN, UNINITIALIZED, kNoExtraICState) \ + V(Call_ReceiverIsNotNullOrUndefined, BUILTIN, UNINITIALIZED, \ + kNoExtraICState) \ + V(Call_ReceiverIsAny, BUILTIN, UNINITIALIZED, kNoExtraICState) \ + \ + V(ConstructFunction, BUILTIN, UNINITIALIZED, kNoExtraICState) \ + V(ConstructProxy, BUILTIN, UNINITIALIZED, kNoExtraICState) \ + V(Construct, BUILTIN, UNINITIALIZED, kNoExtraICState) \ + \ + V(Apply, BUILTIN, UNINITIALIZED, kNoExtraICState) \ + \ + V(HandleFastApiCall, BUILTIN, UNINITIALIZED, kNoExtraICState) \ + \ + V(InOptimizationQueue, BUILTIN, UNINITIALIZED, kNoExtraICState) \ + V(JSConstructStubGeneric, BUILTIN, UNINITIALIZED, kNoExtraICState) \ + V(JSBuiltinsConstructStub, BUILTIN, UNINITIALIZED, kNoExtraICState) \ + V(JSConstructStubApi, BUILTIN, UNINITIALIZED, kNoExtraICState) \ + V(JSEntryTrampoline, BUILTIN, UNINITIALIZED, kNoExtraICState) \ + V(JSConstructEntryTrampoline, BUILTIN, UNINITIALIZED, kNoExtraICState) \ + V(CompileLazy, BUILTIN, UNINITIALIZED, kNoExtraICState) \ + 
V(CompileOptimized, BUILTIN, UNINITIALIZED, kNoExtraICState) \ + V(CompileOptimizedConcurrent, BUILTIN, UNINITIALIZED, kNoExtraICState) \ + V(NotifyDeoptimized, BUILTIN, UNINITIALIZED, kNoExtraICState) \ + V(NotifySoftDeoptimized, BUILTIN, UNINITIALIZED, kNoExtraICState) \ + V(NotifyLazyDeoptimized, BUILTIN, UNINITIALIZED, kNoExtraICState) \ + V(NotifyStubFailure, BUILTIN, UNINITIALIZED, kNoExtraICState) \ + V(NotifyStubFailureSaveDoubles, BUILTIN, UNINITIALIZED, kNoExtraICState) \ + \ + V(InterpreterEntryTrampoline, BUILTIN, UNINITIALIZED, kNoExtraICState) \ + V(InterpreterExitTrampoline, BUILTIN, UNINITIALIZED, kNoExtraICState) \ + V(InterpreterPushArgsAndCall, BUILTIN, UNINITIALIZED, kNoExtraICState) \ + V(InterpreterPushArgsAndConstruct, BUILTIN, UNINITIALIZED, kNoExtraICState) \ + V(InterpreterNotifyDeoptimized, BUILTIN, UNINITIALIZED, kNoExtraICState) \ + V(InterpreterNotifySoftDeoptimized, BUILTIN, UNINITIALIZED, kNoExtraICState) \ + V(InterpreterNotifyLazyDeoptimized, BUILTIN, UNINITIALIZED, kNoExtraICState) \ + \ + V(LoadIC_Miss, BUILTIN, UNINITIALIZED, kNoExtraICState) \ + V(KeyedLoadIC_Miss, BUILTIN, UNINITIALIZED, kNoExtraICState) \ + V(StoreIC_Miss, BUILTIN, UNINITIALIZED, kNoExtraICState) \ + V(KeyedStoreIC_Miss, BUILTIN, UNINITIALIZED, kNoExtraICState) \ + V(LoadIC_Getter_ForDeopt, LOAD_IC, MONOMORPHIC, kNoExtraICState) \ + V(KeyedLoadIC_Megamorphic, KEYED_LOAD_IC, MEGAMORPHIC, kNoExtraICState) \ + \ + V(KeyedLoadIC_Megamorphic_Strong, KEYED_LOAD_IC, MEGAMORPHIC, \ + LoadICState::kStrongModeState) \ + \ + V(StoreIC_Setter_ForDeopt, STORE_IC, MONOMORPHIC, \ + StoreICState::kStrictModeState) \ + \ + V(KeyedStoreIC_Initialize, KEYED_STORE_IC, UNINITIALIZED, kNoExtraICState) \ + V(KeyedStoreIC_PreMonomorphic, KEYED_STORE_IC, PREMONOMORPHIC, \ + kNoExtraICState) \ + V(KeyedStoreIC_Megamorphic, KEYED_STORE_IC, MEGAMORPHIC, kNoExtraICState) \ + \ + V(KeyedStoreIC_Initialize_Strict, KEYED_STORE_IC, UNINITIALIZED, \ + StoreICState::kStrictModeState) \ + 
V(KeyedStoreIC_PreMonomorphic_Strict, KEYED_STORE_IC, PREMONOMORPHIC, \ + StoreICState::kStrictModeState) \ + V(KeyedStoreIC_Megamorphic_Strict, KEYED_STORE_IC, MEGAMORPHIC, \ + StoreICState::kStrictModeState) \ + \ + V(FunctionCall, BUILTIN, UNINITIALIZED, kNoExtraICState) \ + V(FunctionApply, BUILTIN, UNINITIALIZED, kNoExtraICState) \ + V(ReflectApply, BUILTIN, UNINITIALIZED, kNoExtraICState) \ + V(ReflectConstruct, BUILTIN, UNINITIALIZED, kNoExtraICState) \ + \ + V(InternalArrayCode, BUILTIN, UNINITIALIZED, kNoExtraICState) \ + V(ArrayCode, BUILTIN, UNINITIALIZED, kNoExtraICState) \ + \ + V(StringConstructor, BUILTIN, UNINITIALIZED, kNoExtraICState) \ + V(StringConstructor_ConstructStub, BUILTIN, UNINITIALIZED, kNoExtraICState) \ + \ + V(OnStackReplacement, BUILTIN, UNINITIALIZED, kNoExtraICState) \ + V(InterruptCheck, BUILTIN, UNINITIALIZED, kNoExtraICState) \ + V(OsrAfterStackCheck, BUILTIN, UNINITIALIZED, kNoExtraICState) \ + V(StackCheck, BUILTIN, UNINITIALIZED, kNoExtraICState) \ + \ + V(MarkCodeAsToBeExecutedOnce, BUILTIN, UNINITIALIZED, kNoExtraICState) \ + V(MarkCodeAsExecutedOnce, BUILTIN, UNINITIALIZED, kNoExtraICState) \ + V(MarkCodeAsExecutedTwice, BUILTIN, UNINITIALIZED, kNoExtraICState) \ CODE_AGE_LIST_WITH_ARG(DECLARE_CODE_AGE_BUILTIN, V) // Define list of builtin handlers implemented in assembly. 
@@ -373,6 +376,9 @@ class Builtins { static void Generate_InterpreterExitTrampoline(MacroAssembler* masm); static void Generate_InterpreterPushArgsAndCall(MacroAssembler* masm); static void Generate_InterpreterPushArgsAndConstruct(MacroAssembler* masm); + static void Generate_InterpreterNotifyDeoptimized(MacroAssembler* masm); + static void Generate_InterpreterNotifySoftDeoptimized(MacroAssembler* masm); + static void Generate_InterpreterNotifyLazyDeoptimized(MacroAssembler* masm); #define DECLARE_CODE_AGE_BUILTIN_GENERATOR(C) \ static void Generate_Make##C##CodeYoungAgainEvenMarking( \ diff --git a/src/compiler/code-generator.cc b/src/compiler/code-generator.cc index 2aab6823bb..776b724687 100644 --- a/src/compiler/code-generator.cc +++ b/src/compiler/code-generator.cc @@ -559,7 +559,9 @@ void CodeGenerator::BuildTranslationForFrameStateDescriptor( (1 + descriptor->parameters_count()))); break; case FrameStateType::kInterpretedFunction: - // TODO(rmcilroy): Implement interpreted function translation. + translation->BeginInterpretedFrame( + descriptor->bailout_id(), shared_info_id, + static_cast(descriptor->locals_count())); break; case FrameStateType::kArgumentsAdaptor: translation->BeginArgumentsAdaptorFrame( diff --git a/src/deoptimizer.cc b/src/deoptimizer.cc index 23f26c724a..d214aeae35 100644 --- a/src/deoptimizer.cc +++ b/src/deoptimizer.cc @@ -763,6 +763,10 @@ void Deoptimizer::DoComputeOutputFrames() { DoComputeJSFrame(frame_index); jsframe_count_++; break; + case TranslatedFrame::kInterpretedFunction: + DoComputeInterpretedFrame(frame_index); + jsframe_count_++; + break; case TranslatedFrame::kArgumentsAdaptor: DoComputeArgumentsAdaptorFrame(frame_index); break; @@ -828,7 +832,7 @@ void Deoptimizer::DoComputeJSFrame(int frame_index) { // The 'fixed' part of the frame consists of the incoming parameters and // the part described by JavaScriptFrameConstants. 
- unsigned fixed_frame_size = ComputeFixedSize(function); + unsigned fixed_frame_size = ComputeJavascriptFixedSize(function); unsigned input_frame_size = input_->GetFrameSize(); unsigned output_frame_size = height_in_bytes + fixed_frame_size; @@ -1022,6 +1026,222 @@ void Deoptimizer::DoComputeJSFrame(int frame_index) { } +void Deoptimizer::DoComputeInterpretedFrame(int frame_index) { + TranslatedFrame* translated_frame = + &(translated_state_.frames()[frame_index]); + TranslatedFrame::iterator value_iterator = translated_frame->begin(); + int input_index = 0; + + BailoutId bytecode_offset = translated_frame->node_id(); + unsigned height = translated_frame->height(); + unsigned height_in_bytes = height * kPointerSize; + JSFunction* function = JSFunction::cast(value_iterator->GetRawValue()); + value_iterator++; + input_index++; + if (trace_scope_ != NULL) { + PrintF(trace_scope_->file(), " translating interpreted frame "); + function->PrintName(trace_scope_->file()); + PrintF(trace_scope_->file(), " => bytecode_offset=%d, height=%d\n", + bytecode_offset.ToInt(), height_in_bytes); + } + + // The 'fixed' part of the frame consists of the incoming parameters and + // the part described by InterpreterFrameConstants. + unsigned fixed_frame_size = ComputeInterpretedFixedSize(function); + unsigned input_frame_size = input_->GetFrameSize(); + unsigned output_frame_size = height_in_bytes + fixed_frame_size; + + // Allocate and store the output frame description. 
+ FrameDescription* output_frame = + new (output_frame_size) FrameDescription(output_frame_size, function); + output_frame->SetFrameType(StackFrame::INTERPRETED); + + bool is_bottommost = (0 == frame_index); + bool is_topmost = (output_count_ - 1 == frame_index); + CHECK(frame_index >= 0 && frame_index < output_count_); + CHECK_NULL(output_[frame_index]); + output_[frame_index] = output_frame; + + // The top address for the bottommost output frame can be computed from + // the input frame pointer and the output frame's height. For all + // subsequent output frames, it can be computed from the previous one's + // top address and the current frame's size. + Register fp_reg = InterpretedFrame::fp_register(); + intptr_t top_address; + if (is_bottommost) { + // Subtract interpreter fixed frame size for the context function slots, + // new,target and bytecode offset. + top_address = input_->GetRegister(fp_reg.code()) - + InterpreterFrameConstants::kFixedFrameSizeFromFp - + height_in_bytes; + } else { + top_address = output_[frame_index - 1]->GetTop() - output_frame_size; + } + output_frame->SetTop(top_address); + + // Compute the incoming parameter translation. + int parameter_count = + function->shared()->internal_formal_parameter_count() + 1; + unsigned output_offset = output_frame_size; + unsigned input_offset = input_frame_size; + for (int i = 0; i < parameter_count; ++i) { + output_offset -= kPointerSize; + WriteTranslatedValueToOutput(&value_iterator, &input_index, frame_index, + output_offset); + } + input_offset -= (parameter_count * kPointerSize); + + // There are no translation commands for the caller's pc and fp, the + // context, the function, new.target and the bytecode offset. Synthesize + // their values and set them up + // explicitly. + // + // The caller's pc for the bottommost output frame is the same as in the + // input frame. For all subsequent output frames, it can be read from the + // previous one. 
This frame's pc can be computed from the non-optimized + // function code and AST id of the bailout. + output_offset -= kPCOnStackSize; + input_offset -= kPCOnStackSize; + intptr_t value; + if (is_bottommost) { + value = input_->GetFrameSlot(input_offset); + } else { + value = output_[frame_index - 1]->GetPc(); + } + output_frame->SetCallerPc(output_offset, value); + DebugPrintOutputSlot(value, frame_index, output_offset, "caller's pc\n"); + + // The caller's frame pointer for the bottommost output frame is the same + // as in the input frame. For all subsequent output frames, it can be + // read from the previous one. Also compute and set this frame's frame + // pointer. + output_offset -= kFPOnStackSize; + input_offset -= kFPOnStackSize; + if (is_bottommost) { + value = input_->GetFrameSlot(input_offset); + } else { + value = output_[frame_index - 1]->GetFp(); + } + output_frame->SetCallerFp(output_offset, value); + intptr_t fp_value = top_address + output_offset; + DCHECK(!is_bottommost || + (input_->GetRegister(fp_reg.code()) + + has_alignment_padding_ * kPointerSize) == fp_value); + output_frame->SetFp(fp_value); + if (is_topmost) output_frame->SetRegister(fp_reg.code(), fp_value); + DebugPrintOutputSlot(value, frame_index, output_offset, "caller's fp\n"); + DCHECK(!is_bottommost || !has_alignment_padding_ || + (fp_value & kPointerSize) != 0); + + if (FLAG_enable_embedded_constant_pool) { + // For the bottommost output frame the constant pool pointer can be gotten + // from the input frame. For subsequent output frames, it can be read from + // the previous frame. 
+ output_offset -= kPointerSize; + input_offset -= kPointerSize; + if (is_bottommost) { + value = input_->GetFrameSlot(input_offset); + } else { + value = output_[frame_index - 1]->GetConstantPool(); + } + output_frame->SetCallerConstantPool(output_offset, value); + DebugPrintOutputSlot(value, frame_index, output_offset, + "caller's constant_pool\n"); + } + + // For the bottommost output frame the context can be gotten from the input + // frame. For all subsequent output frames it can be gotten from the function + // so long as we don't inline functions that need local contexts. + Register context_reg = InterpretedFrame::context_register(); + output_offset -= kPointerSize; + input_offset -= kPointerSize; + // Read the context from the translations. + Object* context = value_iterator->GetRawValue(); + // The context should not be a placeholder for a materialized object. + CHECK(context != isolate_->heap()->arguments_marker()); + value = reinterpret_cast(context); + output_frame->SetContext(value); + if (is_topmost) output_frame->SetRegister(context_reg.code(), value); + WriteValueToOutput(context, input_index, frame_index, output_offset, + "context "); + value_iterator++; + input_index++; + + // The function was mentioned explicitly in the BEGIN_FRAME. + output_offset -= kPointerSize; + input_offset -= kPointerSize; + value = reinterpret_cast(function); + // The function for the bottommost output frame should also agree with the + // input frame. + DCHECK(!is_bottommost || input_->GetFrameSlot(input_offset) == value); + WriteValueToOutput(function, 0, frame_index, output_offset, "function "); + + // TODO(rmcilroy): Deal with new.target correctly - currently just set it to + // undefined. + output_offset -= kPointerSize; + input_offset -= kPointerSize; + Object* new_target = isolate_->heap()->undefined_value(); + WriteValueToOutput(new_target, 0, frame_index, output_offset, "new_target "); + + // The bytecode offset was mentioned explicitly in the BEGIN_FRAME. 
+ output_offset -= kPointerSize; + input_offset -= kPointerSize; + int raw_bytecode_offset = + BytecodeArray::kHeaderSize - kHeapObjectTag + bytecode_offset.ToInt(); + Smi* smi_bytecode_offset = Smi::FromInt(raw_bytecode_offset); + WriteValueToOutput(smi_bytecode_offset, 0, frame_index, output_offset, + "bytecode offset "); + + // Translate the rest of the interpreter registers in the frame. + for (unsigned i = 0; i < height; ++i) { + output_offset -= kPointerSize; + WriteTranslatedValueToOutput(&value_iterator, &input_index, frame_index, + output_offset); + } + CHECK_EQ(0u, output_offset); + + // Set the accumulator register. + output_frame->SetRegister( + kInterpreterAccumulatorRegister.code(), + reinterpret_cast(value_iterator->GetRawValue())); + value_iterator++; + + Builtins* builtins = isolate_->builtins(); + Code* trampoline = builtins->builtin(Builtins::kInterpreterEntryTrampoline); + output_frame->SetPc(reinterpret_cast(trampoline->entry())); + output_frame->SetState(0); + + // Update constant pool. + if (FLAG_enable_embedded_constant_pool) { + intptr_t constant_pool_value = + reinterpret_cast(trampoline->constant_pool()); + output_frame->SetConstantPool(constant_pool_value); + if (is_topmost) { + Register constant_pool_reg = + InterpretedFrame::constant_pool_pointer_register(); + output_frame->SetRegister(constant_pool_reg.code(), constant_pool_value); + } + } + + // Set the continuation for the topmost frame. 
+ if (is_topmost && bailout_type_ != DEBUGGER) { + Code* continuation = + builtins->builtin(Builtins::kInterpreterNotifyDeoptimized); + if (bailout_type_ == LAZY) { + continuation = + builtins->builtin(Builtins::kInterpreterNotifyLazyDeoptimized); + } else if (bailout_type_ == SOFT) { + continuation = + builtins->builtin(Builtins::kInterpreterNotifySoftDeoptimized); + } else { + CHECK_EQ(bailout_type_, EAGER); + } + output_frame->SetContinuation( + reinterpret_cast(continuation->entry())); + } +} + + void Deoptimizer::DoComputeArgumentsAdaptorFrame(int frame_index) { TranslatedFrame* translated_frame = &(translated_state_.frames()[frame_index]); @@ -1756,7 +1976,7 @@ void Deoptimizer::DebugPrintOutputSlot(intptr_t value, int frame_index, unsigned Deoptimizer::ComputeInputFrameSize() const { - unsigned fixed_size = ComputeFixedSize(function_); + unsigned fixed_size = ComputeJavascriptFixedSize(function_); // The fp-to-sp delta already takes the context, constant pool pointer and the // function into account so we have to avoid double counting them. unsigned result = fixed_size + fp_to_sp_delta_ - @@ -1771,7 +1991,7 @@ unsigned Deoptimizer::ComputeInputFrameSize() const { } -unsigned Deoptimizer::ComputeFixedSize(JSFunction* function) const { +unsigned Deoptimizer::ComputeJavascriptFixedSize(JSFunction* function) const { // The fixed part of the frame consists of the return address, frame // pointer, function, context, and all the incoming arguments. return ComputeIncomingArgumentSize(function) + @@ -1779,6 +1999,15 @@ unsigned Deoptimizer::ComputeFixedSize(JSFunction* function) const { } +unsigned Deoptimizer::ComputeInterpretedFixedSize(JSFunction* function) const { + // The fixed part of the frame consists of the return address, frame + // pointer, function, context, new.target, bytecode offset and all the + // incoming arguments. 
+ return ComputeIncomingArgumentSize(function) + + InterpreterFrameConstants::kFixedFrameSize; +} + + unsigned Deoptimizer::ComputeIncomingArgumentSize(JSFunction* function) const { // The incoming arguments is the values for formal parameters and // the receiver. Every slot contains a pointer. @@ -1872,8 +2101,13 @@ FrameDescription::FrameDescription(uint32_t frame_size, int FrameDescription::ComputeFixedSize() { - return StandardFrameConstants::kFixedFrameSize + - (ComputeParametersCount() + 1) * kPointerSize; + if (type_ == StackFrame::INTERPRETED) { + return InterpreterFrameConstants::kFixedFrameSize + + (ComputeParametersCount() + 1) * kPointerSize; + } else { + return StandardFrameConstants::kFixedFrameSize + + (ComputeParametersCount() + 1) * kPointerSize; + } } @@ -2011,6 +2245,15 @@ void Translation::BeginJSFrame(BailoutId node_id, } +void Translation::BeginInterpretedFrame(BailoutId bytecode_offset, + int literal_id, unsigned height) { + buffer_->Add(INTERPRETED_FRAME, zone()); + buffer_->Add(bytecode_offset.ToInt(), zone()); + buffer_->Add(literal_id, zone()); + buffer_->Add(height, zone()); +} + + void Translation::BeginCompiledStubFrame(int height) { buffer_->Add(COMPILED_STUB_FRAME, zone()); buffer_->Add(height, zone()); @@ -2143,6 +2386,7 @@ int Translation::NumberOfOperandsFor(Opcode opcode) { case CONSTRUCT_STUB_FRAME: return 2; case JS_FRAME: + case INTERPRETED_FRAME: return 3; } FATAL("Unexpected translation type"); @@ -2616,6 +2860,15 @@ TranslatedFrame TranslatedFrame::JSFrame(BailoutId node_id, } +TranslatedFrame TranslatedFrame::InterpretedFrame( + BailoutId bytecode_offset, SharedFunctionInfo* shared_info, int height) { + TranslatedFrame frame(kInterpretedFunction, shared_info->GetIsolate(), + shared_info, height); + frame.node_id_ = bytecode_offset; + return frame; +} + + TranslatedFrame TranslatedFrame::AccessorFrame( Kind kind, SharedFunctionInfo* shared_info) { DCHECK(kind == kSetter || kind == kGetter); @@ -2642,9 +2895,17 @@ int 
TranslatedFrame::GetValueCount() {
     case kFunction: {
       int parameter_count =
           raw_shared_info_->internal_formal_parameter_count() + 1;
+      // + 1 for function.
       return height_ + parameter_count + 1;
     }
 
+    case kInterpretedFunction: {
+      int parameter_count =
+          raw_shared_info_->internal_formal_parameter_count() + 1;
+      // + 3 for function, context and accumulator.
+      return height_ + parameter_count + 3;
+    }
+
     case kGetter:
       return 2;  // Function and receiver.
 
@@ -2700,6 +2961,24 @@ TranslatedFrame TranslatedState::CreateNextTranslatedFrame(
       return TranslatedFrame::JSFrame(node_id, shared_info, height);
     }
 
+    case Translation::INTERPRETED_FRAME: {
+      BailoutId bytecode_offset = BailoutId(iterator->Next());
+      SharedFunctionInfo* shared_info =
+          SharedFunctionInfo::cast(literal_array->get(iterator->Next()));
+      int height = iterator->Next();
+      if (trace_file != nullptr) {
+        base::SmartArrayPointer<char> name =
+            shared_info->DebugName()->ToCString();
+        PrintF(trace_file, "  reading input frame %s", name.get());
+        int arg_count = shared_info->internal_formal_parameter_count() + 1;
+        PrintF(trace_file,
+               " => bytecode_offset=%d, args=%d, height=%d; inputs:\n",
+               bytecode_offset.ToInt(), arg_count, height);
+      }
+      return TranslatedFrame::InterpretedFrame(bytecode_offset, shared_info,
+                                               height);
+    }
+
     case Translation::ARGUMENTS_ADAPTOR_FRAME: {
       SharedFunctionInfo* shared_info =
           SharedFunctionInfo::cast(literal_array->get(iterator->Next()));
@@ -2812,6 +3091,7 @@ TranslatedValue TranslatedState::CreateNextTranslatedValue(
   switch (opcode) {
     case Translation::BEGIN:
     case Translation::JS_FRAME:
+    case Translation::INTERPRETED_FRAME:
     case Translation::ARGUMENTS_ADAPTOR_FRAME:
     case Translation::CONSTRUCT_STUB_FRAME:
     case Translation::GETTER_STUB_FRAME:
diff --git a/src/deoptimizer.h b/src/deoptimizer.h
index 58e86ed26f..10685b6193 100644
--- a/src/deoptimizer.h
+++ b/src/deoptimizer.h
@@ -112,6 +112,7 @@ class TranslatedFrame {
  public:
   enum Kind {
     kFunction,
+    kInterpretedFunction,
     kGetter,
     kSetter,
kArgumentsAdaptor, @@ -172,6 +173,9 @@ class TranslatedFrame { // Constructor static methods. static TranslatedFrame JSFrame(BailoutId node_id, SharedFunctionInfo* shared_info, int height); + static TranslatedFrame InterpretedFrame(BailoutId bytecode_offset, + SharedFunctionInfo* shared_info, + int height); static TranslatedFrame AccessorFrame(Kind kind, SharedFunctionInfo* shared_info); static TranslatedFrame ArgumentsAdaptorFrame(SharedFunctionInfo* shared_info, @@ -589,6 +593,7 @@ class Deoptimizer : public Malloced { void DoComputeOutputFrames(); void DoComputeJSFrame(int frame_index); + void DoComputeInterpretedFrame(int frame_index); void DoComputeArgumentsAdaptorFrame(int frame_index); void DoComputeConstructStubFrame(int frame_index); void DoComputeAccessorStubFrame(int frame_index, bool is_setter_stub_frame); @@ -606,7 +611,8 @@ class Deoptimizer : public Malloced { const char* debug_hint_string); unsigned ComputeInputFrameSize() const; - unsigned ComputeFixedSize(JSFunction* function) const; + unsigned ComputeJavascriptFixedSize(JSFunction* function) const; + unsigned ComputeInterpretedFixedSize(JSFunction* function) const; unsigned ComputeIncomingArgumentSize(JSFunction* function) const; static unsigned ComputeOutgoingArgumentSize(Code* code, unsigned bailout_id); @@ -951,6 +957,7 @@ class TranslationIterator BASE_EMBEDDED { #define TRANSLATION_OPCODE_LIST(V) \ V(BEGIN) \ V(JS_FRAME) \ + V(INTERPRETED_FRAME) \ V(CONSTRUCT_STUB_FRAME) \ V(GETTER_STUB_FRAME) \ V(SETTER_STUB_FRAME) \ @@ -996,6 +1003,8 @@ class Translation BASE_EMBEDDED { // Commands. 
  void BeginJSFrame(BailoutId node_id, int literal_id, unsigned height);
+  void BeginInterpretedFrame(BailoutId bytecode_offset, int literal_id,
+                             unsigned height);
   void BeginCompiledStubFrame(int height);
   void BeginArgumentsAdaptorFrame(int literal_id, unsigned height);
   void BeginConstructStubFrame(int literal_id, unsigned height);
diff --git a/src/frames.cc b/src/frames.cc
index 2d188cc97c..2d054d00b6 100644
--- a/src/frames.cc
+++ b/src/frames.cc
@@ -937,8 +937,9 @@ void OptimizedFrame::Summarize(List<FrameSummary>* frames) {
   TranslationIterator it(data->TranslationByteArray(),
                          data->TranslationIndex(deopt_index)->value());
-  Translation::Opcode opcode = static_cast<Translation::Opcode>(it.Next());
-  DCHECK_EQ(Translation::BEGIN, opcode);
+  Translation::Opcode frame_opcode =
+      static_cast<Translation::Opcode>(it.Next());
+  DCHECK_EQ(Translation::BEGIN, frame_opcode);
   it.Next();  // Drop frame count.
   int jsframe_count = it.Next();
@@ -946,8 +947,9 @@ void OptimizedFrame::Summarize(List<FrameSummary>* frames) {
   // in the deoptimization translation are ordered bottom-to-top.
   bool is_constructor = IsConstructor();
   while (jsframe_count != 0) {
-    opcode = static_cast<Translation::Opcode>(it.Next());
-    if (opcode == Translation::JS_FRAME) {
+    frame_opcode = static_cast<Translation::Opcode>(it.Next());
+    if (frame_opcode == Translation::JS_FRAME ||
+        frame_opcode == Translation::INTERPRETED_FRAME) {
       jsframe_count--;
       BailoutId const ast_id = BailoutId(it.Next());
       SharedFunctionInfo* const shared_info =
@@ -956,7 +958,7 @@ void OptimizedFrame::Summarize(List<FrameSummary>* frames) {
 
       // The translation commands are ordered and the function is always
       // at the first position, and the receiver is next.
-      opcode = static_cast<Translation::Opcode>(it.Next());
+      Translation::Opcode opcode = static_cast<Translation::Opcode>(it.Next());
 
       // Get the correct function in the optimized frame.
      JSFunction* function;
@@ -993,25 +995,33 @@ void OptimizedFrame::Summarize(List<FrameSummary>* frames) {
       }
 
       Code* const code = shared_info->code();
-      DeoptimizationOutputData* const output_data =
-          DeoptimizationOutputData::cast(code->deoptimization_data());
-      unsigned const entry =
-          Deoptimizer::GetOutputInfo(output_data, ast_id, shared_info);
-      unsigned const pc_offset =
-          FullCodeGenerator::PcField::decode(entry) + Code::kHeaderSize;
-      DCHECK_NE(0U, pc_offset);
+      unsigned pc_offset;
+      if (frame_opcode == Translation::JS_FRAME) {
+        DeoptimizationOutputData* const output_data =
+            DeoptimizationOutputData::cast(code->deoptimization_data());
+        unsigned const entry =
+            Deoptimizer::GetOutputInfo(output_data, ast_id, shared_info);
+        pc_offset =
+            FullCodeGenerator::PcField::decode(entry) + Code::kHeaderSize;
+        DCHECK_NE(0U, pc_offset);
+      } else {
+        // TODO(rmcilroy): Modify FrameSummary to enable us to summarize
+        // based on the BytecodeArray and bytecode offset.
+        DCHECK_EQ(frame_opcode, Translation::INTERPRETED_FRAME);
+        pc_offset = 0;
+      }
 
       FrameSummary summary(receiver, function, code, pc_offset,
                            is_constructor);
       frames->Add(summary);
       is_constructor = false;
-    } else if (opcode == Translation::CONSTRUCT_STUB_FRAME) {
+    } else if (frame_opcode == Translation::CONSTRUCT_STUB_FRAME) {
       // The next encountered JS_FRAME will be marked as a constructor call.
-      it.Skip(Translation::NumberOfOperandsFor(opcode));
+      it.Skip(Translation::NumberOfOperandsFor(frame_opcode));
       DCHECK(!is_constructor);
       is_constructor = true;
     } else {
       // Skip over operands to advance to the next opcode.
-      it.Skip(Translation::NumberOfOperandsFor(opcode));
+      it.Skip(Translation::NumberOfOperandsFor(frame_opcode));
     }
   }
   DCHECK(!is_constructor);
@@ -1083,7 +1093,8 @@ void OptimizedFrame::GetFunctions(List<JSFunction*>* functions) const {
     opcode = static_cast<Translation::Opcode>(it.Next());
     // Skip over operands to advance to the next opcode.
it.Skip(Translation::NumberOfOperandsFor(opcode)); - if (opcode == Translation::JS_FRAME) { + if (opcode == Translation::JS_FRAME || + opcode == Translation::INTERPRETED_FRAME) { jsframe_count--; // The translation commands are ordered and the function is always at the diff --git a/src/frames.h b/src/frames.h index 76b8cb8939..674d7daeca 100644 --- a/src/frames.h +++ b/src/frames.h @@ -176,6 +176,12 @@ class ConstructFrameConstants : public AllStatic { class InterpreterFrameConstants : public AllStatic { public: + // Fixed frame includes new.target and bytecode offset. + static const int kFixedFrameSize = + StandardFrameConstants::kFixedFrameSize + 2 * kPointerSize; + static const int kFixedFrameSizeFromFp = + StandardFrameConstants::kFixedFrameSizeFromFp + 2 * kPointerSize; + // FP-relative. static const int kRegisterFilePointerFromFp = -StandardFrameConstants::kFixedFrameSizeFromFp - 3 * kPointerSize; diff --git a/src/ia32/builtins-ia32.cc b/src/ia32/builtins-ia32.cc index 4cd00d9f83..7355e6838d 100644 --- a/src/ia32/builtins-ia32.cc +++ b/src/ia32/builtins-ia32.cc @@ -603,7 +603,7 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) { __ LoadRoot(ebx, Heap::kInterpreterTableRootIndex); __ add(ebx, Immediate(FixedArray::kHeaderSize - kHeapObjectTag)); - // Push context as a stack located parameter to the bytecode handler. + // Push dispatch table as a stack located parameter to the bytecode handler. DCHECK_EQ(-1, kInterpreterDispatchTableSpillSlot); __ push(ebx); @@ -733,6 +733,90 @@ void Builtins::Generate_InterpreterPushArgsAndConstruct(MacroAssembler* masm) { } +static void Generate_InterpreterNotifyDeoptimizedHelper( + MacroAssembler* masm, Deoptimizer::BailoutType type) { + // Enter an internal frame. + { + FrameScope scope(masm, StackFrame::INTERNAL); + __ Push(kInterpreterAccumulatorRegister); // Save accumulator register. + + // Pass the deoptimization type to the runtime system. 
+    __ Push(Smi::FromInt(static_cast<int>(type)));
+
+    __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
+
+    __ Pop(kInterpreterAccumulatorRegister);  // Restore accumulator register.
+    // Tear down internal frame.
+  }
+
+  // Initialize register file register.
+  __ mov(kInterpreterRegisterFileRegister, ebp);
+  __ add(kInterpreterRegisterFileRegister,
+         Immediate(InterpreterFrameConstants::kRegisterFilePointerFromFp));
+
+  // Get the bytecode array pointer from the frame.
+  __ mov(ebx, Operand(kInterpreterRegisterFileRegister,
+                      InterpreterFrameConstants::kFunctionFromRegisterPointer));
+  __ mov(ebx, FieldOperand(ebx, JSFunction::kSharedFunctionInfoOffset));
+  __ mov(kInterpreterBytecodeArrayRegister,
+         FieldOperand(ebx, SharedFunctionInfo::kFunctionDataOffset));
+
+  if (FLAG_debug_code) {
+    // Check function data field is actually a BytecodeArray object.
+    __ AssertNotSmi(kInterpreterBytecodeArrayRegister);
+    __ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE,
+                     ebx);
+    __ Assert(equal, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
+  }
+
+  // Get the target bytecode offset from the frame.
+  __ mov(
+      kInterpreterBytecodeOffsetRegister,
+      Operand(kInterpreterRegisterFileRegister,
+              InterpreterFrameConstants::kBytecodeOffsetFromRegisterPointer));
+  __ SmiUntag(kInterpreterBytecodeOffsetRegister);
+
+  // Push dispatch table as a stack located parameter to the bytecode handler -
+  // overwrite the state slot (we don't use these for interpreter deopts).
+  __ LoadRoot(ebx, Heap::kInterpreterTableRootIndex);
+  __ add(ebx, Immediate(FixedArray::kHeaderSize - kHeapObjectTag));
+  DCHECK_EQ(-1, kInterpreterDispatchTableSpillSlot);
+  __ mov(Operand(esp, -2 * kPointerSize), ebx);
+
+  // Dispatch to the target bytecode.
+  __ movzx_b(esi, Operand(kInterpreterBytecodeArrayRegister,
+                          kInterpreterBytecodeOffsetRegister, times_1, 0));
+  __ mov(ebx, Operand(ebx, esi, times_pointer_size, 0));
+
+  // Get the context from the frame.
+  // TODO(rmcilroy): Update interpreter frame to expect current context at the
+  // context slot instead of the function context.
+  __ mov(kContextRegister,
+         Operand(kInterpreterRegisterFileRegister,
+                 InterpreterFrameConstants::kContextFromRegisterPointer));
+
+  // TODO(rmcilroy): Make dispatch table point to code entrys to avoid untagging
+  // and header removal.
+  __ add(ebx, Immediate(Code::kHeaderSize - kHeapObjectTag));
+  __ jmp(ebx);
+}
+
+
+void Builtins::Generate_InterpreterNotifyDeoptimized(MacroAssembler* masm) {
+  Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
+}
+
+
+void Builtins::Generate_InterpreterNotifySoftDeoptimized(MacroAssembler* masm) {
+  Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
+}
+
+
+void Builtins::Generate_InterpreterNotifyLazyDeoptimized(MacroAssembler* masm) {
+  Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
+}
+
+
 void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
   CallRuntimePassFunction(masm, Runtime::kCompileLazy);
   GenerateTailCallToReturnedCode(masm);
diff --git a/src/mips/builtins-mips.cc b/src/mips/builtins-mips.cc
index fcd5ad0fd6..e970af2e3b 100644
--- a/src/mips/builtins-mips.cc
+++ b/src/mips/builtins-mips.cc
@@ -942,6 +942,96 @@ void Builtins::Generate_InterpreterPushArgsAndConstruct(MacroAssembler* masm) {
 }
 
 
+static void Generate_InterpreterNotifyDeoptimizedHelper(
+    MacroAssembler* masm, Deoptimizer::BailoutType type) {
+  // Enter an internal frame.
+  {
+    FrameScope scope(masm, StackFrame::INTERNAL);
+    __ push(kInterpreterAccumulatorRegister);  // Save accumulator register.
+
+    // Pass the deoptimization type to the runtime system.
+    __ li(a1, Operand(Smi::FromInt(static_cast<int>(type))));
+    __ push(a1);
+    __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
+
+    __ pop(kInterpreterAccumulatorRegister);  // Restore accumulator register.
+    // Tear down internal frame.
+ } + + // Drop state (we don't use these for interpreter deopts) and push PC at top + // of stack (to simulate initial call to bytecode handler in interpreter entry + // trampoline). + __ lw(a1, MemOperand(sp)); + __ Drop(1); + __ sw(a1, MemOperand(sp)); + + // Initialize register file register and dispatch table register. + __ Addu(kInterpreterRegisterFileRegister, fp, + Operand(InterpreterFrameConstants::kRegisterFilePointerFromFp)); + __ LoadRoot(kInterpreterDispatchTableRegister, + Heap::kInterpreterTableRootIndex); + __ Addu(kInterpreterDispatchTableRegister, kInterpreterDispatchTableRegister, + Operand(FixedArray::kHeaderSize - kHeapObjectTag)); + + // Get the context from the frame. + // TODO(rmcilroy): Update interpreter frame to expect current context at the + // context slot instead of the function context. + __ lw(kContextRegister, + MemOperand(kInterpreterRegisterFileRegister, + InterpreterFrameConstants::kContextFromRegisterPointer)); + + // Get the bytecode array pointer from the frame. + __ lw(a1, + MemOperand(kInterpreterRegisterFileRegister, + InterpreterFrameConstants::kFunctionFromRegisterPointer)); + __ lw(a1, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); + __ lw(kInterpreterBytecodeArrayRegister, + FieldMemOperand(a1, SharedFunctionInfo::kFunctionDataOffset)); + + if (FLAG_debug_code) { + // Check function data field is actually a BytecodeArray object. + __ SmiTst(kInterpreterBytecodeArrayRegister, at); + __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, at, + Operand(zero_reg)); + __ GetObjectType(kInterpreterBytecodeArrayRegister, a1, a1); + __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, a1, + Operand(BYTECODE_ARRAY_TYPE)); + } + + // Get the target bytecode offset from the frame. 
+ __ lw(kInterpreterBytecodeOffsetRegister, + MemOperand( + kInterpreterRegisterFileRegister, + InterpreterFrameConstants::kBytecodeOffsetFromRegisterPointer)); + __ SmiUntag(kInterpreterBytecodeOffsetRegister); + + // Dispatch to the target bytecode. + __ Addu(a1, kInterpreterBytecodeArrayRegister, + kInterpreterBytecodeOffsetRegister); + __ lbu(a1, MemOperand(a1)); + __ sll(a1, a1, kPointerSizeLog2); + __ Addu(a1, kInterpreterDispatchTableRegister, a1); + __ lw(a1, MemOperand(a1)); + __ Addu(a1, a1, Operand(Code::kHeaderSize - kHeapObjectTag)); + __ Jump(a1); +} + + +void Builtins::Generate_InterpreterNotifyDeoptimized(MacroAssembler* masm) { + Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::EAGER); +} + + +void Builtins::Generate_InterpreterNotifySoftDeoptimized(MacroAssembler* masm) { + Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::SOFT); +} + + +void Builtins::Generate_InterpreterNotifyLazyDeoptimized(MacroAssembler* masm) { + Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::LAZY); +} + + void Builtins::Generate_CompileLazy(MacroAssembler* masm) { CallRuntimePassFunction(masm, Runtime::kCompileLazy); GenerateTailCallToReturnedCode(masm); diff --git a/src/mips64/builtins-mips64.cc b/src/mips64/builtins-mips64.cc index ea0a61596e..9414628a37 100644 --- a/src/mips64/builtins-mips64.cc +++ b/src/mips64/builtins-mips64.cc @@ -933,6 +933,96 @@ void Builtins::Generate_InterpreterPushArgsAndConstruct(MacroAssembler* masm) { } +static void Generate_InterpreterNotifyDeoptimizedHelper( + MacroAssembler* masm, Deoptimizer::BailoutType type) { + // Enter an internal frame. + { + FrameScope scope(masm, StackFrame::INTERNAL); + __ push(kInterpreterAccumulatorRegister); // Save accumulator register. + + // Pass the deoptimization type to the runtime system. 
+    __ li(a1, Operand(Smi::FromInt(static_cast<int>(type))));
+    __ push(a1);
+    __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
+
+    __ pop(kInterpreterAccumulatorRegister);  // Restore accumulator register.
+    // Tear down internal frame.
+  }
+
+  // Drop state (we don't use these for interpreter deopts) and push PC at top
+  // of stack (to simulate initial call to bytecode handler in interpreter entry
+  // trampoline).
+  __ ld(a1, MemOperand(sp));
+  __ Drop(1);
+  __ sd(a1, MemOperand(sp));
+
+  // Initialize register file register and dispatch table register.
+  __ Daddu(kInterpreterRegisterFileRegister, fp,
+           Operand(InterpreterFrameConstants::kRegisterFilePointerFromFp));
+  __ LoadRoot(kInterpreterDispatchTableRegister,
+              Heap::kInterpreterTableRootIndex);
+  __ Daddu(kInterpreterDispatchTableRegister, kInterpreterDispatchTableRegister,
+           Operand(FixedArray::kHeaderSize - kHeapObjectTag));
+
+  // Get the context from the frame.
+  // TODO(rmcilroy): Update interpreter frame to expect current context at the
+  // context slot instead of the function context.
+  __ ld(kContextRegister,
+        MemOperand(kInterpreterRegisterFileRegister,
+                   InterpreterFrameConstants::kContextFromRegisterPointer));
+
+  // Get the bytecode array pointer from the frame.
+  __ ld(a1,
+        MemOperand(kInterpreterRegisterFileRegister,
+                   InterpreterFrameConstants::kFunctionFromRegisterPointer));
+  __ ld(a1, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
+  __ ld(kInterpreterBytecodeArrayRegister,
+        FieldMemOperand(a1, SharedFunctionInfo::kFunctionDataOffset));
+
+  if (FLAG_debug_code) {
+    // Check function data field is actually a BytecodeArray object.
+ __ SmiTst(kInterpreterBytecodeArrayRegister, at); + __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, at, + Operand(zero_reg)); + __ GetObjectType(kInterpreterBytecodeArrayRegister, a1, a1); + __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, a1, + Operand(BYTECODE_ARRAY_TYPE)); + } + + // Get the target bytecode offset from the frame. + __ ld(kInterpreterBytecodeOffsetRegister, + MemOperand( + kInterpreterRegisterFileRegister, + InterpreterFrameConstants::kBytecodeOffsetFromRegisterPointer)); + __ SmiUntag(kInterpreterBytecodeOffsetRegister); + + // Dispatch to the target bytecode. + __ Daddu(a1, kInterpreterBytecodeArrayRegister, + kInterpreterBytecodeOffsetRegister); + __ lbu(a1, MemOperand(a1)); + __ dsll(a1, a1, kPointerSizeLog2); + __ Daddu(a1, kInterpreterDispatchTableRegister, a1); + __ ld(a1, MemOperand(a1)); + __ Daddu(a1, a1, Operand(Code::kHeaderSize - kHeapObjectTag)); + __ Jump(a1); +} + + +void Builtins::Generate_InterpreterNotifyDeoptimized(MacroAssembler* masm) { + Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::EAGER); +} + + +void Builtins::Generate_InterpreterNotifySoftDeoptimized(MacroAssembler* masm) { + Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::SOFT); +} + + +void Builtins::Generate_InterpreterNotifyLazyDeoptimized(MacroAssembler* masm) { + Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::LAZY); +} + + void Builtins::Generate_CompileLazy(MacroAssembler* masm) { CallRuntimePassFunction(masm, Runtime::kCompileLazy); GenerateTailCallToReturnedCode(masm); diff --git a/src/objects.cc b/src/objects.cc index 431559d4f3..0183fb68da 100644 --- a/src/objects.cc +++ b/src/objects.cc @@ -14350,6 +14350,17 @@ void DeoptimizationInputData::DeoptimizationInputDataPrint( break; } + case Translation::INTERPRETED_FRAME: { + int bytecode_offset = iterator.Next(); + int shared_info_id = iterator.Next(); + unsigned height = iterator.Next(); + Object* shared_info = 
LiteralArray()->get(shared_info_id);
+        os << "{bytecode_offset=" << bytecode_offset << ", function="
+           << Brief(SharedFunctionInfo::cast(shared_info)->DebugName())
+           << ", height=" << height << "}";
+        break;
+      }
+
       case Translation::JS_FRAME_FUNCTION: {
         os << "{function}";
         break;
diff --git a/src/x64/builtins-x64.cc b/src/x64/builtins-x64.cc
index f0916b0256..0d877fad55 100644
--- a/src/x64/builtins-x64.cc
+++ b/src/x64/builtins-x64.cc
@@ -784,6 +784,94 @@ void Builtins::Generate_InterpreterPushArgsAndConstruct(MacroAssembler* masm) {
 }
 
 
+static void Generate_InterpreterNotifyDeoptimizedHelper(
+    MacroAssembler* masm, Deoptimizer::BailoutType type) {
+  // Enter an internal frame.
+  {
+    FrameScope scope(masm, StackFrame::INTERNAL);
+    __ Push(kInterpreterAccumulatorRegister);  // Save accumulator register.
+
+    // Pass the deoptimization type to the runtime system.
+    __ Push(Smi::FromInt(static_cast<int>(type)));
+
+    __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
+
+    __ Pop(kInterpreterAccumulatorRegister);  // Restore accumulator register.
+    // Tear down internal frame.
+  }
+
+  // Drop state (we don't use these for interpreter deopts) and push PC at top
+  // of stack (to simulate initial call to bytecode handler in interpreter entry
+  // trampoline).
+  __ Pop(rbx);
+  __ Drop(1);
+  __ Push(rbx);
+
+  // Initialize register file register and dispatch table register.
+  __ movp(kInterpreterRegisterFileRegister, rbp);
+  __ addp(kInterpreterRegisterFileRegister,
+          Immediate(InterpreterFrameConstants::kRegisterFilePointerFromFp));
+  __ LoadRoot(kInterpreterDispatchTableRegister,
+              Heap::kInterpreterTableRootIndex);
+  __ addp(kInterpreterDispatchTableRegister,
+          Immediate(FixedArray::kHeaderSize - kHeapObjectTag));
+
+  // Get the context from the frame.
+  // TODO(rmcilroy): Update interpreter frame to expect current context at the
+  // context slot instead of the function context.
+ __ movp(kContextRegister, + Operand(kInterpreterRegisterFileRegister, + InterpreterFrameConstants::kContextFromRegisterPointer)); + + // Get the bytecode array pointer from the frame. + __ movp(rbx, + Operand(kInterpreterRegisterFileRegister, + InterpreterFrameConstants::kFunctionFromRegisterPointer)); + __ movp(rbx, FieldOperand(rbx, JSFunction::kSharedFunctionInfoOffset)); + __ movp(kInterpreterBytecodeArrayRegister, + FieldOperand(rbx, SharedFunctionInfo::kFunctionDataOffset)); + + if (FLAG_debug_code) { + // Check function data field is actually a BytecodeArray object. + __ AssertNotSmi(kInterpreterBytecodeArrayRegister); + __ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE, + rbx); + __ Assert(equal, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry); + } + + // Get the target bytecode offset from the frame. + __ movp( + kInterpreterBytecodeOffsetRegister, + Operand(kInterpreterRegisterFileRegister, + InterpreterFrameConstants::kBytecodeOffsetFromRegisterPointer)); + __ SmiToInteger32(kInterpreterBytecodeOffsetRegister, + kInterpreterBytecodeOffsetRegister); + + // Dispatch to the target bytecode. 
+ __ movzxbp(rbx, Operand(kInterpreterBytecodeArrayRegister, + kInterpreterBytecodeOffsetRegister, times_1, 0)); + __ movp(rbx, Operand(kInterpreterDispatchTableRegister, rbx, + times_pointer_size, 0)); + __ addp(rbx, Immediate(Code::kHeaderSize - kHeapObjectTag)); + __ jmp(rbx); +} + + +void Builtins::Generate_InterpreterNotifyDeoptimized(MacroAssembler* masm) { + Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::EAGER); +} + + +void Builtins::Generate_InterpreterNotifySoftDeoptimized(MacroAssembler* masm) { + Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::SOFT); +} + + +void Builtins::Generate_InterpreterNotifyLazyDeoptimized(MacroAssembler* masm) { + Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::LAZY); +} + + void Builtins::Generate_CompileLazy(MacroAssembler* masm) { CallRuntimePassFunction(masm, Runtime::kCompileLazy); GenerateTailCallToReturnedCode(masm);