X87: [Interpreter] Remove InterpreterExitTrampoline and replace with returning to the entry trampoline.

port 39738bc905 (r36310)

  original commit message:
  In order to support compiling to baseline on return, we need to be able to
  return to the actual return address. With this change, this is what the
  Return bytecode now does, removing the need for the
  InterpreterExitTrampoline.

  This change also removes the InterpreterNotifyDeoptXXX builtins and
  unifies FCG and Ignition to both use NotifyDeoptXXX. As part of this
  change, FullCodeGenerator::State is moved to Deoptimizer::BailoutState.

BUG=

Review-Url: https://codereview.chromium.org/1987053006
Cr-Commit-Position: refs/heads/master@{#36344}
zhengxing.li 2016-05-18 21:02:19 -07:00 committed by Commit bot
parent 26d94abd29
commit 32ba3c91d1
2 changed files with 104 additions and 138 deletions
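The mechanism the message describes is easiest to see in the builtins diff below: the entry trampoline records its own pc offset right after the call into the first bytecode handler, and builtins that re-enter bytecode dispatch rebuild that address from the InterpreterEntryTrampoline code object and push it as a return address, so the Return bytecode can simply `ret` back into the trampoline. The standalone C++ sketch below only illustrates the address arithmetic; the constants and addresses are hypothetical placeholders, not values taken from this port.

#include <cinttypes>
#include <cstdio>

int main() {
  const std::uintptr_t kHeapObjectTag = 1;    // V8 heap pointers carry tag bit 1.
  const std::uintptr_t kCodeHeaderSize = 96;  // placeholder for Code::kHeaderSize
  std::uintptr_t trampoline = 0x20000001;     // tagged InterpreterEntryTrampoline pointer (hypothetical)
  std::uintptr_t return_pc_offset = 0x5c;     // pc_offset() recorded right after the handler call (hypothetical)

  // Builtins such as InterpreterEnterBytecodeDispatch push this synthesized
  // address; a plain `ret` from the Return bytecode then lands just after the
  // handler call in the entry trampoline, which tears down the interpreter
  // frame, drops the receiver plus arguments, and returns to the JS caller.
  std::uintptr_t return_pc =
      trampoline + kCodeHeaderSize - kHeapObjectTag + return_pc_offset;
  std::printf("synthesized return pc: 0x%" PRIxPTR "\n", return_pc);
  return 0;
}

The full-codegen side of the port is mechanical by comparison: every PrepareForBailout / PrepareForBailoutForId call site replaces the old FullCodeGenerator::State values NO_REGISTERS and TOS_REG with Deoptimizer::BailoutState::NO_REGISTERS and TOS_REGISTER.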


@ -168,7 +168,8 @@ void FullCodeGenerator::Generate() {
__ push(edi);
__ Push(info->scope()->GetScopeInfo(info->isolate()));
__ CallRuntime(Runtime::kNewScriptContext);
PrepareForBailoutForId(BailoutId::ScriptContext(), TOS_REG);
PrepareForBailoutForId(BailoutId::ScriptContext(),
BailoutState::TOS_REGISTER);
// The new target value is not used, clobbering is safe.
DCHECK_NULL(info->scope()->new_target_var());
} else {
@ -224,7 +225,8 @@ void FullCodeGenerator::Generate() {
// Register holding this function and new target are both trashed in case we
// bailout here. But since that can happen only when new target is not used
// and we allocate a context, the value of |function_in_register| is correct.
PrepareForBailoutForId(BailoutId::FunctionContext(), NO_REGISTERS);
PrepareForBailoutForId(BailoutId::FunctionContext(),
BailoutState::NO_REGISTERS);
// Possibly set up a local binding to the this function which is used in
// derived constructors with super calls.
@ -286,7 +288,8 @@ void FullCodeGenerator::Generate() {
}
// Visit the declarations and body.
PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
PrepareForBailoutForId(BailoutId::FunctionEntry(),
BailoutState::NO_REGISTERS);
{
Comment cmnt(masm_, "[ Declarations");
VisitDeclarations(scope()->declarations());
@ -299,7 +302,8 @@ void FullCodeGenerator::Generate() {
{
Comment cmnt(masm_, "[ Stack check");
PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
PrepareForBailoutForId(BailoutId::Declarations(),
BailoutState::NO_REGISTERS);
Label ok;
ExternalReference stack_limit =
ExternalReference::address_of_stack_limit(isolate());
@ -366,11 +370,11 @@ void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
EmitProfilingCounterReset();
__ bind(&ok);
PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);
// Record a mapping of the OSR id to this PC. This is used if the OSR
// entry becomes the target of a bailout. We don't expect it to be, but
// we want it to work if it is.
PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
PrepareForBailoutForId(stmt->OsrEntryId(), BailoutState::NO_REGISTERS);
}
void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence(
@ -677,7 +681,7 @@ void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
Label skip;
if (should_normalize) __ jmp(&skip, Label::kNear);
PrepareForBailout(expr, TOS_REG);
PrepareForBailout(expr, BailoutState::TOS_REGISTER);
if (should_normalize) {
__ cmp(eax, isolate()->factory()->true_value());
Split(equal, if_true, if_false, NULL);
@ -733,7 +737,7 @@ void FullCodeGenerator::VisitVariableDeclaration(
__ mov(ContextOperand(esi, variable->index()),
Immediate(isolate()->factory()->the_hole_value()));
// No write barrier since the hole value is in old space.
PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
}
break;
@ -754,7 +758,7 @@ void FullCodeGenerator::VisitVariableDeclaration(
__ push(
Immediate(Smi::FromInt(variable->DeclarationPropertyAttributes())));
__ CallRuntime(Runtime::kDeclareLookupSlot);
PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
break;
}
}
@ -793,7 +797,7 @@ void FullCodeGenerator::VisitFunctionDeclaration(
__ RecordWriteContextSlot(esi, Context::SlotOffset(variable->index()),
result_register(), ecx, kDontSaveFPRegs,
EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
break;
}
@ -803,7 +807,7 @@ void FullCodeGenerator::VisitFunctionDeclaration(
VisitForStackValue(declaration->fun());
PushOperand(Smi::FromInt(variable->DeclarationPropertyAttributes()));
CallRuntimeWithOperands(Runtime::kDeclareLookupSlot);
PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
break;
}
}
@ -834,7 +838,7 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
// Keep the switch value on the stack until a case matches.
VisitForStackValue(stmt->tag());
PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);
ZoneList<CaseClause*>* clauses = stmt->cases();
CaseClause* default_clause = NULL; // Can occur anywhere in the list.
@ -883,7 +887,7 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
Label skip;
__ jmp(&skip, Label::kNear);
PrepareForBailout(clause, TOS_REG);
PrepareForBailout(clause, BailoutState::TOS_REGISTER);
__ cmp(eax, isolate()->factory()->true_value());
__ j(not_equal, &next_test);
__ Drop(1);
@ -911,12 +915,12 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
Comment cmnt(masm_, "[ Case body");
CaseClause* clause = clauses->at(i);
__ bind(clause->body_target());
PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
PrepareForBailoutForId(clause->EntryId(), BailoutState::NO_REGISTERS);
VisitStatements(clause->statements());
}
__ bind(nested_statement.break_label());
PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
}
@ -949,7 +953,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
ToObjectStub stub(isolate());
__ CallStub(&stub);
__ bind(&done_convert);
PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG);
PrepareForBailoutForId(stmt->ToObjectId(), BailoutState::TOS_REGISTER);
__ push(eax);
// Check cache validity in generated code. If we cannot guarantee cache
@ -966,7 +970,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
__ bind(&call_runtime);
__ push(eax);
__ CallRuntime(Runtime::kForInEnumerate);
PrepareForBailoutForId(stmt->EnumId(), TOS_REG);
PrepareForBailoutForId(stmt->EnumId(), BailoutState::TOS_REGISTER);
__ cmp(FieldOperand(eax, HeapObject::kMapOffset),
isolate()->factory()->meta_map());
__ j(not_equal, &fixed_array);
@ -998,11 +1002,11 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
// We got a fixed array in register eax. Iterate through that.
__ bind(&fixed_array);
__ push(Immediate(Smi::FromInt(1))); // Smi(1) undicates slow check
__ push(Immediate(Smi::FromInt(1))); // Smi(1) indicates slow check
__ push(eax); // Array
__ mov(eax, FieldOperand(eax, FixedArray::kLengthOffset));
__ push(eax); // Fixed array length (as smi).
PrepareForBailoutForId(stmt->PrepareId(), NO_REGISTERS);
PrepareForBailoutForId(stmt->PrepareId(), BailoutState::NO_REGISTERS);
__ push(Immediate(Smi::FromInt(0))); // Initial index.
// Generate code for doing the condition check.
@ -1040,7 +1044,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
__ push(ecx); // Enumerable.
__ push(ebx); // Current entry.
__ CallRuntime(Runtime::kForInFilter);
PrepareForBailoutForId(stmt->FilterId(), TOS_REG);
PrepareForBailoutForId(stmt->FilterId(), BailoutState::TOS_REGISTER);
__ cmp(eax, isolate()->factory()->undefined_value());
__ j(equal, loop_statement.continue_label());
__ mov(ebx, eax);
@ -1052,11 +1056,11 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
// Perform the assignment as if via '='.
{ EffectContext context(this);
EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
PrepareForBailoutForId(stmt->AssignmentId(), NO_REGISTERS);
PrepareForBailoutForId(stmt->AssignmentId(), BailoutState::NO_REGISTERS);
}
// Both Crankshaft and Turbofan expect BodyId to be right before stmt->body().
PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
PrepareForBailoutForId(stmt->BodyId(), BailoutState::NO_REGISTERS);
// Generate code for the body of the loop.
Visit(stmt->body());
@ -1073,7 +1077,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
DropOperands(5);
// Exit and decrement the loop depth.
PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
__ bind(&exit);
decrement_loop_depth();
}
@ -1231,7 +1235,7 @@ void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
TypeofMode typeof_mode) {
SetExpressionPosition(proxy);
PrepareForBailoutForId(proxy->BeforeId(), NO_REGISTERS);
PrepareForBailoutForId(proxy->BeforeId(), BailoutState::NO_REGISTERS);
Variable* var = proxy->var();
// Three cases: global variables, lookup variables, and all other types of
@ -1343,7 +1347,7 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
__ CallStub(&stub);
RestoreContext();
}
PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);
// If result_saved is true the result is on top of the stack. If
// result_saved is false the result is in eax.
@ -1379,7 +1383,7 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
__ mov(StoreDescriptor::ReceiverRegister(), Operand(esp, 0));
EmitLoadStoreICSlot(property->GetSlot(0));
CallStoreIC();
PrepareForBailoutForId(key->id(), NO_REGISTERS);
PrepareForBailoutForId(key->id(), BailoutState::NO_REGISTERS);
if (NeedsHomeObject(value)) {
EmitSetHomeObjectAccumulator(value, 0, property->GetSlot(1));
}
@ -1407,7 +1411,7 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
DCHECK(property->emit_store());
CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
NO_REGISTERS);
BailoutState::NO_REGISTERS);
break;
case ObjectLiteral::Property::GETTER:
if (property->emit_store()) {
@ -1463,7 +1467,7 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
DCHECK(property->emit_store());
CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
NO_REGISTERS);
BailoutState::NO_REGISTERS);
} else {
EmitPropertyKey(property, expr->GetIdForPropertyName(property_index));
VisitForStackValue(value);
@ -1536,7 +1540,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
__ CallStub(&stub);
}
PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);
bool result_saved = false; // Is the result saved to the stack?
ZoneList<Expression*>* subexprs = expr->values();
@ -1566,7 +1570,8 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
Handle<Code> ic =
CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
CallIC(ic);
PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
PrepareForBailoutForId(expr->GetIdForElement(array_index),
BailoutState::NO_REGISTERS);
}
// In case the array literal contains spread expressions it has two parts. The
@ -1586,7 +1591,8 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
VisitForStackValue(subexpr);
CallRuntimeWithOperands(Runtime::kAppendElement);
PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
PrepareForBailoutForId(expr->GetIdForElement(array_index),
BailoutState::NO_REGISTERS);
}
if (result_saved) {
@ -1665,23 +1671,27 @@ void FullCodeGenerator::VisitAssignment(Assignment* expr) {
switch (assign_type) {
case VARIABLE:
EmitVariableLoad(expr->target()->AsVariableProxy());
PrepareForBailout(expr->target(), TOS_REG);
PrepareForBailout(expr->target(), BailoutState::TOS_REGISTER);
break;
case NAMED_SUPER_PROPERTY:
EmitNamedSuperPropertyLoad(property);
PrepareForBailoutForId(property->LoadId(), TOS_REG);
PrepareForBailoutForId(property->LoadId(),
BailoutState::TOS_REGISTER);
break;
case NAMED_PROPERTY:
EmitNamedPropertyLoad(property);
PrepareForBailoutForId(property->LoadId(), TOS_REG);
PrepareForBailoutForId(property->LoadId(),
BailoutState::TOS_REGISTER);
break;
case KEYED_SUPER_PROPERTY:
EmitKeyedSuperPropertyLoad(property);
PrepareForBailoutForId(property->LoadId(), TOS_REG);
PrepareForBailoutForId(property->LoadId(),
BailoutState::TOS_REGISTER);
break;
case KEYED_PROPERTY:
EmitKeyedPropertyLoad(property);
PrepareForBailoutForId(property->LoadId(), TOS_REG);
PrepareForBailoutForId(property->LoadId(),
BailoutState::TOS_REGISTER);
break;
}
}
@ -1700,7 +1710,7 @@ void FullCodeGenerator::VisitAssignment(Assignment* expr) {
}
// Deoptimization point in case the binary operation may have side effects.
PrepareForBailout(expr->binary_operation(), TOS_REG);
PrepareForBailout(expr->binary_operation(), BailoutState::TOS_REGISTER);
} else {
VisitForAccumulatorValue(expr->value());
}
@ -1712,7 +1722,7 @@ void FullCodeGenerator::VisitAssignment(Assignment* expr) {
case VARIABLE:
EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
expr->op(), expr->AssignmentSlot());
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
context()->Plug(eax);
break;
case NAMED_PROPERTY:
@ -2171,7 +2181,7 @@ void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
PopOperand(StoreDescriptor::ReceiverRegister());
EmitLoadStoreICSlot(expr->AssignmentSlot());
CallStoreIC();
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
context()->Plug(eax);
}
@ -2217,7 +2227,7 @@ void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
EmitLoadStoreICSlot(expr->AssignmentSlot());
CallIC(ic);
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
context()->Plug(eax);
}
@ -2238,7 +2248,7 @@ void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
if (callee->IsVariableProxy()) {
{ StackValueContext context(this);
EmitVariableLoad(callee->AsVariableProxy());
PrepareForBailout(callee, NO_REGISTERS);
PrepareForBailout(callee, BailoutState::NO_REGISTERS);
}
// Push undefined as receiver. This is patched in the method prologue if it
// is a sloppy mode method.
@ -2250,7 +2260,8 @@ void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
DCHECK(!callee->AsProperty()->IsSuperAccess());
__ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
EmitNamedPropertyLoad(callee->AsProperty());
PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
PrepareForBailoutForId(callee->AsProperty()->LoadId(),
BailoutState::TOS_REGISTER);
// Push the target function under the receiver.
PushOperand(Operand(esp, 0));
__ mov(Operand(esp, kPointerSize), eax);
@ -2285,7 +2296,7 @@ void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
// - home_object
// - key
CallRuntimeWithOperands(Runtime::kLoadFromSuper);
PrepareForBailoutForId(prop->LoadId(), TOS_REG);
PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
// Replace home_object with target function.
__ mov(Operand(esp, kPointerSize), eax);
@ -2310,7 +2321,8 @@ void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
__ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
__ mov(LoadDescriptor::NameRegister(), eax);
EmitKeyedPropertyLoad(callee->AsProperty());
PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
PrepareForBailoutForId(callee->AsProperty()->LoadId(),
BailoutState::TOS_REGISTER);
// Push the target function under the receiver.
PushOperand(Operand(esp, 0));
@ -2342,7 +2354,7 @@ void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
// - home_object
// - key
CallRuntimeWithOperands(Runtime::kLoadKeyedFromSuper);
PrepareForBailoutForId(prop->LoadId(), TOS_REG);
PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
// Replace home_object with target function.
__ mov(Operand(esp, kPointerSize), eax);
@ -2362,7 +2374,7 @@ void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
VisitForStackValue(args->at(i));
}
PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
SetCallPosition(expr, expr->tail_call_mode());
if (expr->tail_call_mode() == TailCallMode::kAllow) {
if (FLAG_trace) {
@ -2430,7 +2442,7 @@ void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) {
__ CallRuntime(Runtime::kLoadLookupSlotForCall);
PushOperand(eax); // Function.
PushOperand(edx); // Receiver.
PrepareForBailoutForId(expr->LookupId(), NO_REGISTERS);
PrepareForBailoutForId(expr->LookupId(), BailoutState::NO_REGISTERS);
// If fast case code has been generated, emit code to push the function
// and receiver and have the slow path jump around this code.
@ -2475,7 +2487,7 @@ void FullCodeGenerator::EmitPossiblyEvalCall(Call* expr) {
// Touch up the stack with the resolved function.
__ mov(Operand(esp, (arg_count + 1) * kPointerSize), eax);
PrepareForBailoutForId(expr->EvalId(), NO_REGISTERS);
PrepareForBailoutForId(expr->EvalId(), BailoutState::NO_REGISTERS);
SetCallPosition(expr);
__ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
@ -2524,7 +2536,7 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) {
CallConstructStub stub(isolate());
__ call(stub.GetCode(), RelocInfo::CODE_TARGET);
OperandStackDepthDecrement(arg_count + 1);
PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
PrepareForBailoutForId(expr->ReturnId(), BailoutState::TOS_REGISTER);
RestoreContext();
context()->Plug(eax);
}
@ -2960,7 +2972,7 @@ void FullCodeGenerator::EmitCall(CallRuntime* expr) {
for (Expression* const arg : *args) {
VisitForStackValue(arg);
}
PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
// Move target to edi.
int const argc = args->length() - 2;
__ mov(edi, Operand(esp, (argc + 1) * kPointerSize));
@ -3167,7 +3179,8 @@ void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
&materialize_true);
if (!context()->IsAccumulatorValue()) OperandStackDepthIncrement(1);
__ bind(&materialize_true);
PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
PrepareForBailoutForId(expr->MaterializeTrueId(),
BailoutState::NO_REGISTERS);
if (context()->IsAccumulatorValue()) {
__ mov(eax, isolate()->factory()->true_value());
} else {
@ -3175,7 +3188,8 @@ void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
}
__ jmp(&done, Label::kNear);
__ bind(&materialize_false);
PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
PrepareForBailoutForId(expr->MaterializeFalseId(),
BailoutState::NO_REGISTERS);
if (context()->IsAccumulatorValue()) {
__ mov(eax, isolate()->factory()->false_value());
} else {
@ -3274,9 +3288,9 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
// We need a second deoptimization point after loading the value
// in case evaluating the property load my have a side effect.
if (assign_type == VARIABLE) {
PrepareForBailout(expr->expression(), TOS_REG);
PrepareForBailout(expr->expression(), BailoutState::TOS_REGISTER);
} else {
PrepareForBailoutForId(prop->LoadId(), TOS_REG);
PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
}
// Inline smi case if we are in a loop.
@ -3331,7 +3345,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
// Convert old value into a number.
ToNumberStub convert_stub(isolate());
__ CallStub(&convert_stub);
PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
PrepareForBailoutForId(expr->ToNumberId(), BailoutState::TOS_REGISTER);
// Save result for postfix expressions.
if (expr->is_postfix()) {
@ -3379,7 +3393,8 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
{ EffectContext context(this);
EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
Token::ASSIGN, expr->CountSlot());
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
PrepareForBailoutForId(expr->AssignmentId(),
BailoutState::TOS_REGISTER);
context.Plug(eax);
}
// For all contexts except EffectContext We have the result on
@ -3391,7 +3406,8 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
// Perform the assignment as if via '='.
EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
Token::ASSIGN, expr->CountSlot());
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
PrepareForBailoutForId(expr->AssignmentId(),
BailoutState::TOS_REGISTER);
context()->Plug(eax);
}
break;
@ -3401,7 +3417,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
PopOperand(StoreDescriptor::ReceiverRegister());
EmitLoadStoreICSlot(expr->CountSlot());
CallStoreIC();
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
if (expr->is_postfix()) {
if (!context()->IsEffect()) {
context()->PlugTOS();
@ -3440,7 +3456,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
EmitLoadStoreICSlot(expr->CountSlot());
CallIC(ic);
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
if (expr->is_postfix()) {
// Result is on the stack
if (!context()->IsEffect()) {


@ -621,9 +621,22 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
__ mov(ebx, Operand(kInterpreterDispatchTableRegister, ebx,
times_pointer_size, 0));
__ call(ebx);
masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());
// Even though the first bytecode handler was called, we will never return.
__ Abort(kUnexpectedReturnFromBytecodeHandler);
// The return value is in eax.
// Get the arguments + reciever count.
__ mov(ebx, Operand(ebp, InterpreterFrameConstants::kBytecodeArrayFromFp));
__ mov(ebx, FieldOperand(ebx, BytecodeArray::kParameterSizeOffset));
// Leave the frame (also dropping the register file).
__ leave();
// Drop receiver + arguments and return.
__ pop(ecx);
__ add(esp, ebx);
__ push(ecx);
__ ret(0);
// Load debug copy of the bytecode array.
__ bind(&load_debug_bytecode_array);
@ -649,27 +662,6 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
__ jmp(ecx);
}
void Builtins::Generate_InterpreterExitTrampoline(MacroAssembler* masm) {
// Interpreter handler is turbofanned code, need to reset the FPU before
// return
__ fninit();
// The return value is in accumulator, which is already in eax.
// Leave the frame (also dropping the register file).
__ leave();
// Drop receiver + arguments and return.
__ mov(ebx, FieldOperand(kInterpreterBytecodeArrayRegister,
BytecodeArray::kParameterSizeOffset));
__ pop(ecx);
__ add(esp, ebx);
__ push(ecx);
__ ret(0);
}
static void Generate_InterpreterPushArgs(MacroAssembler* masm,
Register array_limit) {
// ----------- S t a t e -------------
@ -687,7 +679,6 @@ static void Generate_InterpreterPushArgs(MacroAssembler* masm,
__ j(greater, &loop_header, Label::kNear);
}
// static
void Builtins::Generate_InterpreterPushArgsAndCallImpl(
MacroAssembler* masm, TailCallMode tail_call_mode) {
@ -756,8 +747,18 @@ void Builtins::Generate_InterpreterPushArgsAndConstruct(MacroAssembler* masm) {
__ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
}
void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
// Set the return address to the correct point in the interpreter entry
// trampoline.
Smi* interpreter_entry_return_pc_offset(
masm->isolate()->heap()->interpreter_entry_return_pc_offset());
DCHECK_NE(interpreter_entry_return_pc_offset, Smi::FromInt(0));
__ LoadHeapObject(ebx,
masm->isolate()->builtins()->InterpreterEntryTrampoline());
__ add(ebx, Immediate(interpreter_entry_return_pc_offset->value() +
Code::kHeaderSize - kHeapObjectTag));
__ push(ebx);
static void Generate_EnterBytecodeDispatch(MacroAssembler* masm) {
// Initialize the dispatch table register.
__ mov(kInterpreterDispatchTableRegister,
Immediate(ExternalReference::interpreter_dispatch_table_address(
@ -788,58 +789,6 @@ static void Generate_EnterBytecodeDispatch(MacroAssembler* masm) {
__ jmp(ebx);
}
static void Generate_InterpreterNotifyDeoptimizedHelper(
MacroAssembler* masm, Deoptimizer::BailoutType type) {
// Enter an internal frame.
{
FrameScope scope(masm, StackFrame::INTERNAL);
// Pass the deoptimization type to the runtime system.
__ Push(Smi::FromInt(static_cast<int>(type)));
__ CallRuntime(Runtime::kNotifyDeoptimized);
// Tear down internal frame.
}
// Drop state (we don't use these for interpreter deopts) and and pop the
// accumulator value into the accumulator register and push PC at top
// of stack (to simulate initial call to bytecode handler in interpreter entry
// trampoline).
__ Pop(ebx);
__ Drop(1);
__ Pop(kInterpreterAccumulatorRegister);
__ Push(ebx);
// Enter the bytecode dispatch.
Generate_EnterBytecodeDispatch(masm);
}
void Builtins::Generate_InterpreterNotifyDeoptimized(MacroAssembler* masm) {
Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}
void Builtins::Generate_InterpreterNotifySoftDeoptimized(MacroAssembler* masm) {
Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}
void Builtins::Generate_InterpreterNotifyLazyDeoptimized(MacroAssembler* masm) {
Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}
void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
// Set the address of the interpreter entry trampoline as a return address.
// This simulates the initial call to bytecode handlers in interpreter entry
// trampoline. The return will never actually be taken, but our stack walker
// uses this address to determine whether a frame is interpreted.
__ Push(masm->isolate()->builtins()->InterpreterEntryTrampoline());
Generate_EnterBytecodeDispatch(masm);
}
void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- eax : argument count (preserved for callee)
@ -1136,13 +1085,14 @@ static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
// Switch on the state.
Label not_no_registers, not_tos_eax;
__ cmp(ecx, FullCodeGenerator::NO_REGISTERS);
__ cmp(ecx, static_cast<int>(Deoptimizer::BailoutState::NO_REGISTERS));
__ j(not_equal, &not_no_registers, Label::kNear);
__ ret(1 * kPointerSize); // Remove state.
__ bind(&not_no_registers);
DCHECK_EQ(kInterpreterAccumulatorRegister.code(), eax.code());
__ mov(eax, Operand(esp, 2 * kPointerSize));
__ cmp(ecx, FullCodeGenerator::TOS_REG);
__ cmp(ecx, static_cast<int>(Deoptimizer::BailoutState::TOS_REGISTER));
__ j(not_equal, &not_tos_eax, Label::kNear);
__ ret(2 * kPointerSize); // Remove state, eax.