Deoptimizer support for hydrogen stubs that accept a variable number of arguments.
BUG=
Review URL: https://codereview.chromium.org/12490013

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@14111 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent 359d4a2869
commit 2502668f50
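The bookkeeping behind the patch is small: a stub that takes a variable number of arguments advertises the register holding the caller's argument count, and the stub-failure trampoline pops that many slots plus one extra slot for the constructor when the stub runs in JS_FUNCTION_STUB_MODE. A minimal standalone sketch of that arithmetic (assumption: simplified model, not V8 code; helper name invented for illustration):

#include <cstdio>

// Mirrors the StubFunctionMode enum added to code-stubs.h in this patch.
enum StubFunctionMode { NOT_JS_FUNCTION_STUB_MODE, JS_FUNCTION_STUB_MODE };

// Hypothetical helper: number of stack slots the trampoline pops on return.
// In the patch this happens in generated code (e.g. "__ add(r1, r1, Operand(1))"
// on ARM, or the additional_offset passed to lea on ia32/x64), not in C++.
static int StackSlotsToPop(int caller_arg_count, StubFunctionMode mode) {
  return caller_arg_count + (mode == JS_FUNCTION_STUB_MODE ? 1 : 0);
}

int main() {
  std::printf("%d\n", StackSlotsToPop(3, NOT_JS_FUNCTION_STUB_MODE));  // 3
  std::printf("%d\n", StackSlotsToPop(3, JS_FUNCTION_STUB_MODE));      // 4
  return 0;
}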
@@ -95,7 +95,7 @@ static void InitializeArrayConstructorDescriptor(Isolate* isolate,
   // stack param count needs (constructor pointer, and single argument)
   descriptor->stack_parameter_count_ = &r0;
   descriptor->register_params_ = registers;
-  descriptor->extra_expression_stack_count_ = 1;
+  descriptor->function_mode_ = JS_FUNCTION_STUB_MODE;
   descriptor->deoptimization_handler_ =
       FUNCTION_ADDR(ArrayConstructor_StubFailure);
 }
@@ -7962,6 +7962,9 @@ void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
   int parameter_count_offset =
       StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset;
   __ ldr(r1, MemOperand(fp, parameter_count_offset));
+  if (function_mode_ == JS_FUNCTION_STUB_MODE) {
+    __ add(r1, r1, Operand(1));
+  }
   masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
   __ mov(r1, Operand(r1, LSL, kPointerSizeLog2));
   __ add(sp, sp, r1);
@@ -129,7 +129,8 @@ bool CodeStubGraphBuilderBase::BuildGraph() {
   if (descriptor_->stack_parameter_count_ != NULL) {
     ASSERT(descriptor_->environment_length() == (param_count + 1));
     stack_parameter_count = new(zone) HParameter(param_count,
-                                                 HParameter::REGISTER_PARAMETER);
+                                                 HParameter::REGISTER_PARAMETER,
+                                                 Representation::Integer32());
     // it's essential to bind this value to the environment in case of deopt
     start_environment->Bind(param_count, stack_parameter_count);
     AddInstruction(stack_parameter_count);
@@ -147,13 +148,26 @@ bool CodeStubGraphBuilderBase::BuildGraph() {
   AddSimulate(BailoutId::StubEntry());

   HValue* return_value = BuildCodeStub();
+
+  // We might have extra expressions to pop from the stack in addition to the
+  // arguments above
+  HInstruction* stack_pop_count = stack_parameter_count;
+  if (descriptor_->function_mode_ == JS_FUNCTION_STUB_MODE) {
+    HInstruction* amount = graph()->GetConstant1();
+    stack_pop_count = AddInstruction(
+        HAdd::New(zone, context_, stack_parameter_count, amount));
+    stack_pop_count->ChangeRepresentation(Representation::Integer32());
+    stack_pop_count->ClearFlag(HValue::kCanOverflow);
+  }
+
   HReturn* hreturn_instruction = new(zone) HReturn(return_value,
                                                    context_,
-                                                   stack_parameter_count);
+                                                   stack_pop_count);
   current_block()->Finish(hreturn_instruction);
   return true;
 }


 template <class Stub>
 class CodeStubGraphBuilder: public CodeStubGraphBuilderBase {
  public:
@@ -619,10 +619,8 @@ void ElementsTransitionAndStoreStub::Generate(MacroAssembler* masm) {


 void StubFailureTrampolineStub::GenerateAheadOfTime(Isolate* isolate) {
-  int i = 0;
-  for (; i <= StubFailureTrampolineStub::kMaxExtraExpressionStackCount; ++i) {
-    StubFailureTrampolineStub(i).GetCode(isolate);
-  }
+  StubFailureTrampolineStub(NOT_JS_FUNCTION_STUB_MODE).GetCode(isolate);
+  StubFailureTrampolineStub(JS_FUNCTION_STUB_MODE).GetCode(isolate);
 }


@@ -258,15 +258,17 @@ class PlatformCodeStub : public CodeStub {
 };


+enum StubFunctionMode { NOT_JS_FUNCTION_STUB_MODE, JS_FUNCTION_STUB_MODE };
+
 struct CodeStubInterfaceDescriptor {
   CodeStubInterfaceDescriptor()
       : register_param_count_(-1),
         stack_parameter_count_(NULL),
-        extra_expression_stack_count_(0),
+        function_mode_(NOT_JS_FUNCTION_STUB_MODE),
         register_params_(NULL) { }
   int register_param_count_;
   const Register* stack_parameter_count_;
-  int extra_expression_stack_count_;
+  StubFunctionMode function_mode_;
   Register* register_params_;
   Address deoptimization_handler_;

@@ -1601,10 +1603,8 @@ class StoreArrayLiteralElementStub : public PlatformCodeStub {

 class StubFailureTrampolineStub : public PlatformCodeStub {
  public:
-  static const int kMaxExtraExpressionStackCount = 1;
-
-  explicit StubFailureTrampolineStub(int extra_expression_stack_count)
-      : extra_expression_stack_count_(extra_expression_stack_count) {}
+  explicit StubFailureTrampolineStub(StubFunctionMode function_mode)
+      : function_mode_(function_mode) {}

   virtual bool IsPregenerated() { return true; }

@@ -1612,11 +1612,11 @@ class StubFailureTrampolineStub : public PlatformCodeStub {

  private:
   Major MajorKey() { return StubFailureTrampoline; }
-  int MinorKey() { return extra_expression_stack_count_; }
+  int MinorKey() { return static_cast<int>(function_mode_); }

   void Generate(MacroAssembler* masm);

-  int extra_expression_stack_count_;
+  StubFunctionMode function_mode_;

   DISALLOW_COPY_AND_ASSIGN(StubFailureTrampolineStub);
 };
@@ -1280,29 +1280,37 @@ void Deoptimizer::DoComputeCompiledStubFrame(TranslationIterator* iterator,
   }

   intptr_t caller_arg_count = 0;
-  if (descriptor->stack_parameter_count_ != NULL) {
-    caller_arg_count =
-        input_->GetRegister(descriptor->stack_parameter_count_->code());
-  }
+  bool arg_count_known = descriptor->stack_parameter_count_ == NULL;

   // Build the Arguments object for the caller's parameters and a pointer to it.
   output_frame_offset -= kPointerSize;
-  value = frame_ptr + StandardFrameConstants::kCallerSPOffset +
-      (caller_arg_count - 1) * kPointerSize;
-  output_frame->SetFrameSlot(output_frame_offset, value);
+  int args_arguments_offset = output_frame_offset;
+  intptr_t the_hole = reinterpret_cast<intptr_t>(
+      isolate_->heap()->the_hole_value());
+  if (arg_count_known) {
+    value = frame_ptr + StandardFrameConstants::kCallerSPOffset +
+        (caller_arg_count - 1) * kPointerSize;
+  } else {
+    value = the_hole;
+  }
+
+  output_frame->SetFrameSlot(args_arguments_offset, value);
   if (trace_) {
     PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
-           V8PRIxPTR " ; args.arguments\n",
-           top_address + output_frame_offset, output_frame_offset, value);
+           V8PRIxPTR " ; args.arguments %s\n",
+           top_address + args_arguments_offset, args_arguments_offset, value,
+           arg_count_known ? "" : "(the hole)");
   }

   output_frame_offset -= kPointerSize;
-  value = caller_arg_count;
-  output_frame->SetFrameSlot(output_frame_offset, value);
+  int length_frame_offset = output_frame_offset;
+  value = arg_count_known ? caller_arg_count : the_hole;
+  output_frame->SetFrameSlot(length_frame_offset, value);
   if (trace_) {
     PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
-           V8PRIxPTR " ; args.length\n",
-           top_address + output_frame_offset, output_frame_offset, value);
+           V8PRIxPTR " ; args.length %s\n",
+           top_address + length_frame_offset, length_frame_offset, value,
+           arg_count_known ? "" : "(the hole)");
   }

   output_frame_offset -= kPointerSize;
@@ -1321,6 +1329,20 @@ void Deoptimizer::DoComputeCompiledStubFrame(TranslationIterator* iterator,
     DoTranslateCommand(iterator, 0, output_frame_offset);
   }

+  if (!arg_count_known) {
+    DoTranslateCommand(iterator, 0, length_frame_offset,
+                       TRANSLATED_VALUE_IS_NATIVE);
+    caller_arg_count = output_frame->GetFrameSlot(length_frame_offset);
+    value = frame_ptr + StandardFrameConstants::kCallerSPOffset +
+        (caller_arg_count - 1) * kPointerSize;
+    output_frame->SetFrameSlot(args_arguments_offset, value);
+    if (trace_) {
+      PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
+             V8PRIxPTR " ; args.arguments\n",
+             top_address + args_arguments_offset, args_arguments_offset, value);
+    }
+  }
+
   ASSERT(0 == output_frame_offset);

   // Copy the double registers from the input into the output frame.
@@ -1331,8 +1353,9 @@ void Deoptimizer::DoComputeCompiledStubFrame(TranslationIterator* iterator,

   // Compute this frame's PC, state, and continuation.
   Code* trampoline = NULL;
-  int extra = descriptor->extra_expression_stack_count_;
-  StubFailureTrampolineStub(extra).FindCodeInCache(&trampoline, isolate_);
+  StubFunctionMode function_mode = descriptor->function_mode_;
+  StubFailureTrampolineStub(function_mode).FindCodeInCache(&trampoline,
+                                                           isolate_);
   ASSERT(trampoline != NULL);
   output_frame->SetPc(reinterpret_cast<intptr_t>(
       trampoline->instruction_start()));
@@ -1476,12 +1499,25 @@ void Deoptimizer::MaterializeHeapNumbersForDebuggerInspectableFrame(
 #endif


+static const char* TraceValueType(bool is_smi, bool is_native) {
+  if (is_native) {
+    return "native";
+  } else if (is_smi) {
+    return "smi";
+  }
+
+  return "heap number";
+}
+
+
 void Deoptimizer::DoTranslateCommand(TranslationIterator* iterator,
-                                     int frame_index,
-                                     unsigned output_offset) {
+    int frame_index,
+    unsigned output_offset,
+    DeoptimizerTranslatedValueType value_type) {
   disasm::NameConverter converter;
   // A GC-safe temporary placeholder that we can put in the output frame.
   const intptr_t kPlaceholder = reinterpret_cast<intptr_t>(Smi::FromInt(0));
+  bool is_native = value_type == TRANSLATED_VALUE_IS_NATIVE;

   // Ignore commands marked as duplicate and act on the first non-duplicate.
   Translation::Opcode opcode =
@@ -1524,7 +1560,9 @@ void Deoptimizer::DoTranslateCommand(TranslationIterator* iterator,
     case Translation::INT32_REGISTER: {
       int input_reg = iterator->Next();
       intptr_t value = input_->GetRegister(input_reg);
-      bool is_smi = Smi::IsValid(value);
+      bool is_smi = (value_type == TRANSLATED_VALUE_IS_TAGGED) &&
+          Smi::IsValid(value);
+
       if (trace_) {
         PrintF(
             " 0x%08" V8PRIxPTR ": [top + %d] <- %" V8PRIdPTR " ; %s (%s)\n",
@@ -1532,15 +1570,18 @@ void Deoptimizer::DoTranslateCommand(TranslationIterator* iterator,
             output_offset,
             value,
             converter.NameOfCPURegister(input_reg),
-            is_smi ? "smi" : "heap number");
+            TraceValueType(is_smi, is_native));
       }
       if (is_smi) {
         intptr_t tagged_value =
             reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(value)));
         output_[frame_index]->SetFrameSlot(output_offset, tagged_value);
+      } else if (value_type == TRANSLATED_VALUE_IS_NATIVE) {
+        output_[frame_index]->SetFrameSlot(output_offset, value);
       } else {
         // We save the untagged value on the side and store a GC-safe
         // temporary placeholder in the frame.
+        ASSERT(value_type == TRANSLATED_VALUE_IS_TAGGED);
         AddDoubleValue(output_[frame_index]->GetTop() + output_offset,
                        static_cast<double>(static_cast<int32_t>(value)));
         output_[frame_index]->SetFrameSlot(output_offset, kPlaceholder);
@@ -1551,7 +1592,8 @@ void Deoptimizer::DoTranslateCommand(TranslationIterator* iterator,
     case Translation::UINT32_REGISTER: {
       int input_reg = iterator->Next();
       uintptr_t value = static_cast<uintptr_t>(input_->GetRegister(input_reg));
-      bool is_smi = (value <= static_cast<uintptr_t>(Smi::kMaxValue));
+      bool is_smi = (value_type == TRANSLATED_VALUE_IS_TAGGED) &&
+          (value <= static_cast<uintptr_t>(Smi::kMaxValue));
       if (trace_) {
         PrintF(
             " 0x%08" V8PRIxPTR ": [top + %d] <- %" V8PRIuPTR
@@ -1560,15 +1602,18 @@ void Deoptimizer::DoTranslateCommand(TranslationIterator* iterator,
             output_offset,
             value,
             converter.NameOfCPURegister(input_reg),
-            is_smi ? "smi" : "heap number");
+            TraceValueType(is_smi, is_native));
       }
       if (is_smi) {
         intptr_t tagged_value =
             reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(value)));
         output_[frame_index]->SetFrameSlot(output_offset, tagged_value);
+      } else if (value_type == TRANSLATED_VALUE_IS_NATIVE) {
+        output_[frame_index]->SetFrameSlot(output_offset, value);
       } else {
         // We save the untagged value on the side and store a GC-safe
         // temporary placeholder in the frame.
+        ASSERT(value_type == TRANSLATED_VALUE_IS_TAGGED);
         AddDoubleValue(output_[frame_index]->GetTop() + output_offset,
                        static_cast<double>(static_cast<uint32_t>(value)));
         output_[frame_index]->SetFrameSlot(output_offset, kPlaceholder);
@@ -1617,7 +1662,8 @@ void Deoptimizer::DoTranslateCommand(TranslationIterator* iterator,
       unsigned input_offset =
           input_->GetOffsetFromSlotIndex(input_slot_index);
       intptr_t value = input_->GetFrameSlot(input_offset);
-      bool is_smi = Smi::IsValid(value);
+      bool is_smi = (value_type == TRANSLATED_VALUE_IS_TAGGED) &&
+          Smi::IsValid(value);
       if (trace_) {
         PrintF(" 0x%08" V8PRIxPTR ": ",
                output_[frame_index]->GetTop() + output_offset);
@@ -1625,15 +1671,18 @@ void Deoptimizer::DoTranslateCommand(TranslationIterator* iterator,
                output_offset,
                value,
                input_offset,
-               is_smi ? "smi" : "heap number");
+               TraceValueType(is_smi, is_native));
       }
       if (is_smi) {
         intptr_t tagged_value =
             reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(value)));
         output_[frame_index]->SetFrameSlot(output_offset, tagged_value);
+      } else if (value_type == TRANSLATED_VALUE_IS_NATIVE) {
+        output_[frame_index]->SetFrameSlot(output_offset, value);
       } else {
         // We save the untagged value on the side and store a GC-safe
         // temporary placeholder in the frame.
+        ASSERT(value_type == TRANSLATED_VALUE_IS_TAGGED);
         AddDoubleValue(output_[frame_index]->GetTop() + output_offset,
                        static_cast<double>(static_cast<int32_t>(value)));
         output_[frame_index]->SetFrameSlot(output_offset, kPlaceholder);
@@ -1647,7 +1696,8 @@ void Deoptimizer::DoTranslateCommand(TranslationIterator* iterator,
           input_->GetOffsetFromSlotIndex(input_slot_index);
       uintptr_t value =
           static_cast<uintptr_t>(input_->GetFrameSlot(input_offset));
-      bool is_smi = (value <= static_cast<uintptr_t>(Smi::kMaxValue));
+      bool is_smi = (value_type == TRANSLATED_VALUE_IS_TAGGED) &&
+          (value <= static_cast<uintptr_t>(Smi::kMaxValue));
       if (trace_) {
         PrintF(" 0x%08" V8PRIxPTR ": ",
                output_[frame_index]->GetTop() + output_offset);
@@ -1655,15 +1705,18 @@ void Deoptimizer::DoTranslateCommand(TranslationIterator* iterator,
                output_offset,
                value,
                input_offset,
-               is_smi ? "smi" : "heap number");
+               TraceValueType(is_smi, is_native));
       }
       if (is_smi) {
         intptr_t tagged_value =
             reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(value)));
         output_[frame_index]->SetFrameSlot(output_offset, tagged_value);
+      } else if (value_type == TRANSLATED_VALUE_IS_NATIVE) {
+        output_[frame_index]->SetFrameSlot(output_offset, value);
       } else {
         // We save the untagged value on the side and store a GC-safe
         // temporary placeholder in the frame.
+        ASSERT(value_type == TRANSLATED_VALUE_IS_TAGGED);
         AddDoubleValue(output_[frame_index]->GetTop() + output_offset,
                        static_cast<double>(static_cast<uint32_t>(value)));
         output_[frame_index]->SetFrameSlot(output_offset, kPlaceholder);
@@ -356,9 +356,17 @@ class Deoptimizer : public Malloced {
                              bool is_setter_stub_frame);
   void DoComputeCompiledStubFrame(TranslationIterator* iterator,
                                   int frame_index);

+  enum DeoptimizerTranslatedValueType {
+    TRANSLATED_VALUE_IS_NATIVE,
+    TRANSLATED_VALUE_IS_TAGGED
+  };
+
   void DoTranslateCommand(TranslationIterator* iterator,
-                          int frame_index,
-                          unsigned output_offset);
+      int frame_index,
+      unsigned output_offset,
+      DeoptimizerTranslatedValueType value_type = TRANSLATED_VALUE_IS_TAGGED);

   // Translate a command for OSR. Updates the input offset to be used for
   // the next command. Returns false if translation of the command failed
   // (e.g., a number conversion failed) and may or may not have updated the
@@ -1311,18 +1311,19 @@ Address StubFailureTrampolineFrame::GetCallerStackPointer() const {


 Code* StubFailureTrampolineFrame::unchecked_code() const {
-  int i = 0;
-  for (; i <= StubFailureTrampolineStub::kMaxExtraExpressionStackCount; ++i) {
-    Code* trampoline;
-    StubFailureTrampolineStub(i).FindCodeInCache(&trampoline, isolate());
-    ASSERT(trampoline != NULL);
-    Address current_pc = pc();
-    Address code_start = trampoline->instruction_start();
-    Address code_end = code_start + trampoline->instruction_size();
-    if (code_start <= current_pc && current_pc < code_end) {
-      return trampoline;
-    }
+  Code* trampoline;
+  StubFailureTrampolineStub(NOT_JS_FUNCTION_STUB_MODE).
+      FindCodeInCache(&trampoline, isolate());
+  if (trampoline->contains(pc())) {
+    return trampoline;
+  }
+
+  StubFailureTrampolineStub(JS_FUNCTION_STUB_MODE).
+      FindCodeInCache(&trampoline, isolate());
+  if (trampoline->contains(pc())) {
+    return trampoline;
+  }

   UNREACHABLE();
   return NULL;
 }
@@ -4693,6 +4693,14 @@ class HParameter: public HTemplateInstruction<0> {
     set_representation(Representation::Tagged());
   }

+  explicit HParameter(unsigned index,
+                      ParameterKind kind,
+                      Representation r)
+      : index_(index),
+        kind_(kind) {
+    set_representation(r);
+  }
+
   unsigned index() const { return index_; }
   ParameterKind kind() const { return kind_; }

@@ -100,7 +100,7 @@ static void InitializeArrayConstructorDescriptor(Isolate* isolate,
   // stack param count needs (constructor pointer, and single argument)
   descriptor->stack_parameter_count_ = &eax;
   descriptor->register_params_ = registers;
-  descriptor->extra_expression_stack_count_ = 1;
+  descriptor->function_mode_ = JS_FUNCTION_STUB_MODE;
   descriptor->deoptimization_handler_ =
       FUNCTION_ADDR(ArrayConstructor_StubFailure);
 }
@@ -7825,8 +7825,10 @@ void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
   __ mov(ebx, MemOperand(ebp, parameter_count_offset));
   masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
   __ pop(ecx);
-  __ lea(esp, MemOperand(esp, ebx, times_pointer_size,
-                         extra_expression_stack_count_ * kPointerSize));
+  int additional_offset = function_mode_ == JS_FUNCTION_STUB_MODE
+      ? kPointerSize
+      : 0;
+  __ lea(esp, MemOperand(esp, ebx, times_pointer_size, additional_offset));
   __ jmp(ecx);  // Return to IC Miss stub, continuation still on stack.
 }

@@ -95,7 +95,7 @@ static void InitializeArrayConstructorDescriptor(Isolate* isolate,
   // stack param count needs (constructor pointer, and single argument)
   descriptor->stack_parameter_count_ = &rax;
   descriptor->register_params_ = registers;
-  descriptor->extra_expression_stack_count_ = 1;
+  descriptor->function_mode_ = JS_FUNCTION_STUB_MODE;
   descriptor->deoptimization_handler_ =
       FUNCTION_ADDR(ArrayConstructor_StubFailure);
 }
@@ -6783,8 +6783,10 @@ void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
   __ movq(rbx, MemOperand(rbp, parameter_count_offset));
   masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
   __ pop(rcx);
-  __ lea(rsp, MemOperand(rsp, rbx, times_pointer_size,
-                         extra_expression_stack_count_ * kPointerSize));
+  int additional_offset = function_mode_ == JS_FUNCTION_STUB_MODE
+      ? kPointerSize
+      : 0;
+  __ lea(rsp, MemOperand(rsp, rbx, times_pointer_size, additional_offset));
   __ jmp(rcx);  // Return to IC Miss stub, continuation still on stack.
 }
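
For the deoptimizer side, a rough standalone model (assumption: simplified types and helpers, not V8 internals) of the new DeoptimizerTranslatedValueType handling in the hunks above: a value translated as TRANSLATED_VALUE_IS_NATIVE is written to the output frame as a raw word (this is how the variable argument count reaches args.length), while a TRANSLATED_VALUE_IS_TAGGED value is smi-tagged when it fits and otherwise replaced by a GC-safe placeholder until a heap number is materialized.

#include <cstdint>
#include <cstdio>

// Mirrors the enum added to deoptimizer.h.
enum DeoptimizerTranslatedValueType {
  TRANSLATED_VALUE_IS_NATIVE,
  TRANSLATED_VALUE_IS_TAGGED
};

// Stand-ins for Smi::IsValid / Smi::FromInt on a 32-bit build (assumption:
// 31-bit payload, tag bit 0); kPlaceholder plays the role of Smi::FromInt(0).
static bool SmiFits(int32_t value) {
  return value >= -(1 << 30) && value < (1 << 30);
}
static intptr_t SmiTag(int32_t value) {
  return static_cast<intptr_t>(value) << 1;
}
static const intptr_t kPlaceholder = 0;

// Returns the word stored into the output frame slot; *deferred is set when
// the untagged value must be saved on the side (AddDoubleValue in the patch).
static intptr_t TranslateInt32(int32_t value,
                               DeoptimizerTranslatedValueType value_type,
                               bool* deferred) {
  *deferred = false;
  if (value_type == TRANSLATED_VALUE_IS_NATIVE) return value;  // raw, untagged
  if (SmiFits(value)) return SmiTag(value);                    // tagged smi
  *deferred = true;                                            // heap number later
  return kPlaceholder;
}

int main() {
  bool deferred;
  std::printf("native: %ld\n", static_cast<long>(
      TranslateInt32(7, TRANSLATED_VALUE_IS_NATIVE, &deferred)));
  std::printf("tagged: %ld\n", static_cast<long>(
      TranslateInt32(7, TRANSLATED_VALUE_IS_TAGGED, &deferred)));
  return 0;
}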