move CallApiFunctionAndReturn to code-stubs-*

BUG=

Review URL: https://codereview.chromium.org/860013002

Cr-Commit-Position: refs/heads/master@{#26167}
dcarney 2015-01-20 08:04:51 -08:00 committed by Commit bot
parent 3fd5b96f3b
commit e62d974ba1
18 changed files with 1005 additions and 1028 deletions
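
For context, the change is mechanical: on each architecture below, CallApiFunctionAndReturn is deleted as a MacroAssembler member function and re-created as a file-local static helper in the corresponding code-stubs-*.cc file, taking the MacroAssembler as an explicit parameter so it can live next to its only callers. A minimal sketch of that pattern, with illustrative names only (this is not V8 code):

#include <cstdio>

class Assembler {
 public:
  void EmitMove(int dst, int src) { std::printf("mov r%d, r%d\n", dst, src); }
};

// Before: the helper was a member, e.g.
//   void Assembler::CallApiFunctionAndReturn() { EmitMove(0, 1); }
// After: a static, file-local function that receives the assembler explicitly.
static void CallApiFunctionAndReturn(Assembler* masm) {
  masm->EmitMove(0, 1);
}

int main() {
  Assembler masm;
  CallApiFunctionAndReturn(&masm);
  return 0;
}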


@@ -4610,6 +4610,148 @@ void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
}
static int AddressOffset(ExternalReference ref0, ExternalReference ref1) {
return ref0.address() - ref1.address();
}
// Calls an API function. Allocates HandleScope, extracts returned value
// from handle and propagates exceptions. Restores context. stack_space
// - space to be unwound on exit (includes the call JS arguments space and
// the additional space allocated for the fast call).
static void CallApiFunctionAndReturn(MacroAssembler* masm,
Register function_address,
ExternalReference thunk_ref,
int stack_space,
MemOperand* stack_space_operand,
MemOperand return_value_operand,
MemOperand* context_restore_operand) {
Isolate* isolate = masm->isolate();
ExternalReference next_address =
ExternalReference::handle_scope_next_address(isolate);
const int kNextOffset = 0;
const int kLimitOffset = AddressOffset(
ExternalReference::handle_scope_limit_address(isolate), next_address);
const int kLevelOffset = AddressOffset(
ExternalReference::handle_scope_level_address(isolate), next_address);
DCHECK(function_address.is(r1) || function_address.is(r2));
Label profiler_disabled;
Label end_profiler_check;
__ mov(r9, Operand(ExternalReference::is_profiling_address(isolate)));
__ ldrb(r9, MemOperand(r9, 0));
__ cmp(r9, Operand(0));
__ b(eq, &profiler_disabled);
// Additional parameter is the address of the actual callback.
__ mov(r3, Operand(thunk_ref));
__ jmp(&end_profiler_check);
__ bind(&profiler_disabled);
__ Move(r3, function_address);
__ bind(&end_profiler_check);
// Allocate HandleScope in callee-save registers.
__ mov(r9, Operand(next_address));
__ ldr(r4, MemOperand(r9, kNextOffset));
__ ldr(r5, MemOperand(r9, kLimitOffset));
__ ldr(r6, MemOperand(r9, kLevelOffset));
__ add(r6, r6, Operand(1));
__ str(r6, MemOperand(r9, kLevelOffset));
if (FLAG_log_timer_events) {
FrameScope frame(masm, StackFrame::MANUAL);
__ PushSafepointRegisters();
__ PrepareCallCFunction(1, r0);
__ mov(r0, Operand(ExternalReference::isolate_address(isolate)));
__ CallCFunction(ExternalReference::log_enter_external_function(isolate),
1);
__ PopSafepointRegisters();
}
// Native call returns to the DirectCEntry stub which redirects to the
// return address pushed on stack (could have moved after GC).
// DirectCEntry stub itself is generated early and never moves.
DirectCEntryStub stub(isolate);
stub.GenerateCall(masm, r3);
if (FLAG_log_timer_events) {
FrameScope frame(masm, StackFrame::MANUAL);
__ PushSafepointRegisters();
__ PrepareCallCFunction(1, r0);
__ mov(r0, Operand(ExternalReference::isolate_address(isolate)));
__ CallCFunction(ExternalReference::log_leave_external_function(isolate),
1);
__ PopSafepointRegisters();
}
Label promote_scheduled_exception;
Label exception_handled;
Label delete_allocated_handles;
Label leave_exit_frame;
Label return_value_loaded;
// load value from ReturnValue
__ ldr(r0, return_value_operand);
__ bind(&return_value_loaded);
// No more valid handles (the result handle was the last one). Restore
// previous handle scope.
__ str(r4, MemOperand(r9, kNextOffset));
if (__ emit_debug_code()) {
__ ldr(r1, MemOperand(r9, kLevelOffset));
__ cmp(r1, r6);
__ Check(eq, kUnexpectedLevelAfterReturnFromApiCall);
}
__ sub(r6, r6, Operand(1));
__ str(r6, MemOperand(r9, kLevelOffset));
__ ldr(ip, MemOperand(r9, kLimitOffset));
__ cmp(r5, ip);
__ b(ne, &delete_allocated_handles);
// Check if the function scheduled an exception.
__ bind(&leave_exit_frame);
__ LoadRoot(r4, Heap::kTheHoleValueRootIndex);
__ mov(ip, Operand(ExternalReference::scheduled_exception_address(isolate)));
__ ldr(r5, MemOperand(ip));
__ cmp(r4, r5);
__ b(ne, &promote_scheduled_exception);
__ bind(&exception_handled);
bool restore_context = context_restore_operand != NULL;
if (restore_context) {
__ ldr(cp, *context_restore_operand);
}
// LeaveExitFrame expects unwind space to be in a register.
if (stack_space_operand != NULL) {
__ ldr(r4, *stack_space_operand);
} else {
__ mov(r4, Operand(stack_space));
}
__ LeaveExitFrame(false, r4, !restore_context, stack_space_operand != NULL);
__ mov(pc, lr);
__ bind(&promote_scheduled_exception);
{
FrameScope frame(masm, StackFrame::INTERNAL);
__ CallExternalReference(
ExternalReference(Runtime::kPromoteScheduledException, isolate), 0);
}
__ jmp(&exception_handled);
// HandleScope limit has changed. Delete allocated extensions.
__ bind(&delete_allocated_handles);
__ str(r5, MemOperand(r9, kLimitOffset));
__ mov(r4, r0);
__ PrepareCallCFunction(1, r5);
__ mov(r0, Operand(ExternalReference::isolate_address(isolate)));
__ CallCFunction(ExternalReference::delete_handle_scope_extensions(isolate),
1);
__ mov(r0, r4);
__ jmp(&leave_exit_frame);
}
static void CallApiFunctionStubHelper(MacroAssembler* masm,
                                      const ParameterCount& argc,
                                      bool return_first_arg,
@@ -4733,9 +4875,9 @@ static void CallApiFunctionStubHelper(MacroAssembler* masm,
stack_space = argc.immediate() + FCA::kArgsLength + 1;
stack_space_operand = NULL;
}
- __ CallApiFunctionAndReturn(api_function_address, thunk_ref, stack_space,
-                             stack_space_operand, return_value_operand,
-                             &context_restore_operand);
+ CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, stack_space,
+                          stack_space_operand, return_value_operand,
+                          &context_restore_operand);
}
@@ -4782,9 +4924,9 @@ void CallApiGetterStub::Generate(MacroAssembler* masm) {
ExternalReference thunk_ref =
    ExternalReference::invoke_accessor_getter_callback(isolate());
- __ CallApiFunctionAndReturn(api_function_address, thunk_ref,
-                             kStackUnwindSpace, NULL,
-                             MemOperand(fp, 6 * kPointerSize), NULL);
+ CallApiFunctionAndReturn(masm, api_function_address, thunk_ref,
+                          kStackUnwindSpace, NULL,
+                          MemOperand(fp, 6 * kPointerSize), NULL);
}
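
The moved bodies are otherwise unchanged; the visible difference is that assembler calls now go through the masm parameter via the file-local "__" macro convention used in V8's code-stubs files (ACCESS_MASM) instead of being plain member calls. A minimal, self-contained sketch of that convention follows; the types and the emitted instruction are toy stand-ins, and only the macro shape mirrors the real one:

#include <cstdio>

struct MacroAssembler {
  void mov(const char* dst, const char* src) { std::printf("mov %s, %s\n", dst, src); }
};

#define ACCESS_MASM(masm) masm->
#define __ ACCESS_MASM(masm)

static void EmitSomething(MacroAssembler* masm) {
  __ mov("r3", "r1");  // expands to masm->mov("r3", "r1")
}

#undef __
#undef ACCESS_MASM

int main() {
  MacroAssembler masm;
  EmitSomething(&masm);
  return 0;
}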


@@ -2361,141 +2361,6 @@ void MacroAssembler::TailCallStub(CodeStub* stub, Condition cond) {
}
static int AddressOffset(ExternalReference ref0, ExternalReference ref1) {
return ref0.address() - ref1.address();
}
void MacroAssembler::CallApiFunctionAndReturn(
Register function_address, ExternalReference thunk_ref, int stack_space,
MemOperand* stack_space_operand, MemOperand return_value_operand,
MemOperand* context_restore_operand) {
ExternalReference next_address =
ExternalReference::handle_scope_next_address(isolate());
const int kNextOffset = 0;
const int kLimitOffset = AddressOffset(
ExternalReference::handle_scope_limit_address(isolate()),
next_address);
const int kLevelOffset = AddressOffset(
ExternalReference::handle_scope_level_address(isolate()),
next_address);
DCHECK(function_address.is(r1) || function_address.is(r2));
Label profiler_disabled;
Label end_profiler_check;
mov(r9, Operand(ExternalReference::is_profiling_address(isolate())));
ldrb(r9, MemOperand(r9, 0));
cmp(r9, Operand(0));
b(eq, &profiler_disabled);
// Additional parameter is the address of the actual callback.
mov(r3, Operand(thunk_ref));
jmp(&end_profiler_check);
bind(&profiler_disabled);
Move(r3, function_address);
bind(&end_profiler_check);
// Allocate HandleScope in callee-save registers.
mov(r9, Operand(next_address));
ldr(r4, MemOperand(r9, kNextOffset));
ldr(r5, MemOperand(r9, kLimitOffset));
ldr(r6, MemOperand(r9, kLevelOffset));
add(r6, r6, Operand(1));
str(r6, MemOperand(r9, kLevelOffset));
if (FLAG_log_timer_events) {
FrameScope frame(this, StackFrame::MANUAL);
PushSafepointRegisters();
PrepareCallCFunction(1, r0);
mov(r0, Operand(ExternalReference::isolate_address(isolate())));
CallCFunction(ExternalReference::log_enter_external_function(isolate()), 1);
PopSafepointRegisters();
}
// Native call returns to the DirectCEntry stub which redirects to the
// return address pushed on stack (could have moved after GC).
// DirectCEntry stub itself is generated early and never moves.
DirectCEntryStub stub(isolate());
stub.GenerateCall(this, r3);
if (FLAG_log_timer_events) {
FrameScope frame(this, StackFrame::MANUAL);
PushSafepointRegisters();
PrepareCallCFunction(1, r0);
mov(r0, Operand(ExternalReference::isolate_address(isolate())));
CallCFunction(ExternalReference::log_leave_external_function(isolate()), 1);
PopSafepointRegisters();
}
Label promote_scheduled_exception;
Label exception_handled;
Label delete_allocated_handles;
Label leave_exit_frame;
Label return_value_loaded;
// load value from ReturnValue
ldr(r0, return_value_operand);
bind(&return_value_loaded);
// No more valid handles (the result handle was the last one). Restore
// previous handle scope.
str(r4, MemOperand(r9, kNextOffset));
if (emit_debug_code()) {
ldr(r1, MemOperand(r9, kLevelOffset));
cmp(r1, r6);
Check(eq, kUnexpectedLevelAfterReturnFromApiCall);
}
sub(r6, r6, Operand(1));
str(r6, MemOperand(r9, kLevelOffset));
ldr(ip, MemOperand(r9, kLimitOffset));
cmp(r5, ip);
b(ne, &delete_allocated_handles);
// Check if the function scheduled an exception.
bind(&leave_exit_frame);
LoadRoot(r4, Heap::kTheHoleValueRootIndex);
mov(ip, Operand(ExternalReference::scheduled_exception_address(isolate())));
ldr(r5, MemOperand(ip));
cmp(r4, r5);
b(ne, &promote_scheduled_exception);
bind(&exception_handled);
bool restore_context = context_restore_operand != NULL;
if (restore_context) {
ldr(cp, *context_restore_operand);
}
// LeaveExitFrame expects unwind space to be in a register.
if (stack_space_operand != NULL) {
ldr(r4, *stack_space_operand);
} else {
mov(r4, Operand(stack_space));
}
LeaveExitFrame(false, r4, !restore_context, stack_space_operand != NULL);
mov(pc, lr);
bind(&promote_scheduled_exception);
{
FrameScope frame(this, StackFrame::INTERNAL);
CallExternalReference(
ExternalReference(Runtime::kPromoteScheduledException, isolate()),
0);
}
jmp(&exception_handled);
// HandleScope limit has changed. Delete allocated extensions.
bind(&delete_allocated_handles);
str(r5, MemOperand(r9, kLimitOffset));
mov(r4, r0);
PrepareCallCFunction(1, r5);
mov(r0, Operand(ExternalReference::isolate_address(isolate())));
CallCFunction(
ExternalReference::delete_handle_scope_extensions(isolate()), 1);
mov(r0, r4);
jmp(&leave_exit_frame);
}
bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
return has_frame_ || !stub->SometimesSetsUpAFrame();
}


@@ -1114,16 +1114,6 @@ class MacroAssembler: public Assembler {
void MovFromFloatParameter(DwVfpRegister dst);
void MovFromFloatResult(DwVfpRegister dst);
// Calls an API function. Allocates HandleScope, extracts returned value
// from handle and propagates exceptions. Restores context. stack_space
// - space to be unwound on exit (includes the call JS arguments space and
// the additional space allocated for the fast call).
void CallApiFunctionAndReturn(Register function_address,
ExternalReference thunk_ref, int stack_space,
MemOperand* stack_space_operand,
MemOperand return_value_operand,
MemOperand* context_restore_operand);
// Jump to a runtime routine.
void JumpToExternalReference(const ExternalReference& builtin);


@@ -5043,6 +5043,175 @@ void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
}
// The number of register that CallApiFunctionAndReturn will need to save on
// the stack. The space for these registers need to be allocated in the
// ExitFrame before calling CallApiFunctionAndReturn.
static const int kCallApiFunctionSpillSpace = 4;
static int AddressOffset(ExternalReference ref0, ExternalReference ref1) {
return ref0.address() - ref1.address();
}
// Calls an API function. Allocates HandleScope, extracts returned value
// from handle and propagates exceptions.
// 'stack_space' is the space to be unwound on exit (includes the call JS
// arguments space and the additional space allocated for the fast call).
// 'spill_offset' is the offset from the stack pointer where
// CallApiFunctionAndReturn can spill registers.
static void CallApiFunctionAndReturn(
MacroAssembler* masm, Register function_address,
ExternalReference thunk_ref, int stack_space,
MemOperand* stack_space_operand, int spill_offset,
MemOperand return_value_operand, MemOperand* context_restore_operand) {
ASM_LOCATION("CallApiFunctionAndReturn");
Isolate* isolate = masm->isolate();
ExternalReference next_address =
ExternalReference::handle_scope_next_address(isolate);
const int kNextOffset = 0;
const int kLimitOffset = AddressOffset(
ExternalReference::handle_scope_limit_address(isolate), next_address);
const int kLevelOffset = AddressOffset(
ExternalReference::handle_scope_level_address(isolate), next_address);
DCHECK(function_address.is(x1) || function_address.is(x2));
Label profiler_disabled;
Label end_profiler_check;
__ Mov(x10, ExternalReference::is_profiling_address(isolate));
__ Ldrb(w10, MemOperand(x10));
__ Cbz(w10, &profiler_disabled);
__ Mov(x3, thunk_ref);
__ B(&end_profiler_check);
__ Bind(&profiler_disabled);
__ Mov(x3, function_address);
__ Bind(&end_profiler_check);
// Save the callee-save registers we are going to use.
// TODO(all): Is this necessary? ARM doesn't do it.
STATIC_ASSERT(kCallApiFunctionSpillSpace == 4);
__ Poke(x19, (spill_offset + 0) * kXRegSize);
__ Poke(x20, (spill_offset + 1) * kXRegSize);
__ Poke(x21, (spill_offset + 2) * kXRegSize);
__ Poke(x22, (spill_offset + 3) * kXRegSize);
// Allocate HandleScope in callee-save registers.
// We will need to restore the HandleScope after the call to the API function,
// by allocating it in callee-save registers they will be preserved by C code.
Register handle_scope_base = x22;
Register next_address_reg = x19;
Register limit_reg = x20;
Register level_reg = w21;
__ Mov(handle_scope_base, next_address);
__ Ldr(next_address_reg, MemOperand(handle_scope_base, kNextOffset));
__ Ldr(limit_reg, MemOperand(handle_scope_base, kLimitOffset));
__ Ldr(level_reg, MemOperand(handle_scope_base, kLevelOffset));
__ Add(level_reg, level_reg, 1);
__ Str(level_reg, MemOperand(handle_scope_base, kLevelOffset));
if (FLAG_log_timer_events) {
FrameScope frame(masm, StackFrame::MANUAL);
__ PushSafepointRegisters();
__ Mov(x0, ExternalReference::isolate_address(isolate));
__ CallCFunction(ExternalReference::log_enter_external_function(isolate),
1);
__ PopSafepointRegisters();
}
// Native call returns to the DirectCEntry stub which redirects to the
// return address pushed on stack (could have moved after GC).
// DirectCEntry stub itself is generated early and never moves.
DirectCEntryStub stub(isolate);
stub.GenerateCall(masm, x3);
if (FLAG_log_timer_events) {
FrameScope frame(masm, StackFrame::MANUAL);
__ PushSafepointRegisters();
__ Mov(x0, ExternalReference::isolate_address(isolate));
__ CallCFunction(ExternalReference::log_leave_external_function(isolate),
1);
__ PopSafepointRegisters();
}
Label promote_scheduled_exception;
Label exception_handled;
Label delete_allocated_handles;
Label leave_exit_frame;
Label return_value_loaded;
// Load value from ReturnValue.
__ Ldr(x0, return_value_operand);
__ Bind(&return_value_loaded);
// No more valid handles (the result handle was the last one). Restore
// previous handle scope.
__ Str(next_address_reg, MemOperand(handle_scope_base, kNextOffset));
if (__ emit_debug_code()) {
__ Ldr(w1, MemOperand(handle_scope_base, kLevelOffset));
__ Cmp(w1, level_reg);
__ Check(eq, kUnexpectedLevelAfterReturnFromApiCall);
}
__ Sub(level_reg, level_reg, 1);
__ Str(level_reg, MemOperand(handle_scope_base, kLevelOffset));
__ Ldr(x1, MemOperand(handle_scope_base, kLimitOffset));
__ Cmp(limit_reg, x1);
__ B(ne, &delete_allocated_handles);
__ Bind(&leave_exit_frame);
// Restore callee-saved registers.
__ Peek(x19, (spill_offset + 0) * kXRegSize);
__ Peek(x20, (spill_offset + 1) * kXRegSize);
__ Peek(x21, (spill_offset + 2) * kXRegSize);
__ Peek(x22, (spill_offset + 3) * kXRegSize);
// Check if the function scheduled an exception.
__ Mov(x5, ExternalReference::scheduled_exception_address(isolate));
__ Ldr(x5, MemOperand(x5));
__ JumpIfNotRoot(x5, Heap::kTheHoleValueRootIndex,
&promote_scheduled_exception);
__ Bind(&exception_handled);
bool restore_context = context_restore_operand != NULL;
if (restore_context) {
__ Ldr(cp, *context_restore_operand);
}
if (stack_space_operand != NULL) {
__ Ldr(w2, *stack_space_operand);
}
__ LeaveExitFrame(false, x1, !restore_context);
if (stack_space_operand != NULL) {
__ Drop(x2, 1);
} else {
__ Drop(stack_space);
}
__ Ret();
__ Bind(&promote_scheduled_exception);
{
FrameScope frame(masm, StackFrame::INTERNAL);
__ CallExternalReference(
ExternalReference(Runtime::kPromoteScheduledException, isolate), 0);
}
__ B(&exception_handled);
// HandleScope limit has changed. Delete allocated extensions.
__ Bind(&delete_allocated_handles);
__ Str(limit_reg, MemOperand(handle_scope_base, kLimitOffset));
// Save the return value in a callee-save register.
Register saved_result = x19;
__ Mov(saved_result, x0);
__ Mov(x0, ExternalReference::isolate_address(isolate));
__ CallCFunction(ExternalReference::delete_handle_scope_extensions(isolate),
1);
__ Mov(x0, saved_result);
__ B(&leave_exit_frame);
}
static void CallApiFunctionStubHelper(MacroAssembler* masm,
                                      const ParameterCount& argc,
                                      bool return_first_arg,
@@ -5160,9 +5329,9 @@ static void CallApiFunctionStubHelper(MacroAssembler* masm,
}
const int spill_offset = 1 + kApiStackSpace;
- __ CallApiFunctionAndReturn(api_function_address, thunk_ref, stack_space,
-                             stack_space_operand, spill_offset,
-                             return_value_operand, &context_restore_operand);
+ CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, stack_space,
+                          stack_space_operand, spill_offset,
+                          return_value_operand, &context_restore_operand);
}
@@ -5216,9 +5385,9 @@ void CallApiGetterStub::Generate(MacroAssembler* masm) {
ExternalReference::invoke_accessor_getter_callback(isolate());
const int spill_offset = 1 + kApiStackSpace;
- __ CallApiFunctionAndReturn(api_function_address, thunk_ref,
-                             kStackUnwindSpace, NULL, spill_offset,
-                             MemOperand(fp, 6 * kPointerSize), NULL);
+ CallApiFunctionAndReturn(masm, api_function_address, thunk_ref,
+                          kStackUnwindSpace, NULL, spill_offset,
+                          MemOperand(fp, 6 * kPointerSize), NULL);
}
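
As a side note on the ARM64 variant above, the spill slots for x19..x22 are addressed as (spill_offset + i) * kXRegSize bytes from the stack pointer. A tiny sketch of that arithmetic; the kApiStackSpace value below is illustrative and not taken from this diff:

#include <cstdio>

int main() {
  const int kXRegSize = 8;                      // bytes per ARM64 X register
  const int kCallApiFunctionSpillSpace = 4;     // x19..x22 are saved
  const int kApiStackSpace = 4;                 // illustrative value
  const int spill_offset = 1 + kApiStackSpace;  // as computed by the callers above
  for (int i = 0; i < kCallApiFunctionSpillSpace; ++i) {
    std::printf("x%d -> [sp + %d]\n", 19 + i, (spill_offset + i) * kXRegSize);
  }
  return 0;
}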


@@ -1748,161 +1748,6 @@ void MacroAssembler::CallRuntime(const Runtime::Function* f,
}
static int AddressOffset(ExternalReference ref0, ExternalReference ref1) {
return ref0.address() - ref1.address();
}
void MacroAssembler::CallApiFunctionAndReturn(
Register function_address, ExternalReference thunk_ref, int stack_space,
MemOperand* stack_space_operand, int spill_offset,
MemOperand return_value_operand, MemOperand* context_restore_operand) {
ASM_LOCATION("CallApiFunctionAndReturn");
ExternalReference next_address =
ExternalReference::handle_scope_next_address(isolate());
const int kNextOffset = 0;
const int kLimitOffset = AddressOffset(
ExternalReference::handle_scope_limit_address(isolate()),
next_address);
const int kLevelOffset = AddressOffset(
ExternalReference::handle_scope_level_address(isolate()),
next_address);
DCHECK(function_address.is(x1) || function_address.is(x2));
Label profiler_disabled;
Label end_profiler_check;
Mov(x10, ExternalReference::is_profiling_address(isolate()));
Ldrb(w10, MemOperand(x10));
Cbz(w10, &profiler_disabled);
Mov(x3, thunk_ref);
B(&end_profiler_check);
Bind(&profiler_disabled);
Mov(x3, function_address);
Bind(&end_profiler_check);
// Save the callee-save registers we are going to use.
// TODO(all): Is this necessary? ARM doesn't do it.
STATIC_ASSERT(kCallApiFunctionSpillSpace == 4);
Poke(x19, (spill_offset + 0) * kXRegSize);
Poke(x20, (spill_offset + 1) * kXRegSize);
Poke(x21, (spill_offset + 2) * kXRegSize);
Poke(x22, (spill_offset + 3) * kXRegSize);
// Allocate HandleScope in callee-save registers.
// We will need to restore the HandleScope after the call to the API function,
// by allocating it in callee-save registers they will be preserved by C code.
Register handle_scope_base = x22;
Register next_address_reg = x19;
Register limit_reg = x20;
Register level_reg = w21;
Mov(handle_scope_base, next_address);
Ldr(next_address_reg, MemOperand(handle_scope_base, kNextOffset));
Ldr(limit_reg, MemOperand(handle_scope_base, kLimitOffset));
Ldr(level_reg, MemOperand(handle_scope_base, kLevelOffset));
Add(level_reg, level_reg, 1);
Str(level_reg, MemOperand(handle_scope_base, kLevelOffset));
if (FLAG_log_timer_events) {
FrameScope frame(this, StackFrame::MANUAL);
PushSafepointRegisters();
Mov(x0, ExternalReference::isolate_address(isolate()));
CallCFunction(ExternalReference::log_enter_external_function(isolate()), 1);
PopSafepointRegisters();
}
// Native call returns to the DirectCEntry stub which redirects to the
// return address pushed on stack (could have moved after GC).
// DirectCEntry stub itself is generated early and never moves.
DirectCEntryStub stub(isolate());
stub.GenerateCall(this, x3);
if (FLAG_log_timer_events) {
FrameScope frame(this, StackFrame::MANUAL);
PushSafepointRegisters();
Mov(x0, ExternalReference::isolate_address(isolate()));
CallCFunction(ExternalReference::log_leave_external_function(isolate()), 1);
PopSafepointRegisters();
}
Label promote_scheduled_exception;
Label exception_handled;
Label delete_allocated_handles;
Label leave_exit_frame;
Label return_value_loaded;
// Load value from ReturnValue.
Ldr(x0, return_value_operand);
Bind(&return_value_loaded);
// No more valid handles (the result handle was the last one). Restore
// previous handle scope.
Str(next_address_reg, MemOperand(handle_scope_base, kNextOffset));
if (emit_debug_code()) {
Ldr(w1, MemOperand(handle_scope_base, kLevelOffset));
Cmp(w1, level_reg);
Check(eq, kUnexpectedLevelAfterReturnFromApiCall);
}
Sub(level_reg, level_reg, 1);
Str(level_reg, MemOperand(handle_scope_base, kLevelOffset));
Ldr(x1, MemOperand(handle_scope_base, kLimitOffset));
Cmp(limit_reg, x1);
B(ne, &delete_allocated_handles);
Bind(&leave_exit_frame);
// Restore callee-saved registers.
Peek(x19, (spill_offset + 0) * kXRegSize);
Peek(x20, (spill_offset + 1) * kXRegSize);
Peek(x21, (spill_offset + 2) * kXRegSize);
Peek(x22, (spill_offset + 3) * kXRegSize);
// Check if the function scheduled an exception.
Mov(x5, ExternalReference::scheduled_exception_address(isolate()));
Ldr(x5, MemOperand(x5));
JumpIfNotRoot(x5, Heap::kTheHoleValueRootIndex, &promote_scheduled_exception);
Bind(&exception_handled);
bool restore_context = context_restore_operand != NULL;
if (restore_context) {
Ldr(cp, *context_restore_operand);
}
if (stack_space_operand != NULL) {
Ldr(w2, *stack_space_operand);
}
LeaveExitFrame(false, x1, !restore_context);
if (stack_space_operand != NULL) {
Drop(x2, 1);
} else {
Drop(stack_space);
}
Ret();
Bind(&promote_scheduled_exception);
{
FrameScope frame(this, StackFrame::INTERNAL);
CallExternalReference(
ExternalReference(
Runtime::kPromoteScheduledException, isolate()), 0);
}
B(&exception_handled);
// HandleScope limit has changed. Delete allocated extensions.
Bind(&delete_allocated_handles);
Str(limit_reg, MemOperand(handle_scope_base, kLimitOffset));
// Save the return value in a callee-save register.
Register saved_result = x19;
Mov(saved_result, x0);
Mov(x0, ExternalReference::isolate_address(isolate()));
CallCFunction(
ExternalReference::delete_handle_scope_extensions(isolate()), 1);
Mov(x0, saved_result);
B(&leave_exit_frame);
}
void MacroAssembler::CallExternalReference(const ExternalReference& ext,
                                           int num_arguments) {
Mov(x0, num_arguments);


@@ -1130,24 +1130,6 @@ class MacroAssembler : public Assembler {
int num_reg_arguments,
int num_double_arguments);
// Calls an API function. Allocates HandleScope, extracts returned value
// from handle and propagates exceptions.
// 'stack_space' is the space to be unwound on exit (includes the call JS
// arguments space and the additional space allocated for the fast call).
// 'spill_offset' is the offset from the stack pointer where
// CallApiFunctionAndReturn can spill registers.
void CallApiFunctionAndReturn(Register function_address,
ExternalReference thunk_ref, int stack_space,
MemOperand* stack_space_operand,
int spill_offset,
MemOperand return_value_operand,
MemOperand* context_restore_operand);
// The number of register that CallApiFunctionAndReturn will need to save on
// the stack. The space for these registers need to be allocated in the
// ExitFrame before calling CallApiFunctionAndReturn.
static const int kCallApiFunctionSpillSpace = 4;
// Jump to a runtime routine.
void JumpToExternalReference(const ExternalReference& builtin);
// Tail call of a runtime routine (jump).


@@ -4681,6 +4681,193 @@ void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
}
// Generates an Operand for saving parameters after PrepareCallApiFunction.
static Operand ApiParameterOperand(int index) {
return Operand(esp, index * kPointerSize);
}
// Prepares stack to put arguments (aligns and so on). Reserves
// space for return value if needed (assumes the return value is a handle).
// Arguments must be stored in ApiParameterOperand(0), ApiParameterOperand(1)
// etc. Saves context (esi). If space was reserved for return value then
// stores the pointer to the reserved slot into esi.
static void PrepareCallApiFunction(MacroAssembler* masm, int argc) {
__ EnterApiExitFrame(argc);
if (__ emit_debug_code()) {
__ mov(esi, Immediate(bit_cast<int32_t>(kZapValue)));
}
}
// Calls an API function. Allocates HandleScope, extracts returned value
// from handle and propagates exceptions. Clobbers ebx, edi and
// caller-save registers. Restores context. On return removes
// stack_space * kPointerSize (GCed).
static void CallApiFunctionAndReturn(MacroAssembler* masm,
Register function_address,
ExternalReference thunk_ref,
Operand thunk_last_arg, int stack_space,
Operand* stack_space_operand,
Operand return_value_operand,
Operand* context_restore_operand) {
Isolate* isolate = masm->isolate();
ExternalReference next_address =
ExternalReference::handle_scope_next_address(isolate);
ExternalReference limit_address =
ExternalReference::handle_scope_limit_address(isolate);
ExternalReference level_address =
ExternalReference::handle_scope_level_address(isolate);
DCHECK(edx.is(function_address));
// Allocate HandleScope in callee-save registers.
__ mov(ebx, Operand::StaticVariable(next_address));
__ mov(edi, Operand::StaticVariable(limit_address));
__ add(Operand::StaticVariable(level_address), Immediate(1));
if (FLAG_log_timer_events) {
FrameScope frame(masm, StackFrame::MANUAL);
__ PushSafepointRegisters();
__ PrepareCallCFunction(1, eax);
__ mov(Operand(esp, 0),
Immediate(ExternalReference::isolate_address(isolate)));
__ CallCFunction(ExternalReference::log_enter_external_function(isolate),
1);
__ PopSafepointRegisters();
}
Label profiler_disabled;
Label end_profiler_check;
__ mov(eax, Immediate(ExternalReference::is_profiling_address(isolate)));
__ cmpb(Operand(eax, 0), 0);
__ j(zero, &profiler_disabled);
// Additional parameter is the address of the actual getter function.
__ mov(thunk_last_arg, function_address);
// Call the api function.
__ mov(eax, Immediate(thunk_ref));
__ call(eax);
__ jmp(&end_profiler_check);
__ bind(&profiler_disabled);
// Call the api function.
__ call(function_address);
__ bind(&end_profiler_check);
if (FLAG_log_timer_events) {
FrameScope frame(masm, StackFrame::MANUAL);
__ PushSafepointRegisters();
__ PrepareCallCFunction(1, eax);
__ mov(Operand(esp, 0),
Immediate(ExternalReference::isolate_address(isolate)));
__ CallCFunction(ExternalReference::log_leave_external_function(isolate),
1);
__ PopSafepointRegisters();
}
Label prologue;
// Load the value from ReturnValue
__ mov(eax, return_value_operand);
Label promote_scheduled_exception;
Label exception_handled;
Label delete_allocated_handles;
Label leave_exit_frame;
__ bind(&prologue);
// No more valid handles (the result handle was the last one). Restore
// previous handle scope.
__ mov(Operand::StaticVariable(next_address), ebx);
__ sub(Operand::StaticVariable(level_address), Immediate(1));
__ Assert(above_equal, kInvalidHandleScopeLevel);
__ cmp(edi, Operand::StaticVariable(limit_address));
__ j(not_equal, &delete_allocated_handles);
__ bind(&leave_exit_frame);
// Check if the function scheduled an exception.
ExternalReference scheduled_exception_address =
ExternalReference::scheduled_exception_address(isolate);
__ cmp(Operand::StaticVariable(scheduled_exception_address),
Immediate(isolate->factory()->the_hole_value()));
__ j(not_equal, &promote_scheduled_exception);
__ bind(&exception_handled);
#if DEBUG
// Check if the function returned a valid JavaScript value.
Label ok;
Register return_value = eax;
Register map = ecx;
__ JumpIfSmi(return_value, &ok, Label::kNear);
__ mov(map, FieldOperand(return_value, HeapObject::kMapOffset));
__ CmpInstanceType(map, LAST_NAME_TYPE);
__ j(below_equal, &ok, Label::kNear);
__ CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE);
__ j(above_equal, &ok, Label::kNear);
__ cmp(map, isolate->factory()->heap_number_map());
__ j(equal, &ok, Label::kNear);
__ cmp(return_value, isolate->factory()->undefined_value());
__ j(equal, &ok, Label::kNear);
__ cmp(return_value, isolate->factory()->true_value());
__ j(equal, &ok, Label::kNear);
__ cmp(return_value, isolate->factory()->false_value());
__ j(equal, &ok, Label::kNear);
__ cmp(return_value, isolate->factory()->null_value());
__ j(equal, &ok, Label::kNear);
__ Abort(kAPICallReturnedInvalidObject);
__ bind(&ok);
#endif
bool restore_context = context_restore_operand != NULL;
if (restore_context) {
__ mov(esi, *context_restore_operand);
}
if (stack_space_operand != nullptr) {
__ mov(ebx, *stack_space_operand);
}
__ LeaveApiExitFrame(!restore_context);
if (stack_space_operand != nullptr) {
DCHECK_EQ(0, stack_space);
__ pop(ecx);
__ add(esp, ebx);
__ jmp(ecx);
} else {
__ ret(stack_space * kPointerSize);
}
__ bind(&promote_scheduled_exception);
{
FrameScope frame(masm, StackFrame::INTERNAL);
__ CallRuntime(Runtime::kPromoteScheduledException, 0);
}
__ jmp(&exception_handled);
// HandleScope limit has changed. Delete allocated extensions.
ExternalReference delete_extensions =
ExternalReference::delete_handle_scope_extensions(isolate);
__ bind(&delete_allocated_handles);
__ mov(Operand::StaticVariable(limit_address), edi);
__ mov(edi, eax);
__ mov(Operand(esp, 0),
Immediate(ExternalReference::isolate_address(isolate)));
__ mov(eax, Immediate(delete_extensions));
__ call(eax);
__ mov(eax, edi);
__ jmp(&leave_exit_frame);
}
static void CallApiFunctionStubHelper(MacroAssembler* masm,
                                      const ParameterCount& argc,
                                      bool return_first_arg,
@@ -4763,7 +4950,7 @@ static void CallApiFunctionStubHelper(MacroAssembler* masm,
// it's not controlled by GC.
const int kApiStackSpace = 4;
- __ PrepareCallApiFunction(kApiArgc + kApiStackSpace);
+ PrepareCallApiFunction(masm, kApiArgc + kApiStackSpace);
// FunctionCallbackInfo::implicit_args_.
__ mov(ApiParameterOperand(2), scratch);
@@ -4813,9 +5000,10 @@ static void CallApiFunctionStubHelper(MacroAssembler* masm,
stack_space = argc.immediate() + FCA::kArgsLength + 1;
stack_space_operand = nullptr;
}
- __ CallApiFunctionAndReturn(
-     api_function_address, thunk_ref, ApiParameterOperand(1), stack_space,
-     stack_space_operand, return_value_operand, &context_restore_operand);
+ CallApiFunctionAndReturn(masm, api_function_address, thunk_ref,
+                          ApiParameterOperand(1), stack_space,
+                          stack_space_operand, return_value_operand,
+                          &context_restore_operand);
}
@@ -4859,7 +5047,7 @@ void CallApiGetterStub::Generate(MacroAssembler* masm) {
// load address of name
__ lea(scratch, Operand(esp, 1 * kPointerSize));
- __ PrepareCallApiFunction(kApiArgc);
+ PrepareCallApiFunction(masm, kApiArgc);
__ mov(ApiParameterOperand(0), scratch);  // name.
__ add(scratch, Immediate(kPointerSize));
__ mov(ApiParameterOperand(1), scratch);  // arguments pointer.
@@ -4867,9 +5055,9 @@ void CallApiGetterStub::Generate(MacroAssembler* masm) {
ExternalReference thunk_ref =
    ExternalReference::invoke_accessor_getter_callback(isolate());
- __ CallApiFunctionAndReturn(api_function_address, thunk_ref,
-                             ApiParameterOperand(2), kStackSpace, nullptr,
-                             Operand(ebp, 7 * kPointerSize), NULL);
+ CallApiFunctionAndReturn(masm, api_function_address, thunk_ref,
+                          ApiParameterOperand(2), kStackSpace, nullptr,
+                          Operand(ebp, 7 * kPointerSize), NULL);
}
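
Every per-architecture body above implements the same HandleScope bookkeeping: remember next and limit, bump level, run the callback, then restore next, drop level, and call delete_handle_scope_extensions only if limit moved. The following is an illustrative-only simulation of that logic in plain C++; the struct and field names are invented for this sketch and are not V8 API:

#include <cstdio>

struct HandleScopeData {
  int next = 0;    // index of the next free handle slot
  int limit = 16;  // end of the currently allocated block
  int level = 0;   // nesting depth of open scopes
};

int main() {
  HandleScopeData data;
  // Prologue: remember the scope state and open one more level.
  const int saved_next = data.next;
  const int saved_limit = data.limit;
  data.level++;
  // ... the API callback runs here and may allocate handles,
  // possibly growing the block (which would change data.limit) ...
  data.next = 3;
  // Epilogue: drop every handle the callback created.
  data.next = saved_next;
  data.level--;
  if (data.limit != saved_limit) {
    // Extensions were allocated; the generated code calls the
    // delete_handle_scope_extensions C function at this point.
    data.limit = saved_limit;
  }
  std::printf("next=%d limit=%d level=%d\n", data.next, data.limit, data.level);
  return 0;
}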


@@ -2084,176 +2084,6 @@ void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
}
Operand ApiParameterOperand(int index) {
return Operand(esp, index * kPointerSize);
}
void MacroAssembler::PrepareCallApiFunction(int argc) {
EnterApiExitFrame(argc);
if (emit_debug_code()) {
mov(esi, Immediate(bit_cast<int32_t>(kZapValue)));
}
}
void MacroAssembler::CallApiFunctionAndReturn(
Register function_address, ExternalReference thunk_ref,
Operand thunk_last_arg, int stack_space, Operand* stack_space_operand,
Operand return_value_operand, Operand* context_restore_operand) {
ExternalReference next_address =
ExternalReference::handle_scope_next_address(isolate());
ExternalReference limit_address =
ExternalReference::handle_scope_limit_address(isolate());
ExternalReference level_address =
ExternalReference::handle_scope_level_address(isolate());
DCHECK(edx.is(function_address));
// Allocate HandleScope in callee-save registers.
mov(ebx, Operand::StaticVariable(next_address));
mov(edi, Operand::StaticVariable(limit_address));
add(Operand::StaticVariable(level_address), Immediate(1));
if (FLAG_log_timer_events) {
FrameScope frame(this, StackFrame::MANUAL);
PushSafepointRegisters();
PrepareCallCFunction(1, eax);
mov(Operand(esp, 0),
Immediate(ExternalReference::isolate_address(isolate())));
CallCFunction(ExternalReference::log_enter_external_function(isolate()), 1);
PopSafepointRegisters();
}
Label profiler_disabled;
Label end_profiler_check;
mov(eax, Immediate(ExternalReference::is_profiling_address(isolate())));
cmpb(Operand(eax, 0), 0);
j(zero, &profiler_disabled);
// Additional parameter is the address of the actual getter function.
mov(thunk_last_arg, function_address);
// Call the api function.
mov(eax, Immediate(thunk_ref));
call(eax);
jmp(&end_profiler_check);
bind(&profiler_disabled);
// Call the api function.
call(function_address);
bind(&end_profiler_check);
if (FLAG_log_timer_events) {
FrameScope frame(this, StackFrame::MANUAL);
PushSafepointRegisters();
PrepareCallCFunction(1, eax);
mov(Operand(esp, 0),
Immediate(ExternalReference::isolate_address(isolate())));
CallCFunction(ExternalReference::log_leave_external_function(isolate()), 1);
PopSafepointRegisters();
}
Label prologue;
// Load the value from ReturnValue
mov(eax, return_value_operand);
Label promote_scheduled_exception;
Label exception_handled;
Label delete_allocated_handles;
Label leave_exit_frame;
bind(&prologue);
// No more valid handles (the result handle was the last one). Restore
// previous handle scope.
mov(Operand::StaticVariable(next_address), ebx);
sub(Operand::StaticVariable(level_address), Immediate(1));
Assert(above_equal, kInvalidHandleScopeLevel);
cmp(edi, Operand::StaticVariable(limit_address));
j(not_equal, &delete_allocated_handles);
bind(&leave_exit_frame);
// Check if the function scheduled an exception.
ExternalReference scheduled_exception_address =
ExternalReference::scheduled_exception_address(isolate());
cmp(Operand::StaticVariable(scheduled_exception_address),
Immediate(isolate()->factory()->the_hole_value()));
j(not_equal, &promote_scheduled_exception);
bind(&exception_handled);
#if DEBUG
// Check if the function returned a valid JavaScript value.
Label ok;
Register return_value = eax;
Register map = ecx;
JumpIfSmi(return_value, &ok, Label::kNear);
mov(map, FieldOperand(return_value, HeapObject::kMapOffset));
CmpInstanceType(map, LAST_NAME_TYPE);
j(below_equal, &ok, Label::kNear);
CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE);
j(above_equal, &ok, Label::kNear);
cmp(map, isolate()->factory()->heap_number_map());
j(equal, &ok, Label::kNear);
cmp(return_value, isolate()->factory()->undefined_value());
j(equal, &ok, Label::kNear);
cmp(return_value, isolate()->factory()->true_value());
j(equal, &ok, Label::kNear);
cmp(return_value, isolate()->factory()->false_value());
j(equal, &ok, Label::kNear);
cmp(return_value, isolate()->factory()->null_value());
j(equal, &ok, Label::kNear);
Abort(kAPICallReturnedInvalidObject);
bind(&ok);
#endif
bool restore_context = context_restore_operand != NULL;
if (restore_context) {
mov(esi, *context_restore_operand);
}
if (stack_space_operand != nullptr) {
mov(ebx, *stack_space_operand);
}
LeaveApiExitFrame(!restore_context);
if (stack_space_operand != nullptr) {
DCHECK_EQ(0, stack_space);
pop(ecx);
add(esp, ebx);
jmp(ecx);
} else {
ret(stack_space * kPointerSize);
}
bind(&promote_scheduled_exception);
{
FrameScope frame(this, StackFrame::INTERNAL);
CallRuntime(Runtime::kPromoteScheduledException, 0);
}
jmp(&exception_handled);
// HandleScope limit has changed. Delete allocated extensions.
ExternalReference delete_extensions =
ExternalReference::delete_handle_scope_extensions(isolate());
bind(&delete_allocated_handles);
mov(Operand::StaticVariable(limit_address), edi);
mov(edi, eax);
mov(Operand(esp, 0),
Immediate(ExternalReference::isolate_address(isolate())));
mov(eax, Immediate(delete_extensions));
call(eax);
mov(eax, edi);
jmp(&leave_exit_frame);
}
void MacroAssembler::JumpToExternalReference(const ExternalReference& ext) {
// Set the entry point and jump to the C entry runtime stub.
mov(ebx, Immediate(ext));


@@ -791,24 +791,6 @@ class MacroAssembler: public Assembler {
void CallCFunction(ExternalReference function, int num_arguments);
void CallCFunction(Register function, int num_arguments);
// Prepares stack to put arguments (aligns and so on). Reserves
// space for return value if needed (assumes the return value is a handle).
// Arguments must be stored in ApiParameterOperand(0), ApiParameterOperand(1)
// etc. Saves context (esi). If space was reserved for return value then
// stores the pointer to the reserved slot into esi.
void PrepareCallApiFunction(int argc);
// Calls an API function. Allocates HandleScope, extracts returned value
// from handle and propagates exceptions. Clobbers ebx, edi and
// caller-save registers. Restores context. On return removes
// stack_space * kPointerSize (GCed).
void CallApiFunctionAndReturn(Register function_address,
ExternalReference thunk_ref,
Operand thunk_last_arg, int stack_space,
Operand* stack_space_operand,
Operand return_value_operand,
Operand* context_restore_operand);
// Jump to a runtime routine.
void JumpToExternalReference(const ExternalReference& ext);
@@ -1089,10 +1071,6 @@ inline Operand GlobalObjectOperand() {
}
// Generates an Operand for saving parameters after PrepareCallApiFunction.
Operand ApiParameterOperand(int index);
#ifdef GENERATED_CODE_COVERAGE
extern void LogGeneratedCodeCoverage(const char* file_line);
#define CODE_COVERAGE_STRINGIFY(x) #x


@@ -4833,6 +4833,145 @@ void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
}
static int AddressOffset(ExternalReference ref0, ExternalReference ref1) {
return ref0.address() - ref1.address();
}
// Calls an API function. Allocates HandleScope, extracts returned value
// from handle and propagates exceptions. Restores context. stack_space
// - space to be unwound on exit (includes the call JS arguments space and
// the additional space allocated for the fast call).
static void CallApiFunctionAndReturn(MacroAssembler* masm,
Register function_address,
ExternalReference thunk_ref,
int stack_space,
MemOperand* stack_space_operand,
MemOperand return_value_operand,
MemOperand* context_restore_operand) {
Isolate* isolate = masm->isolate();
ExternalReference next_address =
ExternalReference::handle_scope_next_address(isolate);
const int kNextOffset = 0;
const int kLimitOffset = AddressOffset(
ExternalReference::handle_scope_limit_address(isolate), next_address);
const int kLevelOffset = AddressOffset(
ExternalReference::handle_scope_level_address(isolate), next_address);
DCHECK(function_address.is(a1) || function_address.is(a2));
Label profiler_disabled;
Label end_profiler_check;
__ li(t9, Operand(ExternalReference::is_profiling_address(isolate)));
__ lb(t9, MemOperand(t9, 0));
__ Branch(&profiler_disabled, eq, t9, Operand(zero_reg));
// Additional parameter is the address of the actual callback.
__ li(t9, Operand(thunk_ref));
__ jmp(&end_profiler_check);
__ bind(&profiler_disabled);
__ mov(t9, function_address);
__ bind(&end_profiler_check);
// Allocate HandleScope in callee-save registers.
__ li(s3, Operand(next_address));
__ lw(s0, MemOperand(s3, kNextOffset));
__ lw(s1, MemOperand(s3, kLimitOffset));
__ lw(s2, MemOperand(s3, kLevelOffset));
__ Addu(s2, s2, Operand(1));
__ sw(s2, MemOperand(s3, kLevelOffset));
if (FLAG_log_timer_events) {
FrameScope frame(masm, StackFrame::MANUAL);
__ PushSafepointRegisters();
__ PrepareCallCFunction(1, a0);
__ li(a0, Operand(ExternalReference::isolate_address(isolate)));
__ CallCFunction(ExternalReference::log_enter_external_function(isolate),
1);
__ PopSafepointRegisters();
}
// Native call returns to the DirectCEntry stub which redirects to the
// return address pushed on stack (could have moved after GC).
// DirectCEntry stub itself is generated early and never moves.
DirectCEntryStub stub(isolate);
stub.GenerateCall(masm, t9);
if (FLAG_log_timer_events) {
FrameScope frame(masm, StackFrame::MANUAL);
__ PushSafepointRegisters();
__ PrepareCallCFunction(1, a0);
__ li(a0, Operand(ExternalReference::isolate_address(isolate)));
__ CallCFunction(ExternalReference::log_leave_external_function(isolate),
1);
__ PopSafepointRegisters();
}
Label promote_scheduled_exception;
Label exception_handled;
Label delete_allocated_handles;
Label leave_exit_frame;
Label return_value_loaded;
// Load value from ReturnValue.
__ lw(v0, return_value_operand);
__ bind(&return_value_loaded);
// No more valid handles (the result handle was the last one). Restore
// previous handle scope.
__ sw(s0, MemOperand(s3, kNextOffset));
if (__ emit_debug_code()) {
__ lw(a1, MemOperand(s3, kLevelOffset));
__ Check(eq, kUnexpectedLevelAfterReturnFromApiCall, a1, Operand(s2));
}
__ Subu(s2, s2, Operand(1));
__ sw(s2, MemOperand(s3, kLevelOffset));
__ lw(at, MemOperand(s3, kLimitOffset));
__ Branch(&delete_allocated_handles, ne, s1, Operand(at));
// Check if the function scheduled an exception.
__ bind(&leave_exit_frame);
__ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
__ li(at, Operand(ExternalReference::scheduled_exception_address(isolate)));
__ lw(t1, MemOperand(at));
__ Branch(&promote_scheduled_exception, ne, t0, Operand(t1));
__ bind(&exception_handled);
bool restore_context = context_restore_operand != NULL;
if (restore_context) {
__ lw(cp, *context_restore_operand);
}
if (stack_space_operand != NULL) {
__ lw(s0, *stack_space_operand);
} else {
__ li(s0, Operand(stack_space));
}
__ LeaveExitFrame(false, s0, !restore_context, EMIT_RETURN,
stack_space_operand != NULL);
__ bind(&promote_scheduled_exception);
{
FrameScope frame(masm, StackFrame::INTERNAL);
__ CallExternalReference(
ExternalReference(Runtime::kPromoteScheduledException, isolate), 0);
}
__ jmp(&exception_handled);
// HandleScope limit has changed. Delete allocated extensions.
__ bind(&delete_allocated_handles);
__ sw(s1, MemOperand(s3, kLimitOffset));
__ mov(s0, v0);
__ mov(a0, v0);
__ PrepareCallCFunction(1, s1);
__ li(a0, Operand(ExternalReference::isolate_address(isolate)));
__ CallCFunction(ExternalReference::delete_handle_scope_extensions(isolate),
1);
__ mov(v0, s0);
__ jmp(&leave_exit_frame);
}
static void CallApiFunctionStubHelper(MacroAssembler* masm,
                                      const ParameterCount& argc,
                                      bool return_first_arg,
@@ -4947,9 +5086,9 @@ static void CallApiFunctionStubHelper(MacroAssembler* masm,
stack_space = argc.immediate() + FCA::kArgsLength + 1;
stack_space_operand = NULL;
}
- __ CallApiFunctionAndReturn(api_function_address, thunk_ref, stack_space,
-                             stack_space_operand, return_value_operand,
-                             &context_restore_operand);
+ CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, stack_space,
+                          stack_space_operand, return_value_operand,
+                          &context_restore_operand);
}
@@ -4996,9 +5135,9 @@ void CallApiGetterStub::Generate(MacroAssembler* masm) {
ExternalReference thunk_ref =
    ExternalReference::invoke_accessor_getter_callback(isolate());
- __ CallApiFunctionAndReturn(api_function_address, thunk_ref,
-                             kStackUnwindSpace, NULL,
-                             MemOperand(fp, 6 * kPointerSize), NULL);
+ CallApiFunctionAndReturn(masm, api_function_address, thunk_ref,
+                          kStackUnwindSpace, NULL,
+                          MemOperand(fp, 6 * kPointerSize), NULL);
}


@@ -4413,138 +4413,6 @@ void MacroAssembler::TailCallStub(CodeStub* stub,
}
static int AddressOffset(ExternalReference ref0, ExternalReference ref1) {
return ref0.address() - ref1.address();
}
void MacroAssembler::CallApiFunctionAndReturn(
Register function_address, ExternalReference thunk_ref, int stack_space,
MemOperand* stack_space_operand, MemOperand return_value_operand,
MemOperand* context_restore_operand) {
ExternalReference next_address =
ExternalReference::handle_scope_next_address(isolate());
const int kNextOffset = 0;
const int kLimitOffset = AddressOffset(
ExternalReference::handle_scope_limit_address(isolate()),
next_address);
const int kLevelOffset = AddressOffset(
ExternalReference::handle_scope_level_address(isolate()),
next_address);
DCHECK(function_address.is(a1) || function_address.is(a2));
Label profiler_disabled;
Label end_profiler_check;
li(t9, Operand(ExternalReference::is_profiling_address(isolate())));
lb(t9, MemOperand(t9, 0));
Branch(&profiler_disabled, eq, t9, Operand(zero_reg));
// Additional parameter is the address of the actual callback.
li(t9, Operand(thunk_ref));
jmp(&end_profiler_check);
bind(&profiler_disabled);
mov(t9, function_address);
bind(&end_profiler_check);
// Allocate HandleScope in callee-save registers.
li(s3, Operand(next_address));
lw(s0, MemOperand(s3, kNextOffset));
lw(s1, MemOperand(s3, kLimitOffset));
lw(s2, MemOperand(s3, kLevelOffset));
Addu(s2, s2, Operand(1));
sw(s2, MemOperand(s3, kLevelOffset));
if (FLAG_log_timer_events) {
FrameScope frame(this, StackFrame::MANUAL);
PushSafepointRegisters();
PrepareCallCFunction(1, a0);
li(a0, Operand(ExternalReference::isolate_address(isolate())));
CallCFunction(ExternalReference::log_enter_external_function(isolate()), 1);
PopSafepointRegisters();
}
// Native call returns to the DirectCEntry stub which redirects to the
// return address pushed on stack (could have moved after GC).
// DirectCEntry stub itself is generated early and never moves.
DirectCEntryStub stub(isolate());
stub.GenerateCall(this, t9);
if (FLAG_log_timer_events) {
FrameScope frame(this, StackFrame::MANUAL);
PushSafepointRegisters();
PrepareCallCFunction(1, a0);
li(a0, Operand(ExternalReference::isolate_address(isolate())));
CallCFunction(ExternalReference::log_leave_external_function(isolate()), 1);
PopSafepointRegisters();
}
Label promote_scheduled_exception;
Label exception_handled;
Label delete_allocated_handles;
Label leave_exit_frame;
Label return_value_loaded;
// Load value from ReturnValue.
lw(v0, return_value_operand);
bind(&return_value_loaded);
// No more valid handles (the result handle was the last one). Restore
// previous handle scope.
sw(s0, MemOperand(s3, kNextOffset));
if (emit_debug_code()) {
lw(a1, MemOperand(s3, kLevelOffset));
Check(eq, kUnexpectedLevelAfterReturnFromApiCall, a1, Operand(s2));
}
Subu(s2, s2, Operand(1));
sw(s2, MemOperand(s3, kLevelOffset));
lw(at, MemOperand(s3, kLimitOffset));
Branch(&delete_allocated_handles, ne, s1, Operand(at));
// Check if the function scheduled an exception.
bind(&leave_exit_frame);
LoadRoot(t0, Heap::kTheHoleValueRootIndex);
li(at, Operand(ExternalReference::scheduled_exception_address(isolate())));
lw(t1, MemOperand(at));
Branch(&promote_scheduled_exception, ne, t0, Operand(t1));
bind(&exception_handled);
bool restore_context = context_restore_operand != NULL;
if (restore_context) {
lw(cp, *context_restore_operand);
}
if (stack_space_operand != NULL) {
lw(s0, *stack_space_operand);
} else {
li(s0, Operand(stack_space));
}
LeaveExitFrame(false, s0, !restore_context, EMIT_RETURN,
stack_space_operand != NULL);
bind(&promote_scheduled_exception);
{
FrameScope frame(this, StackFrame::INTERNAL);
CallExternalReference(
ExternalReference(Runtime::kPromoteScheduledException, isolate()),
0);
}
jmp(&exception_handled);
// HandleScope limit has changed. Delete allocated extensions.
bind(&delete_allocated_handles);
sw(s1, MemOperand(s3, kLimitOffset));
mov(s0, v0);
mov(a0, v0);
PrepareCallCFunction(1, s1);
li(a0, Operand(ExternalReference::isolate_address(isolate())));
CallCFunction(ExternalReference::delete_handle_scope_extensions(isolate()),
1);
mov(v0, s0);
jmp(&leave_exit_frame);
}
bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
return has_frame_ || !stub->SometimesSetsUpAFrame();
}


@@ -1285,16 +1285,6 @@ const Operand& rt = Operand(zero_reg), BranchDelaySlot bd = PROTECT
void MovToFloatParameters(DoubleRegister src1, DoubleRegister src2);
void MovToFloatResult(DoubleRegister src);
// Calls an API function. Allocates HandleScope, extracts returned value
// from handle and propagates exceptions. Restores context. stack_space
// - space to be unwound on exit (includes the call JS arguments space and
// the additional space allocated for the fast call).
void CallApiFunctionAndReturn(Register function_address,
ExternalReference thunk_ref, int stack_space,
MemOperand* stack_space_operand,
MemOperand return_value_operand,
MemOperand* context_restore_operand);
// Jump to the builtin routine.
void JumpToExternalReference(const ExternalReference& builtin,
                             BranchDelaySlot bd = PROTECT);


@@ -4873,6 +4873,146 @@ void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
}
static int AddressOffset(ExternalReference ref0, ExternalReference ref1) {
int64_t offset = (ref0.address() - ref1.address());
DCHECK(static_cast<int>(offset) == offset);
return static_cast<int>(offset);
}
// Calls an API function. Allocates HandleScope, extracts returned value
// from handle and propagates exceptions. Restores context. stack_space
// - space to be unwound on exit (includes the call JS arguments space and
// the additional space allocated for the fast call).
static void CallApiFunctionAndReturn(MacroAssembler* masm,
Register function_address,
ExternalReference thunk_ref,
int stack_space,
MemOperand* stack_space_operand,
MemOperand return_value_operand,
MemOperand* context_restore_operand) {
Isolate* isolate = masm->isolate();
ExternalReference next_address =
ExternalReference::handle_scope_next_address(isolate);
const int kNextOffset = 0;
const int kLimitOffset = AddressOffset(
ExternalReference::handle_scope_limit_address(isolate), next_address);
const int kLevelOffset = AddressOffset(
ExternalReference::handle_scope_level_address(isolate), next_address);
DCHECK(function_address.is(a1) || function_address.is(a2));
Label profiler_disabled;
Label end_profiler_check;
__ li(t9, Operand(ExternalReference::is_profiling_address(isolate)));
__ lb(t9, MemOperand(t9, 0));
__ Branch(&profiler_disabled, eq, t9, Operand(zero_reg));
// Additional parameter is the address of the actual callback.
__ li(t9, Operand(thunk_ref));
__ jmp(&end_profiler_check);
__ bind(&profiler_disabled);
__ mov(t9, function_address);
__ bind(&end_profiler_check);
// Allocate HandleScope in callee-save registers.
__ li(s3, Operand(next_address));
__ ld(s0, MemOperand(s3, kNextOffset));
__ ld(s1, MemOperand(s3, kLimitOffset));
__ ld(s2, MemOperand(s3, kLevelOffset));
__ Daddu(s2, s2, Operand(1));
__ sd(s2, MemOperand(s3, kLevelOffset));
if (FLAG_log_timer_events) {
FrameScope frame(masm, StackFrame::MANUAL);
__ PushSafepointRegisters();
__ PrepareCallCFunction(1, a0);
__ li(a0, Operand(ExternalReference::isolate_address(isolate)));
__ CallCFunction(ExternalReference::log_enter_external_function(isolate),
1);
__ PopSafepointRegisters();
}
// Native call returns to the DirectCEntry stub which redirects to the
// return address pushed on stack (could have moved after GC).
// DirectCEntry stub itself is generated early and never moves.
DirectCEntryStub stub(isolate);
stub.GenerateCall(masm, t9);
if (FLAG_log_timer_events) {
FrameScope frame(masm, StackFrame::MANUAL);
__ PushSafepointRegisters();
__ PrepareCallCFunction(1, a0);
__ li(a0, Operand(ExternalReference::isolate_address(isolate)));
__ CallCFunction(ExternalReference::log_leave_external_function(isolate),
1);
__ PopSafepointRegisters();
}
Label promote_scheduled_exception;
Label exception_handled;
Label delete_allocated_handles;
Label leave_exit_frame;
Label return_value_loaded;
// Load value from ReturnValue.
__ ld(v0, return_value_operand);
__ bind(&return_value_loaded);
// No more valid handles (the result handle was the last one). Restore
// previous handle scope.
__ sd(s0, MemOperand(s3, kNextOffset));
if (__ emit_debug_code()) {
__ ld(a1, MemOperand(s3, kLevelOffset));
__ Check(eq, kUnexpectedLevelAfterReturnFromApiCall, a1, Operand(s2));
}
__ Dsubu(s2, s2, Operand(1));
__ sd(s2, MemOperand(s3, kLevelOffset));
__ ld(at, MemOperand(s3, kLimitOffset));
__ Branch(&delete_allocated_handles, ne, s1, Operand(at));
// Check if the function scheduled an exception.
__ bind(&leave_exit_frame);
__ LoadRoot(a4, Heap::kTheHoleValueRootIndex);
__ li(at, Operand(ExternalReference::scheduled_exception_address(isolate)));
__ ld(a5, MemOperand(at));
__ Branch(&promote_scheduled_exception, ne, a4, Operand(a5));
__ bind(&exception_handled);
bool restore_context = context_restore_operand != NULL;
if (restore_context) {
__ ld(cp, *context_restore_operand);
}
if (stack_space_operand != NULL) {
__ lw(s0, *stack_space_operand);
} else {
__ li(s0, Operand(stack_space));
}
__ LeaveExitFrame(false, s0, !restore_context, EMIT_RETURN,
stack_space_operand != NULL);
__ bind(&promote_scheduled_exception);
{
FrameScope frame(masm, StackFrame::INTERNAL);
__ CallExternalReference(
ExternalReference(Runtime::kPromoteScheduledException, isolate), 0);
}
__ jmp(&exception_handled);
// HandleScope limit has changed. Delete allocated extensions.
__ bind(&delete_allocated_handles);
__ sd(s1, MemOperand(s3, kLimitOffset));
__ mov(s0, v0);
__ mov(a0, v0);
__ PrepareCallCFunction(1, s1);
__ li(a0, Operand(ExternalReference::isolate_address(isolate)));
__ CallCFunction(ExternalReference::delete_handle_scope_extensions(isolate),
1);
__ mov(v0, s0);
__ jmp(&leave_exit_frame);
}
static void CallApiFunctionStubHelper(MacroAssembler* masm,
const ParameterCount& argc,
bool return_first_arg,
@@ -4986,9 +5126,9 @@ static void CallApiFunctionStubHelper(MacroAssembler* masm,
stack_space = argc.immediate() + FCA::kArgsLength + 1;
stack_space_operand = NULL;
}
- __ CallApiFunctionAndReturn(api_function_address, thunk_ref, stack_space,
-     stack_space_operand, return_value_operand,
-     &context_restore_operand);
+ CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, stack_space,
+     stack_space_operand, return_value_operand,
+     &context_restore_operand);
}
@@ -5035,9 +5175,9 @@ void CallApiGetterStub::Generate(MacroAssembler* masm) {
ExternalReference thunk_ref =
ExternalReference::invoke_accessor_getter_callback(isolate());
- __ CallApiFunctionAndReturn(api_function_address, thunk_ref,
-     kStackUnwindSpace, NULL,
-     MemOperand(fp, 6 * kPointerSize), NULL);
+ CallApiFunctionAndReturn(masm, api_function_address, thunk_ref,
+     kStackUnwindSpace, NULL,
+     MemOperand(fp, 6 * kPointerSize), NULL);
}
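
The stub added above is easier to follow if the handle-scope bookkeeping it performs in s0/s1/s2 is spelled out in plain C++. The following stand-alone sketch is illustrative only — the struct and function names are hypothetical, not V8's real HandleScopeData or API — but it mirrors the save/bump/call/restore/delete sequence the generated code emits around the API call.

#include <iostream>

struct ScopeData {        // models the triple the stub addresses off one base register
  void* next = nullptr;   //   kNextOffset
  void* limit = nullptr;  //   kLimitOffset
  int level = 0;          //   kLevelOffset
};

// Stands in for delete_handle_scope_extensions: called only when the callback
// grew the scope past the previously saved limit.
void DeleteExtensions(ScopeData* s) {
  std::cout << "limit moved to " << s->limit << ", deleting extensions\n";
}

// Stands in for the API callback reached through the DirectCEntry stub.
void* FakeApiCall(ScopeData* s) {
  (void)s;         // a real callback may allocate handles, moving next/limit
  return nullptr;  // the real stub instead reloads the result from ReturnValue
}

void* CallAndRestore(ScopeData* s) {
  // Save the scope and bump the nesting level (the ld/Daddu/sd block above).
  void* prev_next = s->next;
  void* prev_limit = s->limit;
  ++s->level;

  void* result = FakeApiCall(s);

  // No more valid handles: restore next, drop the level, and if the limit
  // moved, put it back and delete the extensions (delete_allocated_handles).
  s->next = prev_next;
  --s->level;
  if (s->limit != prev_limit) {
    s->limit = prev_limit;
    DeleteExtensions(s);
  }
  return result;
}

int main() {
  ScopeData data;
  return CallAndRestore(&data) == nullptr ? 0 : 1;
}
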
@@ -4359,139 +4359,6 @@ void MacroAssembler::TailCallStub(CodeStub* stub,
}
static int AddressOffset(ExternalReference ref0, ExternalReference ref1) {
int64_t offset = (ref0.address() - ref1.address());
DCHECK(static_cast<int>(offset) == offset);
return static_cast<int>(offset);
}
void MacroAssembler::CallApiFunctionAndReturn(
Register function_address, ExternalReference thunk_ref, int stack_space,
MemOperand* stack_space_operand, MemOperand return_value_operand,
MemOperand* context_restore_operand) {
ExternalReference next_address =
ExternalReference::handle_scope_next_address(isolate());
const int kNextOffset = 0;
const int kLimitOffset = AddressOffset(
ExternalReference::handle_scope_limit_address(isolate()),
next_address);
const int kLevelOffset = AddressOffset(
ExternalReference::handle_scope_level_address(isolate()),
next_address);
DCHECK(function_address.is(a1) || function_address.is(a2));
Label profiler_disabled;
Label end_profiler_check;
li(t9, Operand(ExternalReference::is_profiling_address(isolate())));
lb(t9, MemOperand(t9, 0));
Branch(&profiler_disabled, eq, t9, Operand(zero_reg));
// Additional parameter is the address of the actual callback.
li(t9, Operand(thunk_ref));
jmp(&end_profiler_check);
bind(&profiler_disabled);
mov(t9, function_address);
bind(&end_profiler_check);
// Allocate HandleScope in callee-save registers.
li(s3, Operand(next_address));
ld(s0, MemOperand(s3, kNextOffset));
ld(s1, MemOperand(s3, kLimitOffset));
ld(s2, MemOperand(s3, kLevelOffset));
Daddu(s2, s2, Operand(1));
sd(s2, MemOperand(s3, kLevelOffset));
if (FLAG_log_timer_events) {
FrameScope frame(this, StackFrame::MANUAL);
PushSafepointRegisters();
PrepareCallCFunction(1, a0);
li(a0, Operand(ExternalReference::isolate_address(isolate())));
CallCFunction(ExternalReference::log_enter_external_function(isolate()), 1);
PopSafepointRegisters();
}
// Native call returns to the DirectCEntry stub which redirects to the
// return address pushed on stack (could have moved after GC).
// DirectCEntry stub itself is generated early and never moves.
DirectCEntryStub stub(isolate());
stub.GenerateCall(this, t9);
if (FLAG_log_timer_events) {
FrameScope frame(this, StackFrame::MANUAL);
PushSafepointRegisters();
PrepareCallCFunction(1, a0);
li(a0, Operand(ExternalReference::isolate_address(isolate())));
CallCFunction(ExternalReference::log_leave_external_function(isolate()), 1);
PopSafepointRegisters();
}
Label promote_scheduled_exception;
Label exception_handled;
Label delete_allocated_handles;
Label leave_exit_frame;
Label return_value_loaded;
// Load value from ReturnValue.
ld(v0, return_value_operand);
bind(&return_value_loaded);
// No more valid handles (the result handle was the last one). Restore
// previous handle scope.
sd(s0, MemOperand(s3, kNextOffset));
if (emit_debug_code()) {
ld(a1, MemOperand(s3, kLevelOffset));
Check(eq, kUnexpectedLevelAfterReturnFromApiCall, a1, Operand(s2));
}
Dsubu(s2, s2, Operand(1));
sd(s2, MemOperand(s3, kLevelOffset));
ld(at, MemOperand(s3, kLimitOffset));
Branch(&delete_allocated_handles, ne, s1, Operand(at));
// Check if the function scheduled an exception.
bind(&leave_exit_frame);
LoadRoot(a4, Heap::kTheHoleValueRootIndex);
li(at, Operand(ExternalReference::scheduled_exception_address(isolate())));
ld(a5, MemOperand(at));
Branch(&promote_scheduled_exception, ne, a4, Operand(a5));
bind(&exception_handled);
bool restore_context = context_restore_operand != NULL;
if (restore_context) {
ld(cp, *context_restore_operand);
}
if (stack_space_operand != NULL) {
lw(s0, *stack_space_operand);
} else {
li(s0, Operand(stack_space));
}
LeaveExitFrame(false, s0, !restore_context, EMIT_RETURN,
stack_space_operand != NULL);
bind(&promote_scheduled_exception);
{
FrameScope frame(this, StackFrame::INTERNAL);
CallExternalReference(
ExternalReference(Runtime::kPromoteScheduledException, isolate()),
0);
}
jmp(&exception_handled);
// HandleScope limit has changed. Delete allocated extensions.
bind(&delete_allocated_handles);
sd(s1, MemOperand(s3, kLimitOffset));
mov(s0, v0);
mov(a0, v0);
PrepareCallCFunction(1, s1);
li(a0, Operand(ExternalReference::isolate_address(isolate())));
CallCFunction(ExternalReference::delete_handle_scope_extensions(isolate()),
1);
mov(v0, s0);
jmp(&leave_exit_frame);
}
bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
return has_frame_ || !stub->SometimesSetsUpAFrame();
}
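
The AddressOffset helper removed here (and re-added in code-stubs-mips64.cc above) exists so the three handle-scope fields can all be reached from a single base register as base + constant offset. A stand-alone illustration follows; the struct is a hypothetical stand-in for the isolate fields that handle_scope_next/limit/level_address point at, not a real V8 type.

#include <cassert>
#include <cstdint>

struct HandleScopeFields {
  void* next;
  void* limit;
  int level;
};

int Delta(const void* field, const void* base) {
  int64_t offset = reinterpret_cast<const char*>(field) -
                   reinterpret_cast<const char*>(base);
  assert(static_cast<int>(offset) == offset);  // same fits-in-int check as the DCHECK
  return static_cast<int>(offset);
}

int main() {
  HandleScopeFields fields{};
  const int kNextOffset = 0;
  const int kLimitOffset = Delta(&fields.limit, &fields.next);
  const int kLevelOffset = Delta(&fields.level, &fields.next);
  // One base pointer reaches every field, exactly like MemOperand(s3, kXxxOffset).
  char* base = reinterpret_cast<char*>(&fields.next);
  assert(base + kNextOffset == reinterpret_cast<char*>(&fields.next));
  assert(base + kLimitOffset == reinterpret_cast<char*>(&fields.limit));
  assert(base + kLevelOffset == reinterpret_cast<char*>(&fields.level));
  return kNextOffset;
}
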
@@ -1315,16 +1315,6 @@ const Operand& rt = Operand(zero_reg), BranchDelaySlot bd = PROTECT
void MovToFloatParameters(DoubleRegister src1, DoubleRegister src2);
void MovToFloatResult(DoubleRegister src);
// Calls an API function. Allocates HandleScope, extracts returned value
// from handle and propagates exceptions. Restores context. stack_space
// - space to be unwound on exit (includes the call JS arguments space and
// the additional space allocated for the fast call).
void CallApiFunctionAndReturn(Register function_address,
ExternalReference thunk_ref, int stack_space,
MemOperand* stack_space_operand,
MemOperand return_value_operand,
MemOperand* context_restore_operand);
// Jump to the builtin routine.
void JumpToExternalReference(const ExternalReference& builtin,
BranchDelaySlot bd = PROTECT);
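
The declaration above disappears from MacroAssembler because the commit turns the helper into a file-local static in the code-stubs files, with the assembler passed explicitly. The shape of that refactoring, using heavily simplified stand-in types rather than the real V8 classes, is sketched below.

namespace sketch {

// Minimal stand-in; the real MacroAssembler is far larger.
struct MacroAssembler {
  void CallSomething() {}
};

#define __ masm->

// After the commit: internal linkage, assembler passed explicitly, so the
// helper lives next to its only users in code-stubs-*.cc.
static void CallApiFunctionAndReturnSketch(MacroAssembler* masm /*, ...args */) {
  __ CallSomething();  // the __ macro still expands to masm->
}

#undef __

void CallSite(MacroAssembler* masm) {
  // Before the commit this read masm->CallApiFunctionAndReturn(...).
  CallApiFunctionAndReturnSketch(masm);
}

}  // namespace sketch

int main() {
  sketch::MacroAssembler masm;
  sketch::CallSite(&masm);
}
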
@@ -4621,6 +4621,194 @@ void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
}
static int Offset(ExternalReference ref0, ExternalReference ref1) {
int64_t offset = (ref0.address() - ref1.address());
// Check that the offset fits into an int.
DCHECK(static_cast<int>(offset) == offset);
return static_cast<int>(offset);
}
// Prepares stack to put arguments (aligns and so on). WIN64 calling
// convention requires to put the pointer to the return value slot into
// rcx (rcx must be preserved until CallApiFunctionAndReturn). Saves
// context (rsi). Clobbers rax. Allocates arg_stack_space * kPointerSize
// inside the exit frame (not GCed) accessible via StackSpaceOperand.
static void PrepareCallApiFunction(MacroAssembler* masm, int arg_stack_space) {
__ EnterApiExitFrame(arg_stack_space);
}
// Calls an API function. Allocates HandleScope, extracts returned value
// from handle and propagates exceptions. Clobbers r14, r15, rbx and
// caller-save registers. Restores context. On return removes
// stack_space * kPointerSize (GCed).
static void CallApiFunctionAndReturn(MacroAssembler* masm,
Register function_address,
ExternalReference thunk_ref,
Register thunk_last_arg, int stack_space,
Operand* stack_space_operand,
Operand return_value_operand,
Operand* context_restore_operand) {
Label prologue;
Label promote_scheduled_exception;
Label exception_handled;
Label delete_allocated_handles;
Label leave_exit_frame;
Label write_back;
Isolate* isolate = masm->isolate();
Factory* factory = isolate->factory();
ExternalReference next_address =
ExternalReference::handle_scope_next_address(isolate);
const int kNextOffset = 0;
const int kLimitOffset = Offset(
ExternalReference::handle_scope_limit_address(isolate), next_address);
const int kLevelOffset = Offset(
ExternalReference::handle_scope_level_address(isolate), next_address);
ExternalReference scheduled_exception_address =
ExternalReference::scheduled_exception_address(isolate);
DCHECK(rdx.is(function_address) || r8.is(function_address));
// Allocate HandleScope in callee-save registers.
Register prev_next_address_reg = r14;
Register prev_limit_reg = rbx;
Register base_reg = r15;
__ Move(base_reg, next_address);
__ movp(prev_next_address_reg, Operand(base_reg, kNextOffset));
__ movp(prev_limit_reg, Operand(base_reg, kLimitOffset));
__ addl(Operand(base_reg, kLevelOffset), Immediate(1));
if (FLAG_log_timer_events) {
FrameScope frame(masm, StackFrame::MANUAL);
__ PushSafepointRegisters();
__ PrepareCallCFunction(1);
__ LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate));
__ CallCFunction(ExternalReference::log_enter_external_function(isolate),
1);
__ PopSafepointRegisters();
}
Label profiler_disabled;
Label end_profiler_check;
__ Move(rax, ExternalReference::is_profiling_address(isolate));
__ cmpb(Operand(rax, 0), Immediate(0));
__ j(zero, &profiler_disabled);
// Third parameter is the address of the actual getter function.
__ Move(thunk_last_arg, function_address);
__ Move(rax, thunk_ref);
__ jmp(&end_profiler_check);
__ bind(&profiler_disabled);
// Profiling is disabled: call the api function directly.
__ Move(rax, function_address);
__ bind(&end_profiler_check);
// Call the api function!
__ call(rax);
if (FLAG_log_timer_events) {
FrameScope frame(masm, StackFrame::MANUAL);
__ PushSafepointRegisters();
__ PrepareCallCFunction(1);
__ LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate));
__ CallCFunction(ExternalReference::log_leave_external_function(isolate),
1);
__ PopSafepointRegisters();
}
// Load the value from ReturnValue
__ movp(rax, return_value_operand);
__ bind(&prologue);
// No more valid handles (the result handle was the last one). Restore
// previous handle scope.
__ subl(Operand(base_reg, kLevelOffset), Immediate(1));
__ movp(Operand(base_reg, kNextOffset), prev_next_address_reg);
__ cmpp(prev_limit_reg, Operand(base_reg, kLimitOffset));
__ j(not_equal, &delete_allocated_handles);
__ bind(&leave_exit_frame);
// Check if the function scheduled an exception.
__ Move(rsi, scheduled_exception_address);
__ Cmp(Operand(rsi, 0), factory->the_hole_value());
__ j(not_equal, &promote_scheduled_exception);
__ bind(&exception_handled);
#if DEBUG
// Check if the function returned a valid JavaScript value.
Label ok;
Register return_value = rax;
Register map = rcx;
__ JumpIfSmi(return_value, &ok, Label::kNear);
__ movp(map, FieldOperand(return_value, HeapObject::kMapOffset));
__ CmpInstanceType(map, LAST_NAME_TYPE);
__ j(below_equal, &ok, Label::kNear);
__ CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE);
__ j(above_equal, &ok, Label::kNear);
__ CompareRoot(map, Heap::kHeapNumberMapRootIndex);
__ j(equal, &ok, Label::kNear);
__ CompareRoot(return_value, Heap::kUndefinedValueRootIndex);
__ j(equal, &ok, Label::kNear);
__ CompareRoot(return_value, Heap::kTrueValueRootIndex);
__ j(equal, &ok, Label::kNear);
__ CompareRoot(return_value, Heap::kFalseValueRootIndex);
__ j(equal, &ok, Label::kNear);
__ CompareRoot(return_value, Heap::kNullValueRootIndex);
__ j(equal, &ok, Label::kNear);
__ Abort(kAPICallReturnedInvalidObject);
__ bind(&ok);
#endif
bool restore_context = context_restore_operand != NULL;
if (restore_context) {
__ movp(rsi, *context_restore_operand);
}
if (stack_space_operand != nullptr) {
__ movp(rbx, *stack_space_operand);
}
__ LeaveApiExitFrame(!restore_context);
if (stack_space_operand != nullptr) {
DCHECK_EQ(stack_space, 0);
__ PopReturnAddressTo(rcx);
__ addq(rsp, rbx);
__ jmp(rcx);
} else {
__ ret(stack_space * kPointerSize);
}
__ bind(&promote_scheduled_exception);
{
FrameScope frame(masm, StackFrame::INTERNAL);
__ CallRuntime(Runtime::kPromoteScheduledException, 0);
}
__ jmp(&exception_handled);
// HandleScope limit has changed. Delete allocated extensions.
__ bind(&delete_allocated_handles);
__ movp(Operand(base_reg, kLimitOffset), prev_limit_reg);
__ movp(prev_limit_reg, rax);
__ LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate));
__ LoadAddress(rax,
ExternalReference::delete_handle_scope_extensions(isolate));
__ call(rax);
__ movp(rax, prev_limit_reg);
__ jmp(&leave_exit_frame);
}
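
One detail of the function above that is easy to miss is the profiler dispatch: the code reads a single byte at is_profiling_address and either calls the callback directly or routes it through a thunk, with the real callback address travelling as the thunk's last argument. A plain-C++ model of that control flow follows; names and signatures are hypothetical and do not reflect the actual V8 thunk ABI.

#include <cstdint>
#include <iostream>

using Callback = void (*)();

void TheCallback() { std::cout << "api callback ran\n"; }

// The thunk lets the profiler log enter/exit around the real callback, whose
// address arrives in the thunk's last argument (thunk_last_arg above).
void ProfilingThunk(Callback actual) {
  std::cout << "profiler: enter external\n";
  actual();
  std::cout << "profiler: leave external\n";
}

void Dispatch(const uint8_t* is_profiling, Callback callback) {
  if (*is_profiling != 0) {
    ProfilingThunk(callback);  // thunk_ref path
  } else {
    callback();                // profiler disabled: call the function directly
  }
}

int main() {
  uint8_t profiling = 1;
  Dispatch(&profiling, &TheCallback);
  profiling = 0;
  Dispatch(&profiling, &TheCallback);
}
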
static void CallApiFunctionStubHelper(MacroAssembler* masm,
const ParameterCount& argc,
bool return_first_arg,
@@ -4698,7 +4886,7 @@ static void CallApiFunctionStubHelper(MacroAssembler* masm,
// it's not controlled by GC.
const int kApiStackSpace = 4;
- __ PrepareCallApiFunction(kApiStackSpace);
+ PrepareCallApiFunction(masm, kApiStackSpace);
// FunctionCallbackInfo::implicit_args_.
__ movp(StackSpaceOperand(0), scratch);
@@ -4756,9 +4944,9 @@ static void CallApiFunctionStubHelper(MacroAssembler* masm,
stack_space = argc.immediate() + FCA::kArgsLength + 1;
stack_space_operand = nullptr;
}
- __ CallApiFunctionAndReturn(api_function_address, thunk_ref, callback_arg,
-     stack_space, stack_space_operand,
-     return_value_operand, &context_restore_operand);
+ CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, callback_arg,
+     stack_space, stack_space_operand,
+     return_value_operand, &context_restore_operand);
}
@@ -4809,7 +4997,7 @@ void CallApiGetterStub::Generate(MacroAssembler* masm) {
__ leap(name_arg, Operand(rsp, kPCOnStackSize));
- __ PrepareCallApiFunction(kArgStackSpace);
+ PrepareCallApiFunction(masm, kArgStackSpace);
__ leap(scratch, Operand(name_arg, 1 * kPointerSize));
// v8::PropertyAccessorInfo::args_.
@@ -4832,8 +5020,8 @@ void CallApiGetterStub::Generate(MacroAssembler* masm) {
Operand return_value_operand = args.GetArgumentOperand(
PropertyCallbackArguments::kArgsLength - 1 -
PropertyCallbackArguments::kReturnValueOffset);
- __ CallApiFunctionAndReturn(api_function_address, thunk_ref, getter_arg,
-     kStackSpace, nullptr, return_value_operand, NULL);
+ CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, getter_arg,
+     kStackSpace, nullptr, return_value_operand, NULL);
}
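
The two call sites just above also show why CallApiFunctionAndReturn takes both an immediate stack_space and an optional stack_space_operand: when the argument count is known at compile time the stub can pop a fixed number of slots on return, otherwise the byte count is read from a stack slot at run time before jumping back. A small sketch of that decision follows; kArgsLength here is only a placeholder for FCA::kArgsLength.

#include <cstddef>
#include <optional>

constexpr int kArgsLength = 7;  // placeholder for FunctionCallbackArguments::kArgsLength

struct UnwindPlan {
  int immediate_slots;                 // pop this many slots with a plain ret when known
  std::optional<std::size_t> operand;  // otherwise: frame offset holding the byte count
};

UnwindPlan MakePlan(std::optional<int> argc_immediate, std::size_t argc_slot_offset) {
  if (argc_immediate) {
    // Mirrors: stack_space = argc.immediate() + FCA::kArgsLength + 1;
    //          stack_space_operand = nullptr;
    return {*argc_immediate + kArgsLength + 1, std::nullopt};
  }
  // Mirrors: stack_space stays 0 and stack_space_operand points at the length
  // slot; the stub then does PopReturnAddressTo / addq rsp / jmp instead of ret.
  return {0, argc_slot_offset};
}

int main() {
  UnwindPlan fixed = MakePlan(3, 0);  // e.g. a stub with three known arguments
  UnwindPlan dynamic = MakePlan(std::nullopt, 16);
  return (fixed.immediate_slots > 0 && dynamic.operand.has_value()) ? 0 : 1;
}
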
@@ -692,182 +692,6 @@ void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
}
static int Offset(ExternalReference ref0, ExternalReference ref1) {
int64_t offset = (ref0.address() - ref1.address());
// Check that the offset fits into an int.
DCHECK(static_cast<int>(offset) == offset);
return static_cast<int>(offset);
}
void MacroAssembler::PrepareCallApiFunction(int arg_stack_space) {
EnterApiExitFrame(arg_stack_space);
}
void MacroAssembler::CallApiFunctionAndReturn(
Register function_address, ExternalReference thunk_ref,
Register thunk_last_arg, int stack_space, Operand* stack_space_operand,
Operand return_value_operand, Operand* context_restore_operand) {
Label prologue;
Label promote_scheduled_exception;
Label exception_handled;
Label delete_allocated_handles;
Label leave_exit_frame;
Label write_back;
Factory* factory = isolate()->factory();
ExternalReference next_address =
ExternalReference::handle_scope_next_address(isolate());
const int kNextOffset = 0;
const int kLimitOffset = Offset(
ExternalReference::handle_scope_limit_address(isolate()),
next_address);
const int kLevelOffset = Offset(
ExternalReference::handle_scope_level_address(isolate()),
next_address);
ExternalReference scheduled_exception_address =
ExternalReference::scheduled_exception_address(isolate());
DCHECK(rdx.is(function_address) || r8.is(function_address));
// Allocate HandleScope in callee-save registers.
Register prev_next_address_reg = r14;
Register prev_limit_reg = rbx;
Register base_reg = r15;
Move(base_reg, next_address);
movp(prev_next_address_reg, Operand(base_reg, kNextOffset));
movp(prev_limit_reg, Operand(base_reg, kLimitOffset));
addl(Operand(base_reg, kLevelOffset), Immediate(1));
if (FLAG_log_timer_events) {
FrameScope frame(this, StackFrame::MANUAL);
PushSafepointRegisters();
PrepareCallCFunction(1);
LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate()));
CallCFunction(ExternalReference::log_enter_external_function(isolate()), 1);
PopSafepointRegisters();
}
Label profiler_disabled;
Label end_profiler_check;
Move(rax, ExternalReference::is_profiling_address(isolate()));
cmpb(Operand(rax, 0), Immediate(0));
j(zero, &profiler_disabled);
// Third parameter is the address of the actual getter function.
Move(thunk_last_arg, function_address);
Move(rax, thunk_ref);
jmp(&end_profiler_check);
bind(&profiler_disabled);
// Call the api function!
Move(rax, function_address);
bind(&end_profiler_check);
// Call the api function!
call(rax);
if (FLAG_log_timer_events) {
FrameScope frame(this, StackFrame::MANUAL);
PushSafepointRegisters();
PrepareCallCFunction(1);
LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate()));
CallCFunction(ExternalReference::log_leave_external_function(isolate()), 1);
PopSafepointRegisters();
}
// Load the value from ReturnValue
movp(rax, return_value_operand);
bind(&prologue);
// No more valid handles (the result handle was the last one). Restore
// previous handle scope.
subl(Operand(base_reg, kLevelOffset), Immediate(1));
movp(Operand(base_reg, kNextOffset), prev_next_address_reg);
cmpp(prev_limit_reg, Operand(base_reg, kLimitOffset));
j(not_equal, &delete_allocated_handles);
bind(&leave_exit_frame);
// Check if the function scheduled an exception.
Move(rsi, scheduled_exception_address);
Cmp(Operand(rsi, 0), factory->the_hole_value());
j(not_equal, &promote_scheduled_exception);
bind(&exception_handled);
#if DEBUG
// Check if the function returned a valid JavaScript value.
Label ok;
Register return_value = rax;
Register map = rcx;
JumpIfSmi(return_value, &ok, Label::kNear);
movp(map, FieldOperand(return_value, HeapObject::kMapOffset));
CmpInstanceType(map, LAST_NAME_TYPE);
j(below_equal, &ok, Label::kNear);
CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE);
j(above_equal, &ok, Label::kNear);
CompareRoot(map, Heap::kHeapNumberMapRootIndex);
j(equal, &ok, Label::kNear);
CompareRoot(return_value, Heap::kUndefinedValueRootIndex);
j(equal, &ok, Label::kNear);
CompareRoot(return_value, Heap::kTrueValueRootIndex);
j(equal, &ok, Label::kNear);
CompareRoot(return_value, Heap::kFalseValueRootIndex);
j(equal, &ok, Label::kNear);
CompareRoot(return_value, Heap::kNullValueRootIndex);
j(equal, &ok, Label::kNear);
Abort(kAPICallReturnedInvalidObject);
bind(&ok);
#endif
bool restore_context = context_restore_operand != NULL;
if (restore_context) {
movp(rsi, *context_restore_operand);
}
if (stack_space_operand != nullptr) {
movp(rbx, *stack_space_operand);
}
LeaveApiExitFrame(!restore_context);
if (stack_space_operand != nullptr) {
DCHECK_EQ(stack_space, 0);
PopReturnAddressTo(rcx);
addq(rsp, rbx);
jmp(rcx);
} else {
ret(stack_space * kPointerSize);
}
bind(&promote_scheduled_exception);
{
FrameScope frame(this, StackFrame::INTERNAL);
CallRuntime(Runtime::kPromoteScheduledException, 0);
}
jmp(&exception_handled);
// HandleScope limit has changed. Delete allocated extensions.
bind(&delete_allocated_handles);
movp(Operand(base_reg, kLimitOffset), prev_limit_reg);
movp(prev_limit_reg, rax);
LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate()));
LoadAddress(rax,
ExternalReference::delete_handle_scope_extensions(isolate()));
call(rax);
movp(rax, prev_limit_reg);
jmp(&leave_exit_frame);
}
void MacroAssembler::JumpToExternalReference(const ExternalReference& ext,
int result_size) {
// Set the entry point and jump to the C entry runtime stub.
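
Both x64 copies of the routine (the new static in code-stubs-x64.cc above and the MacroAssembler member removed here) guard the API result with the same #if DEBUG block. What that block accepts can be summarized as a simple predicate; the enum below is a toy tagged-value model, not V8's real Smi/map encoding.

#include <cassert>

enum class Kind { kSmi, kName, kSpecObject, kHeapNumber,
                  kUndefined, kTrue, kFalse, kNull, kOther };

bool IsValidApiReturnValue(Kind k) {
  switch (k) {
    case Kind::kSmi:          // JumpIfSmi(return_value, &ok)
    case Kind::kName:         // instance type <= LAST_NAME_TYPE
    case Kind::kSpecObject:   // instance type >= FIRST_SPEC_OBJECT_TYPE
    case Kind::kHeapNumber:
    case Kind::kUndefined:
    case Kind::kTrue:
    case Kind::kFalse:
    case Kind::kNull:
      return true;
    default:
      return false;           // would reach Abort(kAPICallReturnedInvalidObject)
  }
}

int main() {
  assert(IsValidApiReturnValue(Kind::kSmi));
  assert(!IsValidApiReturnValue(Kind::kOther));
}
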
@@ -1326,24 +1326,6 @@ class MacroAssembler: public Assembler {
// Jump to a runtime routine.
void JumpToExternalReference(const ExternalReference& ext, int result_size);
// Prepares stack to put arguments (aligns and so on). WIN64 calling
// convention requires to put the pointer to the return value slot into
// rcx (rcx must be preserved until CallApiFunctionAndReturn). Saves
// context (rsi). Clobbers rax. Allocates arg_stack_space * kPointerSize
// inside the exit frame (not GCed) accessible via StackSpaceOperand.
void PrepareCallApiFunction(int arg_stack_space);
// Calls an API function. Allocates HandleScope, extracts returned value
// from handle and propagates exceptions. Clobbers r14, r15, rbx and
// caller-save registers. Restores context. On return removes
// stack_space * kPointerSize (GCed).
void CallApiFunctionAndReturn(Register function_address,
ExternalReference thunk_ref,
Register thunk_last_arg, int stack_space,
Operand* stack_space_operand,
Operand return_value_operand,
Operand* context_restore_operand);
// Before calling a C-function from generated code, align arguments on stack.
// After aligning the frame, arguments must be stored in rsp[0], rsp[8],
// etc., not pushed. The argument count assumes all arguments are word sized.