Adapt to new calling convention on ARM:

- Simplified frame entry and frame exit code.
- Added ArgumentsAdaptorTrampoline and check for matching argument counts in the InvokePrologue.
- Removed definition and uses of USE_OLD_CALLING_CONVENTIONS.
- Changed MacroAssembler::InvokeBuiltin to match ia32 version.
- Start introducing convenience instructions in the ARM assembler as needed. These instructions take all Register parameters to avoid extra typing of "Operand(reg)".


To keep the architectures in sync these changes have been made to the ia32 files:
- Changed MacroAssembler::EnterFrame(StackFrame::Type type) to MacroAssembler::EnterInternalFrame().


These parts are still missing:
- unimplemented: Builtins::Generate_FunctionApply - large limit
- unimplemented: Builtins::Generate_ArgumentsAdaptorTrampoline - non-function call
- The files have not been lint'd yet.


Review URL: http://codereview.chromium.org/1930

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@289 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
This commit is contained in:
iposva@chromium.org 2008-09-12 03:29:06 +00:00
parent b6ad53054f
commit c5ee961882
23 changed files with 1022 additions and 700 deletions

View File

@ -459,6 +459,10 @@ class Assembler : public Malloced {
void sub(Register dst, Register src1, const Operand& src2,
SBit s = LeaveCC, Condition cond = al);
void sub(Register dst, Register src1, Register src2,
SBit s = LeaveCC, Condition cond = al) {
sub(dst, src1, Operand(src2), s, cond);
}
void rsb(Register dst, Register src1, const Operand& src2,
SBit s = LeaveCC, Condition cond = al);
@ -476,18 +480,31 @@ class Assembler : public Malloced {
SBit s = LeaveCC, Condition cond = al);
void tst(Register src1, const Operand& src2, Condition cond = al);
void tst(Register src1, Register src2, Condition cond = al) {
tst(src1, Operand(src2), cond);
}
void teq(Register src1, const Operand& src2, Condition cond = al);
void cmp(Register src1, const Operand& src2, Condition cond = al);
void cmp(Register src1, Register src2, Condition cond = al) {
cmp(src1, Operand(src2), cond);
}
void cmn(Register src1, const Operand& src2, Condition cond = al);
void orr(Register dst, Register src1, const Operand& src2,
SBit s = LeaveCC, Condition cond = al);
void orr(Register dst, Register src1, Register src2,
SBit s = LeaveCC, Condition cond = al) {
orr(dst, src1, Operand(src2), s, cond);
}
void mov(Register dst, const Operand& src,
SBit s = LeaveCC, Condition cond = al);
void mov(Register dst, Register src, SBit s = LeaveCC, Condition cond = al) {
mov(dst, Operand(src), s, cond);
}
void bic(Register dst, Register src1, const Operand& src2,
SBit s = LeaveCC, Condition cond = al);

View File

@ -961,7 +961,6 @@ bool Genesis::InstallNatives() {
InstallNativeFunctions();
#ifndef USE_OLD_CALLING_CONVENTIONS
// TODO(1240778): Get rid of the JS implementation of
// Function.prototype.call and simply create a function with the
// faked formal parameter count (-1) and use the illegal builtin as
@ -990,7 +989,6 @@ bool Genesis::InstallNatives() {
Handle<JSFunction>::cast(GetProperty(proto, Factory::apply_symbol()));
apply->shared()->set_code(Builtins::builtin(Builtins::FunctionApply));
}
#endif
// Make sure that the builtins object has fast properties.
// If the ASSERT below fails, please increase the expected number of

View File

@ -43,73 +43,117 @@ void Builtins::Generate_Adaptor(MacroAssembler* masm,
// r0 contains the number of arguments excluding the receiver.
// JumpToBuiltin expects r0 to contain the number of arguments
// including the receiver.
__ add(r0, r0, Operand(1));
__ mov(r0, Operand(argc + 1));
__ mov(ip, Operand(ExternalReference::builtin_passed_function()));
__ str(r1, MemOperand(ip, 0));
__ JumpToBuiltin(ExternalReference(id));
}
void Builtins::Generate_JSConstructCall(MacroAssembler* masm) {
// r0: number of arguments
// ----------- S t a t e -------------
// -- r0 : number of arguments
// -- r1 : constructor function
// -- lr : return address
// -- sp[...]: constructor arguments
// -----------------------------------
__ EnterJSFrame(0);
// Enter an internal frame.
__ EnterInternalFrame();
// Preserve the two incoming parameters
__ mov(r0, Operand(r0, LSL, kSmiTagSize));
__ push(r0); // smi-tagged arguments count
__ push(r1); // constructor function
// Allocate the new receiver object.
__ ldr(r0, MemOperand(pp, JavaScriptFrameConstants::kFunctionOffset));
__ push(r0);
__ push(r1); // argument for Runtime_NewObject
__ CallRuntime(Runtime::kNewObject, 1);
__ push(r0); // save the receiver
// Push the function and the allocated receiver from the stack.
__ ldr(r1, MemOperand(pp, JavaScriptFrameConstants::kFunctionOffset));
// sp[0]: receiver (newly allocated object)
// sp[1]: constructor function
// sp[2]: number of arguments (smi-tagged)
__ ldr(r1, MemOperand(sp, kPointerSize));
__ push(r1); // function
__ push(r0); // receiver
// Restore the arguments length from the stack.
__ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kArgsLengthOffset));
// Reload the number of arguments from the stack.
// r1: constructor function
// sp[0]: receiver
// sp[1]: constructor function
// sp[2]: receiver
// sp[3]: constructor function
// sp[4]: number of arguments (smi-tagged)
__ ldr(r3, MemOperand(sp, 4 * kPointerSize));
// Setup pointer to last argument - receiver is not counted.
__ sub(r2, pp, Operand(r0, LSL, kPointerSizeLog2));
__ sub(r2, r2, Operand(kPointerSize));
// Setup pointer to last argument.
__ add(r2, fp, Operand(StandardFrameConstants::kCallerSPOffset));
// Setup number of arguments for function call below
__ mov(r0, Operand(r3, LSR, kSmiTagSize));
// Copy arguments and receiver to the expression stack.
// r0: number of arguments
// r2: address of last argument (caller sp)
// r1: constructor function
// r3: number of arguments (smi-tagged)
// sp[0]: receiver
// sp[1]: constructor function
// sp[2]: receiver
// sp[3]: constructor function
// sp[4]: number of arguments (smi-tagged)
Label loop, entry;
__ mov(r1, Operand(r0));
__ b(&entry);
__ bind(&loop);
__ ldr(r3, MemOperand(r2, r1, LSL, kPointerSizeLog2));
__ push(r3);
__ ldr(ip, MemOperand(r2, r3, LSL, kPointerSizeLog2 - 1));
__ push(ip);
__ bind(&entry);
__ sub(r1, r1, Operand(1), SetCC);
__ sub(r3, r3, Operand(2), SetCC);
__ b(ge, &loop);
// Get the function to call from the stack.
__ ldr(r1, MemOperand(pp, JavaScriptFrameConstants::kFunctionOffset));
// Call the function.
// r0: number of arguments
// r1: constructor function
Label return_site;
__ RecordPosition(position);
ParameterCount actual(r0);
__ InvokeFunction(r1, actual, CALL_FUNCTION);
__ bind(&return_site);
// Restore context from the frame and discard the function.
__ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
// Pop the function from the stack.
// sp[0]: constructor function
// sp[2]: receiver
// sp[3]: constructor function
// sp[4]: number of arguments (smi-tagged)
__ pop();
// Restore context from the frame.
// r0: result
// sp[0]: receiver
// sp[1]: constructor function
// sp[2]: number of arguments (smi-tagged)
__ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
// If the result is an object (in the ECMA sense), we should get rid
// of the receiver and use the result; see ECMA-262 section 13.2.2-7
// on page 74.
Label use_receiver, exit;
// If the result is a smi, it is *not* an object in the ECMA sense.
// r0: result
// sp[0]: receiver (newly allocated object)
// sp[1]: constructor function
// sp[2]: number of arguments (smi-tagged)
__ tst(r0, Operand(kSmiTagMask));
__ b(eq, &use_receiver);
// If the type of the result (stored in its map) is less than
// FIRST_JS_OBJECT_TYPE, it is not an object in the ECMA sense.
__ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
__ ldrb(r2, FieldMemOperand(r2, Map::kInstanceTypeOffset));
__ cmp(r2, Operand(FIRST_JS_OBJECT_TYPE));
__ ldr(r3, FieldMemOperand(r0, HeapObject::kMapOffset));
__ ldrb(r3, FieldMemOperand(r3, Map::kInstanceTypeOffset));
__ cmp(r3, Operand(FIRST_JS_OBJECT_TYPE));
__ b(ge, &exit);
// Throw away the result of the constructor invocation and use the
@ -120,7 +164,15 @@ void Builtins::Generate_JSConstructCall(MacroAssembler* masm) {
// Remove receiver from the stack, remove caller arguments, and
// return.
__ bind(&exit);
__ ExitJSFrame(RETURN);
// r0: result
// sp[0]: receiver (newly allocated object)
// sp[1]: constructor function
// sp[2]: number of arguments (smi-tagged)
__ ldr(r1, MemOperand(sp, 2 * kPointerSize));
__ ExitInternalFrame();
__ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2 - 1));
__ add(sp, sp, Operand(kPointerSize));
__ mov(pc, Operand(lr));
// Compute the offset from the beginning of the JSConstructCall
// builtin code object to the return address after the call.
@ -139,24 +191,11 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
// r4: argv
// r5-r7, cp may be clobbered
// Enter the JS frame
// compute parameter pointer before making changes
__ mov(ip, Operand(sp)); // ip == caller_sp == new pp
// Clear the context before we push it when entering the JS frame.
__ mov(cp, Operand(0));
__ mov(r5, Operand(0)); // spare slot to store caller code object during GC
__ mov(r6, Operand(0)); // no context
__ mov(r7, Operand(0)); // no incoming parameters
__ mov(r8, Operand(0)); // caller_pp == NULL for trampoline frames
ASSERT(cp.bit() == r8.bit()); // adjust the code otherwise
// push in reverse order:
// code (r5==0), context (r6==0), args_len (r7==0), caller_pp (r8==0),
// caller_fp, sp_on_exit (caller_sp), caller_pc
__ stm(db_w, sp, r5.bit() | r6.bit() | r7.bit() | r8.bit() |
fp.bit() | ip.bit() | lr.bit());
// Setup new frame pointer.
__ add(fp, sp, Operand(-StandardFrameConstants::kCodeOffset));
__ mov(pp, Operand(ip)); // setup new parameter pointer
// Enter an internal frame.
__ EnterInternalFrame();
// Setup the context from the function argument.
__ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
@ -191,22 +230,21 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
__ mov(r9, Operand(r4));
// Invoke the code and pass argc as r0.
__ mov(r0, Operand(r3));
if (is_construct) {
__ mov(r0, Operand(r3));
__ Call(Handle<Code>(Builtins::builtin(Builtins::JSConstructCall)),
code_target);
} else {
ParameterCount actual(r3);
ParameterCount actual(r0);
__ InvokeFunction(r1, actual, CALL_FUNCTION);
}
// Exit the JS frame and remove the parameters (except function), and return.
// Respect ABI stack constraint.
__ add(sp, fp, Operand(StandardFrameConstants::kCallerFPOffset));
__ ldm(ia, sp, fp.bit() | sp.bit() | pc.bit());
__ ExitInternalFrame();
__ mov(pc, lr);
// r0: result
// pp: not restored, should not be used anymore
}
@ -221,22 +259,411 @@ void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
// TODO(1233523): Implement. Unused for now.
__ stop("Builtins::Generate_FunctionApply");
const int kIndexOffset = -5 * kPointerSize;
const int kLimitOffset = -4 * kPointerSize;
const int kArgsOffset = 2 * kPointerSize;
const int kRecvOffset = 3 * kPointerSize;
const int kFunctionOffset = 4 * kPointerSize;
__ EnterInternalFrame();
__ ldr(r0, MemOperand(fp, kFunctionOffset)); // get the function
__ push(r0);
__ ldr(r0, MemOperand(fp, kArgsOffset)); // get the args array
__ push(r0);
__ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_JS);
// Eagerly check for stack-overflow before starting to push the arguments.
// r0: number of arguments
Label okay;
{ Label L;
__ mov(r1, Operand(391864 << kSmiTagSize));
__ cmp(r0, r1);
__ b(cc, &L);
__ bind(&L);
}
ExternalReference stack_guard_limit_address =
ExternalReference::address_of_stack_guard_limit();
__ mov(r2, Operand(stack_guard_limit_address));
__ ldr(r2, MemOperand(r2));
__ sub(r2, sp, r2);
__ sub(r2, r2, Operand(3 * kPointerSize)); // limit, index, receiver
__ cmp(r2, Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize));
__ b(hi, &okay);
// Out of stack space.
__ ldr(r1, MemOperand(fp, kFunctionOffset));
__ push(r1);
__ push(r0);
__ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_JS);
// Push current limit and index.
__ bind(&okay);
__ push(r0); // limit
__ mov(r1, Operand(0)); // initial index
__ push(r1);
// Change context eagerly to get the right global object if necessary.
__ ldr(r0, MemOperand(fp, kFunctionOffset));
__ ldr(cp, FieldMemOperand(r0, JSFunction::kContextOffset));
// Compute the receiver.
Label call_to_object, use_global_receiver, push_receiver;
__ ldr(r0, MemOperand(fp, kRecvOffset));
__ tst(r0, Operand(kSmiTagMask));
__ b(eq, &call_to_object);
__ mov(r1, Operand(Factory::null_value()));
__ cmp(r0, r1);
__ b(eq, &use_global_receiver);
__ mov(r1, Operand(Factory::undefined_value()));
__ cmp(r0, r1);
__ b(eq, &use_global_receiver);
// Check if the receiver is already a JavaScript object.
// r0: receiver
__ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
__ ldrb(r1, FieldMemOperand(r1, Map::kInstanceTypeOffset));
__ cmp(r1, Operand(FIRST_JS_OBJECT_TYPE));
__ b(lt, &call_to_object);
__ cmp(r1, Operand(LAST_JS_OBJECT_TYPE));
__ b(le, &push_receiver);
// Convert the receiver to a regular object.
// r0: receiver
__ bind(&call_to_object);
__ push(r0);
__ InvokeBuiltin(Builtins::TO_OBJECT, CALL_JS);
__ b(&push_receiver);
// Use the current global object as the receiver.
__ bind(&use_global_receiver);
__ ldr(r0, FieldMemOperand(cp, Context::kHeaderSize +
Context::GLOBAL_INDEX * kPointerSize));
// Push the receiver.
// r0: receiver
__ bind(&push_receiver);
__ push(r0);
// Copy all arguments from the array to the stack.
Label entry, loop;
__ ldr(r0, MemOperand(fp, kIndexOffset));
__ b(&entry);
// Load the current argument from the arguments array and push it to the
// stack.
// r0: current argument index
__ bind(&loop);
__ ldr(r1, MemOperand(fp, kArgsOffset));
__ push(r1);
__ push(r0);
// Call the runtime to access the property in the arguments array.
__ CallRuntime(Runtime::kGetProperty, 2);
__ push(r0);
// Use inline caching to access the arguments.
__ ldr(r0, MemOperand(fp, kIndexOffset));
__ add(r0, r0, Operand(1 << kSmiTagSize));
__ str(r0, MemOperand(fp, kIndexOffset));
// Test if the copy loop has finished copying all the elements from the
// arguments object.
__ bind(&entry);
__ ldr(r1, MemOperand(fp, kLimitOffset));
__ cmp(r0, r1);
__ b(ne, &loop);
// Invoke the function.
ParameterCount actual(r0);
__ mov(r0, Operand(r0, ASR, kSmiTagSize));
__ ldr(r1, MemOperand(fp, kFunctionOffset));
__ InvokeFunction(r1, actual, CALL_FUNCTION);
// Tear down the internal frame and remove function, receiver and args.
__ ExitInternalFrame();
__ add(sp, sp, Operand(3 * kPointerSize));
__ mov(pc, lr);
}
static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
__ mov(r0, Operand(r0, LSL, kSmiTagSize));
__ mov(r4, Operand(ArgumentsAdaptorFrame::SENTINEL));
__ stm(db_w, sp, r0.bit() | r1.bit() | r4.bit() | fp.bit() | lr.bit());
__ add(fp, sp, Operand(3 * kPointerSize));
}
static void ExitArgumentsAdaptorFrame(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- r0 : result being passed through
// -----------------------------------
// Get the number of arguments passed (as a smi), tear down the frame and
// then tear down the parameters.
__ ldr(r1, MemOperand(fp, -3 * kPointerSize));
__ mov(sp, fp);
__ ldm(ia_w, sp, fp.bit() | lr.bit());
__ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2 - kSmiTagSize));
__ add(sp, sp, Operand(kPointerSize)); // adjust for receiver
}
void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
// TODO(1233523): Implement. Unused for now.
__ stop("Builtins::Generate_ArgumentsAdaptorTrampoline");
// ----------- S t a t e -------------
// -- r0 : actual number of arguments
// -- r1 : function (passed through to callee)
// -- r2 : expected number of arguments
// -- r3 : code entry to call
// -----------------------------------
Label entry, invoke, function_prototype_call;
__ bind(&entry);
Label enough, too_few;
__ cmp(r0, Operand(r2));
__ b(lt, &too_few);
__ cmp(r2, Operand(-1));
__ b(eq, &function_prototype_call);
{ // Enough parameters: actual >= expected
__ bind(&enough);
EnterArgumentsAdaptorFrame(masm);
// Calculate copy start address into r0 and copy end address into r2.
// r0: actual number of arguments as a smi
// r1: function
// r2: expected number of arguments
// r3: code entry to call
__ add(r0, fp, Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize));
// adjust for return address and receiver
__ add(r0, r0, Operand(2 * kPointerSize));
__ sub(r2, r0, Operand(r2, LSL, kPointerSizeLog2));
// Copy the arguments (including the receiver) to the new stack frame.
// r0: copy start address
// r1: function
// r2: copy end address
// r3: code entry to call
Label copy;
__ bind(&copy);
__ ldr(ip, MemOperand(r0, 0));
__ push(ip);
__ cmp(r0, r2); // Compare before moving to next argument.
__ sub(r0, r0, Operand(kPointerSize));
__ b(ne, &copy);
__ b(&invoke);
}
{ // Too few parameters: Actual < expected
__ bind(&too_few);
EnterArgumentsAdaptorFrame(masm);
// Calculate copy start address into r0 and copy end address is fp.
// r0: actual number of arguments as a smi
// r1: function
// r2: expected number of arguments
// r3: code entry to call
__ add(r0, fp, Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize));
// Copy the arguments (including the receiver) to the new stack frame.
// r0: copy start address
// r1: function
// r2: expected number of arguments
// r3: code entry to call
Label copy;
__ bind(&copy);
// Adjust load for return address and receiver.
__ ldr(ip, MemOperand(r0, 2 * kPointerSize));
__ push(ip);
__ cmp(r0, fp); // Compare before moving to next argument.
__ sub(r0, r0, Operand(kPointerSize));
__ b(ne, &copy);
// Fill the remaining expected arguments with undefined.
// r1: function
// r2: expected number of arguments
// r3: code entry to call
__ mov(ip, Operand(Factory::undefined_value()));
__ sub(r2, fp, Operand(r2, LSL, kPointerSizeLog2));
__ sub(r2, r2, Operand(4 * kPointerSize)); // Adjust for frame.
Label fill;
__ bind(&fill);
__ push(ip);
__ cmp(sp, r2);
__ b(ne, &fill);
}
// Call the entry point.
Label return_site;
__ bind(&invoke);
__ Call(r3);
__ bind(&return_site);
ExitArgumentsAdaptorFrame(masm);
__ mov(pc, lr);
// Compute the offset from the beginning of the ArgumentsAdaptorTrampoline
// builtin code object to the return address after the call.
ASSERT(return_site.is_bound());
arguments_adaptor_call_pc_offset_ = return_site.pos() + Code::kHeaderSize;
// -------------------------------------------
// Function.prototype.call implementation.
// -------------------------------------------
// r0: actual number of arguments
__ bind(&function_prototype_call);
// 1. Make sure we have at least one argument.
// r0: actual number of arguments
{ Label done;
__ tst(r0, Operand(r0));
__ b(ne, &done);
__ mov(r2, Operand(Factory::undefined_value()));
__ push(r2);
__ add(r0, r0, Operand(1));
__ bind(&done);
}
// 2. Get the function to call. Already in r1.
// r0: actual number of arguments
{ Label done, non_function, function;
__ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
__ tst(r1, Operand(kSmiTagMask));
__ b(eq, &non_function);
__ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
__ ldrb(r2, FieldMemOperand(r2, Map::kInstanceTypeOffset));
__ cmp(r2, Operand(JS_FUNCTION_TYPE));
__ b(eq, &function);
// Non-function called: Clear the function to force exception.
__ bind(&non_function);
__ mov(r1, Operand(0));
__ b(&done);
// Change the context eagerly because it will be used below to get the
// right global object.
__ bind(&function);
__ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
__ bind(&done);
}
// 3. Make sure first argument is an object; convert if necessary.
// r0: actual number of arguments
// r1: function
{ Label call_to_object, use_global_receiver, patch_receiver, done;
__ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2));
__ ldr(r2, MemOperand(r2, -kPointerSize));
// r0: actual number of arguments
// r1: function
// r2: first argument
__ tst(r2, Operand(kSmiTagMask));
__ b(eq, &call_to_object);
__ mov(r3, Operand(Factory::null_value()));
__ cmp(r2, r3);
__ b(eq, &use_global_receiver);
__ mov(r3, Operand(Factory::undefined_value()));
__ cmp(r2, r3);
__ b(eq, &use_global_receiver);
__ ldr(r3, FieldMemOperand(r2, HeapObject::kMapOffset));
__ ldrb(r3, FieldMemOperand(r3, Map::kInstanceTypeOffset));
__ cmp(r3, Operand(FIRST_JS_OBJECT_TYPE));
__ b(lt, &call_to_object);
__ cmp(r3, Operand(LAST_JS_OBJECT_TYPE));
__ b(le, &done);
__ bind(&call_to_object);
__ EnterInternalFrame();
// Store number of arguments and function across the call into the runtime.
__ mov(r0, Operand(r0, LSL, kSmiTagSize));
__ push(r0);
__ push(r1);
__ push(r2);
__ InvokeBuiltin(Builtins::TO_OBJECT, CALL_JS);
__ mov(r2, r0);
// Restore number of arguments and function.
__ pop(r1);
__ pop(r0);
__ mov(r0, Operand(r0, ASR, kSmiTagSize));
__ ExitInternalFrame();
__ b(&patch_receiver);
// Use the global object from the called function as the receiver.
__ bind(&use_global_receiver);
const int kGlobalIndex =
Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
__ ldr(r2, FieldMemOperand(cp, kGlobalIndex));
__ bind(&patch_receiver);
__ add(r3, sp, Operand(r0, LSL, kPointerSizeLog2));
__ str(r2, MemOperand(r3, -kPointerSize));
__ bind(&done);
}
// 4. Shift stuff one slot down the stack
// r0: actual number of arguments (including call() receiver)
// r1: function
{ Label loop;
// Calculate the copy start address (destination). Copy end address is sp.
__ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2));
__ bind(&loop);
__ ldr(ip, MemOperand(r2, -kPointerSize));
__ str(ip, MemOperand(r2));
__ sub(r2, r2, Operand(kPointerSize));
__ cmp(r2, sp);
__ b(ne, &loop);
}
// 5. Adjust the actual number of arguments and remove the top element.
// r0: actual number of arguments (including call() receiver)
// r1: function
__ sub(r0, r0, Operand(1));
__ add(sp, sp, Operand(kPointerSize));
// 6. Get the code for the function or the non-function builtin.
// If number of expected arguments matches, then call. Otherwise restart
// the arguments adaptor stub.
// r0: actual number of arguments
// r1: function
{ Label invoke;
__ tst(r1, r1);
__ b(ne, &invoke);
__ stop("Generate_ArgumentsAdaptorTrampoline - non-function call");
__ mov(r2, Operand(0)); // expected arguments is 0 for CALL_NON_FUNCTION
__ GetBuiltinEntry(r3, Builtins::CALL_NON_FUNCTION);
__ b(&enough);
__ bind(&invoke);
__ ldr(r3, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
__ ldr(r2,
FieldMemOperand(r3,
SharedFunctionInfo::kFormalParameterCountOffset));
__ ldr(r3,
MemOperand(r3, SharedFunctionInfo::kCodeOffset - kHeapObjectTag));
__ add(r3, r3, Operand(Code::kHeaderSize - kHeapObjectTag));
__ cmp(r2, r0); // Check formal and actual parameter counts.
__ b(ne, &entry);
// 7. Jump to the code in r3 without checking arguments.
ParameterCount expected(0);
__ InvokeCode(r3, expected, expected, JUMP_FUNCTION);
}
}

View File

@ -54,8 +54,13 @@ DEFINE_bool(inline_new, true, "use fast inline allocation");
void Builtins::Generate_JSConstructCall(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- eax: number of arguments
// -- edi: constructor function
// -----------------------------------
// Enter an internal frame.
__ EnterFrame(StackFrame::INTERNAL);
__ EnterInternalFrame();
// Store a smi-tagged arguments count on the stack.
__ shl(eax, kSmiTagSize);
@ -296,7 +301,7 @@ void Builtins::Generate_JSConstructCall(MacroAssembler* masm) {
// Restore the arguments count and exit the internal frame.
__ bind(&exit);
__ mov(ebx, Operand(esp, kPointerSize)); // get arguments count
__ ExitFrame(StackFrame::INTERNAL);
__ ExitInternalFrame();
// Remove caller arguments from the stack and return.
ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
@ -318,7 +323,7 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
__ xor_(esi, Operand(esi)); // clear esi
// Enter an internal frame.
__ EnterFrame(StackFrame::INTERNAL);
__ EnterInternalFrame();
// Load the previous frame pointer (ebx) to access C arguments
__ mov(ebx, Operand(ebp, 0));
@ -362,7 +367,7 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
// Exit the JS frame. Notice that this also removes the empty
// context and the function left on the stack by the code
// invocation.
__ ExitFrame(StackFrame::INTERNAL);
__ ExitInternalFrame();
__ ret(1 * kPointerSize); // remove receiver
}
@ -378,7 +383,7 @@ void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
__ EnterFrame(StackFrame::INTERNAL);
__ EnterInternalFrame();
__ push(Operand(ebp, 4 * kPointerSize)); // push this
__ push(Operand(ebp, 2 * kPointerSize)); // push arguments
@ -482,7 +487,7 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
__ mov(edi, Operand(ebp, 4 * kPointerSize));
__ InvokeFunction(edi, actual, CALL_FUNCTION);
__ ExitFrame(StackFrame::INTERNAL);
__ ExitInternalFrame();
__ ret(3 * kPointerSize); // remove this, receiver, and arguments
}
@ -586,8 +591,7 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
__ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
}
// Mark the adaptor frame as special by overwriting the context slot
// in the stack with a sentinel.
// Call the entry point.
Label return_site;
__ bind(&invoke);
__ call(Operand(edx));
@ -661,7 +665,7 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
__ j(less_equal, &done);
__ bind(&call_to_object);
__ EnterFrame(StackFrame::INTERNAL); // preserves eax, ebx, edi
__ EnterInternalFrame(); // preserves eax, ebx, edi
// Store the arguments count on the stack (smi tagged).
ASSERT(kSmiTag == 0);
@ -678,7 +682,7 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
__ pop(eax);
__ shr(eax, kSmiTagSize);
__ ExitFrame(StackFrame::INTERNAL);
__ ExitInternalFrame();
__ jmp(&patch_receiver);
// Use the global object from the called function as the receiver.
@ -747,7 +751,7 @@ static void Generate_DebugBreakCallHelper(MacroAssembler* masm,
__ SaveRegistersToMemory(kJSCallerSaved);
// Enter an internal frame.
__ EnterFrame(StackFrame::INTERNAL);
__ EnterInternalFrame();
// Store the registers containing object pointers on the expression stack to
// make sure that these are correctly updated during GC.
@ -767,7 +771,7 @@ static void Generate_DebugBreakCallHelper(MacroAssembler* masm,
__ PopRegistersToMemory(pointer_regs);
// Get rid of the internal frame.
__ ExitFrame(StackFrame::INTERNAL);
__ ExitInternalFrame();
// If this call did not replace a call but patched other code then there will
// be an unwanted return address left on the stack. Here we get rid of that.

View File

@ -155,6 +155,28 @@ bool Builtins::IsArgumentsAdaptorCall(Address pc) {
}
Handle<Code> Builtins::GetCode(JavaScript id, bool* resolved) {
Code* code = Builtins::builtin(Builtins::Illegal);
*resolved = false;
if (Top::security_context() != NULL) {
Object* object = Top::security_context_builtins()->javascript_builtin(id);
if (object->IsJSFunction()) {
Handle<JSFunction> function(JSFunction::cast(object));
// Make sure the number of parameters match the formal parameter count.
ASSERT(function->shared()->formal_parameter_count() ==
Builtins::GetArgumentsCount(id));
if (function->is_compiled() || CompileLazy(function, CLEAR_EXCEPTION)) {
code = function->code();
*resolved = true;
}
}
}
return Handle<Code>(code);
}
BUILTIN_0(Illegal) {
UNREACHABLE();
}
@ -354,13 +376,8 @@ BUILTIN_0(HandleApiCall) {
// TODO(1238487): This is not nice. We need to get rid of this
// kludgy behavior and start handling API calls in a more direct
// way - maybe compile specialized stubs lazily?.
#ifdef USE_OLD_CALLING_CONVENTIONS
Handle<JSFunction> function =
Handle<JSFunction>(JSFunction::cast(__argv__[1]));
#else
Handle<JSFunction> function =
Handle<JSFunction>(JSFunction::cast(Builtins::builtin_passed_function));
#endif
if (is_construct) {
Handle<FunctionTemplateInfo> desc =

View File

@ -185,6 +185,7 @@ class Builtins : public AllStatic {
static const char* GetName(JavaScript id) { return javascript_names_[id]; }
static int GetArgumentsCount(JavaScript id) { return javascript_argc_[id]; }
static Handle<Code> GetCode(JavaScript id, bool* resolved);
static int NumberOfJavaScriptBuiltins() { return id_count; }
// Called from stub-cache.cc.

View File

@ -202,15 +202,17 @@ class ArmCodeGenerator: public CodeGenerator {
// index -2 corresponds to the activated closure, -1 corresponds
// to the receiver
ASSERT(-2 <= index && index < scope->num_parameters());
int offset = JavaScriptFrameConstants::kParam0Offset - index * kPointerSize;
return MemOperand(pp, offset);
int offset = (1 + scope->num_parameters() - index) * kPointerSize;
return MemOperand(fp, offset);
}
MemOperand ParameterOperand(int index) const {
return ParameterOperand(scope_, index);
}
MemOperand FunctionOperand() const { return ParameterOperand(-2); }
MemOperand FunctionOperand() const {
return MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset);
}
static MemOperand SlotOperand(MacroAssembler* masm,
Scope* scope,
@ -305,8 +307,8 @@ class ArmCodeGenerator: public CodeGenerator {
void RecordStatementPosition(Node* node);
// Activation frames
void EnterJSFrame(int argc); // preserves r1
void ExitJSFrame(ExitJSFlag flag = RETURN); // preserves r0-r2
void EnterJSFrame();
void ExitJSFrame();
virtual void GenerateShiftDownAndTailCall(ZoneList<Expression*>* args);
virtual void GenerateSetThisFunction(ZoneList<Expression*>* args);
@ -468,7 +470,7 @@ void ArmCodeGenerator::GenCode(FunctionLiteral* fun) {
// cp: callee's context
{ Comment cmnt(masm_, "[ enter JS frame");
EnterJSFrame(scope->num_parameters());
EnterJSFrame();
}
// tos: code slot
#ifdef DEBUG
@ -526,9 +528,7 @@ void ArmCodeGenerator::GenCode(FunctionLiteral* fun) {
Slot* slot = par->slot();
if (slot != NULL && slot->type() == Slot::CONTEXT) {
ASSERT(!scope->is_global_scope()); // no parameters in global scope
int parameter_offset =
JavaScriptFrameConstants::kParam0Offset - i * kPointerSize;
__ ldr(r1, MemOperand(pp, parameter_offset));
__ ldr(r1, ParameterOperand(i));
// Loads r2 with context; used below in RecordWrite.
__ str(r1, SlotOperand(slot, r2));
// Load the offset into r3.
@ -629,8 +629,13 @@ void ArmCodeGenerator::GenCode(FunctionLiteral* fun) {
__ CallRuntime(Runtime::kTraceExit, 1);
}
// Tear down the frame which will restore the caller's frame pointer and the
// link register.
ExitJSFrame();
__ add(sp, sp, Operand((scope_->num_parameters() + 1) * kPointerSize));
__ mov(pc, lr);
// Code generation state must be reset.
scope_ = NULL;
ASSERT(!has_cc());
@ -978,6 +983,7 @@ void Slot::GenerateStoreCode(MacroAssembler* masm,
// Skip write barrier if the written value is a smi.
masm->tst(r0, Operand(kSmiTagMask));
masm->b(eq, &exit);
may_skip_write = true;
// r2 is loaded with context when calling SlotOperand above.
int offset = FixedArray::kHeaderSize + index() * kPointerSize;
masm->mov(r3, Operand(offset));
@ -1281,7 +1287,7 @@ void GenericBinaryOpStub::Generate(MacroAssembler* masm) {
__ push(r1);
__ push(r0);
__ mov(r0, Operand(1)); // set number of arguments
__ InvokeBuiltin("ADD", 1, JUMP_JS);
__ InvokeBuiltin(Builtins::ADD, JUMP_JS);
// done
__ bind(&exit);
break;
@ -1304,7 +1310,7 @@ void GenericBinaryOpStub::Generate(MacroAssembler* masm) {
__ push(r1);
__ push(r0);
__ mov(r0, Operand(1)); // set number of arguments
__ InvokeBuiltin("SUB", 1, JUMP_JS);
__ InvokeBuiltin(Builtins::SUB, JUMP_JS);
// done
__ bind(&exit);
break;
@ -1334,7 +1340,7 @@ void GenericBinaryOpStub::Generate(MacroAssembler* masm) {
__ push(r1);
__ push(r0);
__ mov(r0, Operand(1)); // set number of arguments
__ InvokeBuiltin("MUL", 1, JUMP_JS);
__ InvokeBuiltin(Builtins::MUL, JUMP_JS);
// done
__ bind(&exit);
break;
@ -1361,10 +1367,17 @@ void GenericBinaryOpStub::Generate(MacroAssembler* masm) {
__ push(r0);
__ mov(r0, Operand(1)); // 1 argument (not counting receiver).
switch (op_) {
case Token::BIT_OR: __ InvokeBuiltin("BIT_OR", 1, JUMP_JS); break;
case Token::BIT_AND: __ InvokeBuiltin("BIT_AND", 1, JUMP_JS); break;
case Token::BIT_XOR: __ InvokeBuiltin("BIT_XOR", 1, JUMP_JS); break;
default: UNREACHABLE();
case Token::BIT_OR:
__ InvokeBuiltin(Builtins::BIT_OR, JUMP_JS);
break;
case Token::BIT_AND:
__ InvokeBuiltin(Builtins::BIT_AND, JUMP_JS);
break;
case Token::BIT_XOR:
__ InvokeBuiltin(Builtins::BIT_XOR, JUMP_JS);
break;
default:
UNREACHABLE();
}
__ bind(&exit);
break;
@ -1422,9 +1435,9 @@ void GenericBinaryOpStub::Generate(MacroAssembler* masm) {
__ push(r0);
__ mov(r0, Operand(1)); // 1 argument (not counting receiver).
switch (op_) {
case Token::SAR: __ InvokeBuiltin("SAR", 1, JUMP_JS); break;
case Token::SHR: __ InvokeBuiltin("SHR", 1, JUMP_JS); break;
case Token::SHL: __ InvokeBuiltin("SHL", 1, JUMP_JS); break;
case Token::SAR: __ InvokeBuiltin(Builtins::SAR, JUMP_JS); break;
case Token::SHR: __ InvokeBuiltin(Builtins::SHR, JUMP_JS); break;
case Token::SHL: __ InvokeBuiltin(Builtins::SHL, JUMP_JS); break;
default: UNREACHABLE();
}
__ bind(&exit);
@ -1480,7 +1493,7 @@ void UnarySubStub::Generate(MacroAssembler* masm) {
__ bind(&slow);
__ push(r0);
__ mov(r0, Operand(0)); // set number of arguments
__ InvokeBuiltin("UNARY_MINUS", 0, JUMP_JS);
__ InvokeBuiltin(Builtins::UNARY_MINUS, JUMP_JS);
__ bind(&done);
masm->StubReturn(1);
@ -1516,45 +1529,15 @@ void InvokeBuiltinStub::Generate(MacroAssembler* masm) {
__ push(r0);
__ mov(r0, Operand(0)); // set number of arguments
switch (kind_) {
case ToNumber: __ InvokeBuiltin("TO_NUMBER", 0, JUMP_JS); break;
case Inc: __ InvokeBuiltin("INC", 0, JUMP_JS); break;
case Dec: __ InvokeBuiltin("DEC", 0, JUMP_JS); break;
case ToNumber: __ InvokeBuiltin(Builtins::TO_NUMBER, JUMP_JS); break;
case Inc: __ InvokeBuiltin(Builtins::INC, JUMP_JS); break;
case Dec: __ InvokeBuiltin(Builtins::DEC, JUMP_JS); break;
default: UNREACHABLE();
}
masm->StubReturn(argc_);
}
class JSExitStub : public CodeStub {
public:
enum Kind { Inc, Dec, ToNumber };
explicit JSExitStub(ExitJSFlag flag) : flag_(flag) { }
private:
ExitJSFlag flag_;
Major MajorKey() { return JSExit; }
int MinorKey() { return static_cast<int>(flag_); }
void Generate(MacroAssembler* masm);
const char* GetName() { return "JSExitStub"; }
#ifdef DEBUG
void Print() {
PrintF("JSExitStub flag %d)\n", static_cast<int>(flag_));
}
#endif
};
void JSExitStub::Generate(MacroAssembler* masm) {
__ ExitJSFrame(flag_);
masm->StubReturn(1);
}
void CEntryStub::GenerateThrowTOS(MacroAssembler* masm) {
// r0 holds exception
ASSERT(StackHandlerConstants::kSize == 6 * kPointerSize); // adjust this code
@ -1701,7 +1684,7 @@ void CEntryStub::GenerateCore(MacroAssembler* masm,
__ mov(r3, Operand(Top::context_address()));
__ ldr(cp, MemOperand(r3));
__ mov(sp, Operand(fp)); // respect ABI stack constraint
__ ldm(ia, sp, pp.bit() | fp.bit() | sp.bit() | pc.bit());
__ ldm(ia, sp, fp.bit() | sp.bit() | pc.bit());
// check if we should retry or throw exception
Label retry;
@ -1755,13 +1738,13 @@ void CEntryStub::GenerateBody(MacroAssembler* masm, bool is_debug_break) {
// ip = sp + kPointerSize*args_len;
__ add(ip, sp, Operand(r0, LSL, kPointerSizeLog2));
// all JS callee-saved are saved and traversed by GC; push in reverse order:
// JS callee-saved, caller_pp, caller_fp, sp_on_exit (ip==pp), caller_pc
__ stm(db_w, sp, pp.bit() | fp.bit() | ip.bit() | lr.bit());
// push in reverse order:
// caller_fp, sp_on_exit, caller_pc
__ stm(db_w, sp, fp.bit() | ip.bit() | lr.bit());
__ mov(fp, Operand(sp)); // setup new frame pointer
// Store the current context in top.
__ mov(ip, Operand(Top::context_address()));
__ mov(ip, Operand(ExternalReference(Top::k_context_address)));
__ str(cp, MemOperand(ip));
// remember top frame
@ -1838,33 +1821,40 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
// Called from C, so do not pop argc and args on exit (preserve sp)
// No need to save register-passed args
// Save callee-saved registers (incl. cp, pp, and fp), sp, and lr
__ mov(ip, Operand(sp));
__ stm(db_w, sp, kCalleeSaved | ip.bit() | lr.bit());
// Setup frame pointer
__ mov(fp, Operand(sp));
// Add constructor mark.
__ mov(ip, Operand(is_construct ? 1 : 0));
__ push(ip);
// Move arguments into registers expected by Builtins::JSEntryTrampoline
// preserve r0-r3, set r4, r5-r7 may be clobbered
__ stm(db_w, sp, kCalleeSaved | lr.bit());
// Get address of argv, see stm above.
__ add(r4, sp, Operand((kNumCalleeSaved + 3)*kPointerSize));
// r0: code entry
// r1: function
// r2: receiver
// r3: argc
__ add(r4, sp, Operand((kNumCalleeSaved + 1)*kPointerSize));
__ ldr(r4, MemOperand(r4)); // argv
// Save copies of the top frame descriptors on the stack.
__ mov(ip, Operand(ExternalReference(Top::k_c_entry_fp_address)));
__ ldr(r6, MemOperand(ip));
__ stm(db_w, sp, r6.bit());
// Push a frame with special values setup to mark it as an entry frame.
// r0: code entry
// r1: function
// r2: receiver
// r3: argc
// r4: argv
int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY;
__ mov(r8, Operand(-1)); // Push a bad frame pointer to fail if it is used.
__ mov(r7, Operand(~ArgumentsAdaptorFrame::SENTINEL));
__ mov(r6, Operand(Smi::FromInt(marker)));
__ mov(r5, Operand(ExternalReference(Top::k_c_entry_fp_address)));
__ ldr(r5, MemOperand(r5));
__ stm(db_w, sp, r5.bit() | r6.bit() | r7.bit() | r8.bit());
// Setup frame pointer for the frame to be pushed.
__ add(fp, sp, Operand(-EntryFrameConstants::kCallerFPOffset));
// Call a faked try-block that does the invoke.
__ bl(&invoke);
// Caught exception: Store result (exception) in the pending
// exception field in the JSEnv and return a failure sentinel.
// Coming in here the fp will be invalid because the PushTryHandler below
// sets it to 0 to signal the existence of the JSEntry frame.
__ mov(ip, Operand(Top::pending_exception_address()));
__ str(r0, MemOperand(ip));
__ mov(r0, Operand(Handle<Failure>(Failure::Exception())));
@ -1872,7 +1862,7 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
// Invoke: Link this frame into the handler chain.
__ bind(&invoke);
// Must preserve r0-r3, r5-r7 are available.
// Must preserve r0-r4, r5-r7 are available.
__ PushTryHandler(IN_JS_ENTRY, JS_ENTRY_HANDLER);
// If an exception not caught by another handler occurs, this handler returns
// control to the code after the bl(&invoke) above, which restores all
@ -1920,18 +1910,18 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
__ bind(&exit); // r0 holds result
// Restore the top frame descriptors from the stack.
__ ldm(ia_w, sp, r3.bit());
__ pop(r3);
__ mov(ip, Operand(ExternalReference(Top::k_c_entry_fp_address)));
__ str(r3, MemOperand(ip));
// Remove constructor mark.
__ pop();
// Reset the stack to the callee saved registers.
__ add(sp, sp, Operand(-EntryFrameConstants::kCallerFPOffset));
// Restore callee-saved registers, sp, and return.
// Restore callee-saved registers and return.
#ifdef DEBUG
if (FLAG_debug_code) __ mov(lr, Operand(pc));
#endif
__ ldm(ia, sp, kCalleeSaved | sp.bit() | pc.bit());
__ ldm(ia_w, sp, kCalleeSaved | pc.bit());
}
@ -1958,31 +1948,82 @@ class ArgumentsAccessStub: public CodeStub {
void ArgumentsAccessStub::Generate(MacroAssembler* masm) {
if (is_length_) {
__ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kArgsLengthOffset));
__ mov(r0, Operand(r0, LSL, kSmiTagSize));
__ Ret();
} else {
// Check that the key is a smi.
Label slow;
__ tst(r0, Operand(kSmiTagMask));
// ----------- S t a t e -------------
// -- r0: formal number of parameters for the calling function
// -- r1: key (if value access)
// -- lr: return address
// -----------------------------------
// Check that the key is a smi for non-length accesses.
Label slow;
if (!is_length_) {
__ tst(r1, Operand(kSmiTagMask));
__ b(ne, &slow);
}
// Get the actual number of arguments passed and do bounds
// check. Use unsigned comparison to get negative check for free.
__ ldr(r1, MemOperand(fp, JavaScriptFrameConstants::kArgsLengthOffset));
__ cmp(r0, Operand(r1, LSL, kSmiTagSize));
__ b(hs, &slow);
// Check if the calling frame is an arguments adaptor frame.
// r0: formal number of parameters
// r1: key (if access)
Label adaptor;
__ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
__ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset));
__ cmp(r3, Operand(ArgumentsAdaptorFrame::SENTINEL));
__ b(eq, &adaptor);
// Load the argument directly from the stack and return.
__ sub(r1, pp, Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize));
__ ldr(r0, MemOperand(r1, JavaScriptFrameConstants::kParam0Offset));
__ Ret();
static const int kParamDisplacement =
StandardFrameConstants::kCallerSPOffset - kPointerSize;
// Slow-case: Handle non-smi or out-of-bounds access to arguments
// by calling the runtime system.
if (is_length_) {
// Nothing to do: the formal length of parameters has been passed in r0
// by the calling function.
} else {
// Check index against formal parameter count. Use unsigned comparison to
// get the negative check for free.
// r0: formal number of parameters
// r1: index
__ cmp(r1, r0);
__ b(cs, &slow);
// Read the argument from the current frame.
__ sub(r3, r0, r1);
__ add(r3, fp, Operand(r3, LSL, kPointerSizeLog2 - kSmiTagSize));
__ ldr(r0, MemOperand(r3, kParamDisplacement));
}
// Return to the calling function.
__ mov(pc, lr);
// An arguments adaptor frame is present. Find the length or the actual
// argument in the calling frame.
// r0: formal number of parameters
// r1: key
// r2: adaptor frame pointer
__ bind(&adaptor);
// Read the arguments length from the adaptor frame. This is the result if
// only accessing the length, otherwise it is used in accessing the value
__ ldr(r0, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset));
if (!is_length_) {
// Check index against actual arguments count. Use unsigned comparison to
// get the negative check for free.
// r0: actual number of parameter
// r1: index
// r2: adaptor frame point
__ cmp(r1, r0);
__ b(cs, &slow);
// Read the argument from the adaptor frame.
__ sub(r3, r0, r1);
__ add(r3, r2, Operand(r3, LSL, kPointerSizeLog2 - kSmiTagSize));
__ ldr(r0, MemOperand(r3, kParamDisplacement));
}
// Return to the calling function.
__ mov(pc, lr);
if (!is_length_) {
__ bind(&slow);
__ push(r0);
__ push(r1);
__ TailCallRuntime(ExternalReference(Runtime::kGetArgumentsProperty), 1);
}
}
@ -2085,13 +2126,13 @@ void ArmCodeGenerator::GenericBinaryOperation(Token::Value op) {
case Token::DIV: {
__ mov(r0, Operand(1));
__ InvokeBuiltin("DIV", 1, CALL_JS);
__ InvokeBuiltin(Builtins::DIV, CALL_JS);
break;
}
case Token::MOD: {
__ mov(r0, Operand(1));
__ InvokeBuiltin("MOD", 1, CALL_JS);
__ InvokeBuiltin(Builtins::MOD, CALL_JS);
break;
}
@ -2346,13 +2387,13 @@ void ArmCodeGenerator::Comparison(Condition cc, bool strict) {
__ push(r1);
// Figure out which native to call and setup the arguments.
const char* native;
Builtins::JavaScript native;
int argc;
if (cc == eq) {
native = strict ? "STRICT_EQUALS" : "EQUALS";
native = strict ? Builtins::STRICT_EQUALS : Builtins::EQUALS;
argc = 1;
} else {
native = "COMPARE";
native = Builtins::COMPARE;
int ncr; // NaN compare result
if (cc == lt || cc == le) {
ncr = GREATER;
@ -2369,7 +2410,7 @@ void ArmCodeGenerator::Comparison(Condition cc, bool strict) {
// tagged as a small integer.
__ push(r0);
__ mov(r0, Operand(argc));
__ InvokeBuiltin(native, argc, CALL_JS);
__ InvokeBuiltin(native, CALL_JS);
__ cmp(r0, Operand(0));
__ b(&exit);
@ -2426,7 +2467,7 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
// Slow-case: Non-function called.
masm->bind(&slow);
masm->mov(r0, Operand(argc_)); // Setup the number of arguments.
masm->InvokeBuiltin("CALL_NON_FUNCTION", argc_, JUMP_JS);
masm->InvokeBuiltin(Builtins::CALL_NON_FUNCTION, JUMP_JS);
}
@ -2866,7 +2907,7 @@ void ArmCodeGenerator::VisitForInStatement(ForInStatement* node) {
__ bind(&primitive);
__ push(r0);
__ mov(r0, Operand(0));
__ InvokeBuiltin("TO_OBJECT", 0, CALL_JS);
__ InvokeBuiltin(Builtins::TO_OBJECT, CALL_JS);
__ bind(&jsobject);
@ -2960,7 +3001,7 @@ void ArmCodeGenerator::VisitForInStatement(ForInStatement* node) {
__ push(r0);
__ push(r3); // push entry
__ mov(r0, Operand(1));
__ InvokeBuiltin("FILTER_KEY", 1, CALL_JS);
__ InvokeBuiltin(Builtins::FILTER_KEY, CALL_JS);
__ mov(r3, Operand(r0));
// If the property has been removed while iterating, we just skip it.
@ -3358,7 +3399,7 @@ void ArmCodeGenerator::VisitRegExpLiteral(RegExpLiteral* node) {
// Retrieve the literal array and check the allocated entry.
// Load the function of this activation.
__ ldr(r1, MemOperand(pp, 0));
__ ldr(r1, FunctionOperand());
// Load the literals array of the function.
__ ldr(r1, FieldMemOperand(r1, JSFunction::kLiteralsOffset));
@ -3431,7 +3472,7 @@ void ArmCodeGenerator::VisitObjectLiteral(ObjectLiteral* node) {
// Retrieve the literal array and check the allocated entry.
// Load the function of this activation.
__ ldr(r1, MemOperand(pp, 0));
__ ldr(r1, FunctionOperand());
// Load the literals array of the function.
__ ldr(r1, FieldMemOperand(r1, JSFunction::kLiteralsOffset));
@ -3502,12 +3543,9 @@ void ArmCodeGenerator::VisitArrayLiteral(ArrayLiteral* node) {
// Call runtime to create the array literal.
__ mov(r0, Operand(node->literals()));
__ push(r0);
// TODO(1332579): The second argument to CreateArrayLiteral is
// supposed to be the literals array of the function of this frame.
// Until the new ARM calling convention is in place, that function
// is not always available. Therefore, on ARM we pass in the hole
// until the new calling convention is in place.
__ mov(r0, Operand(Factory::the_hole_value()));
// Load the function of this frame.
__ ldr(r0, FunctionOperand());
__ ldr(r0, FieldMemOperand(r0, JSFunction::kLiteralsOffset));
__ push(r0);
__ CallRuntime(Runtime::kCreateArrayLiteral, 2);
@ -3760,6 +3798,9 @@ void ArmCodeGenerator::VisitCallNew(CallNew* node) {
// r0: the number of arguments.
__ mov(r0, Operand(args->length()));
// Load the function into r1 as per calling convention.
__ ldr(r1, MemOperand(sp, (args->length() + 1) * kPointerSize));
// Call the construct call builtin that handles allocation and
// constructor invocation.
__ RecordPosition(position);
@ -3772,44 +3813,27 @@ void ArmCodeGenerator::VisitCallNew(CallNew* node) {
void ArmCodeGenerator::GenerateSetThisFunction(ZoneList<Expression*>* args) {
ASSERT(args->length() == 1);
Load(args->at(0));
__ ldr(r0, MemOperand(sp, 0));
__ str(r0, MemOperand(pp, JavaScriptFrameConstants::kFunctionOffset));
__ stop("ArmCodeGenerator::GenerateSetThisFunction - unreachable");
}
void ArmCodeGenerator::GenerateGetThisFunction(ZoneList<Expression*>* args) {
ASSERT(args->length() == 0);
__ ldr(r0, MemOperand(pp, JavaScriptFrameConstants::kFunctionOffset));
__ push(r0);
__ stop("ArmCodeGenerator::GenerateGetThisFunction - unreachable");
}
void ArmCodeGenerator::GenerateSetThis(ZoneList<Expression*>* args) {
ASSERT(args->length() == 1);
Load(args->at(0));
__ ldr(r0, MemOperand(sp, 0));
__ str(r0, MemOperand(pp, JavaScriptFrameConstants::kReceiverOffset));
__ stop("ArmCodeGenerator::GenerateSetThis - unreachable");
}
void ArmCodeGenerator::GenerateSetArgumentsLength(ZoneList<Expression*>* args) {
ASSERT(args->length() == 1);
Load(args->at(0));
__ pop(r0);
__ mov(r0, Operand(r0, LSR, kSmiTagSize));
__ str(r0, MemOperand(fp, JavaScriptFrameConstants::kArgsLengthOffset));
__ mov(r0, Operand(Smi::FromInt(0))); // return a meaningful value
__ push(r0);
__ stop("ArmCodeGenerator::GenerateSetArgumentsLength - unreachable");
}
void ArmCodeGenerator::GenerateGetArgumentsLength(ZoneList<Expression*>* args) {
ASSERT(args->length() == 1);
__ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kArgsLengthOffset));
__ mov(r0, Operand(r0, LSL, kSmiTagSize));
__ push(r0);
__ stop("ArmCodeGenerator::GenerateGetArgumentsLength - unreachable");
}
@ -3863,130 +3887,22 @@ void ArmCodeGenerator::GenerateSetValueOf(ZoneList<Expression*>* args) {
void ArmCodeGenerator::GenerateTailCallWithArguments(
ZoneList<Expression*>* args) {
// r0 = number of arguments (smi)
ASSERT(args->length() == 1);
Load(args->at(0));
__ pop(r0);
__ mov(r0, Operand(r0, LSR, kSmiTagSize));
// r1 = new function (previously written to stack)
__ ldr(r1, MemOperand(pp, JavaScriptFrameConstants::kFunctionOffset));
// Reset parameter pointer and frame pointer to previous frame
ExitJSFrame(DO_NOT_RETURN);
// Jump (tail-call) to the function in register r1.
__ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
__ ldr(r1, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
__ ldr(r1, FieldMemOperand(r1, SharedFunctionInfo::kCodeOffset));
__ add(pc, r1, Operand(Code::kHeaderSize - kHeapObjectTag));
__ stop("ArmCodeGenerator::GenerateTailCallWithArguments - unreachable");
}
void ArmCodeGenerator::GenerateSetArgument(ZoneList<Expression*>* args) {
ASSERT(args->length() == 3);
// r0 = args[i]; r1 = i
Comment cmnt(masm_, "[ GenerateSetArgument");
Load(args->at(1)); // args[i] (value)
Load(args->at(0)); // i
__ pop(r1); // i
__ pop(r0); // value
#if defined(DEBUG)
{ Label L;
__ tst(r1, Operand(kSmiTagMask));
__ b(eq, &L);
__ stop("SMI expected");
__ bind(&L);
}
#endif // defined(DEBUG)
__ add(r2, pp, Operand(JavaScriptFrameConstants::kParam0Offset));
__ str(r0,
MemOperand(r2, r1, LSL, kPointerSizeLog2 - kSmiTagSize, NegOffset));
__ push(r0);
__ stop("ArmCodeGenerator::GenerateSetArgument - unreachable");
}
void ArmCodeGenerator::GenerateSquashFrame(ZoneList<Expression*>* args) {
ASSERT(args->length() == 2);
Load(args->at(0)); // old number of arguments
Load(args->at(1)); // new number of arguments, r1 > r0
__ pop(r0);
__ mov(r0, Operand(r0, LSR, kSmiTagSize));
__ pop(r1);
__ mov(r1, Operand(r1, LSR, kSmiTagSize));
// r1 = number of words to move stack.
__ sub(r1, r1, Operand(r0));
// r2 is source.
__ add(r2, fp, Operand(StandardFrameConstants::kCallerPCOffset));
// Move down frame pointer fp.
__ add(fp, fp, Operand(r1, LSL, kPointerSizeLog2));
// r1 is destination.
__ add(r1, fp, Operand(StandardFrameConstants::kCallerPCOffset));
Label move;
__ bind(&move);
__ ldr(r3, MemOperand(r2, -kPointerSize, PostIndex));
__ str(r3, MemOperand(r1, -kPointerSize, PostIndex));
__ cmp(r2, Operand(sp));
__ b(ne, &move);
__ ldr(r3, MemOperand(r2));
__ str(r3, MemOperand(r1));
// Move down stack pointer esp.
__ mov(sp, Operand(r1));
// Put something GC-able in r0.
__ mov(r0, Operand(Smi::FromInt(0)));
__ push(r0);
__ stop("ArmCodeGenerator::GenerateSquashFrame - unreachable");
}
void ArmCodeGenerator::GenerateExpandFrame(ZoneList<Expression*>* args) {
ASSERT(args->length() == 2);
Load(args->at(1));
Load(args->at(0));
__ pop(r0); // new number of arguments
__ pop(r1); // old number of arguments, r1 > r0
__ mov(r1, Operand(r1, LSR, kSmiTagSize));
// r1 = number of words to move stack.
__ sub(r1, r1, Operand(r0, LSR, kSmiTagSize));
Label end_of_expand_frame;
if (FLAG_check_stack) {
Label not_too_big;
__ sub(r2, sp, Operand(r1, LSL, kPointerSizeLog2));
__ mov(ip, Operand(ExternalReference::address_of_stack_guard_limit()));
__ ldr(ip, MemOperand(ip));
__ cmp(r2, Operand(ip));
__ b(gt, &not_too_big);
__ mov(r0, Operand(Factory::false_value()));
__ b(&end_of_expand_frame);
__ bind(&not_too_big);
}
// r3 is source.
__ mov(r3, Operand(sp));
// r0 is copy limit + 1 word
__ add(r0, fp,
Operand(StandardFrameConstants::kCallerPCOffset + kPointerSize));
// Move up frame pointer fp.
__ sub(fp, fp, Operand(r1, LSL, kPointerSizeLog2));
// Move up stack pointer sp.
__ sub(sp, sp, Operand(r1, LSL, kPointerSizeLog2));
// r1 is destination (r1 = source - r1).
__ mov(r2, Operand(0));
__ sub(r2, r2, Operand(r1, LSL, kPointerSizeLog2));
__ add(r1, r3, Operand(r2));
Label move;
__ bind(&move);
__ ldr(r2, MemOperand(r3, kPointerSize, PostIndex));
__ str(r2, MemOperand(r1, kPointerSize, PostIndex));
__ cmp(r3, Operand(r0));
__ b(ne, &move);
// Put success value in top of stack
__ mov(r0, Operand(Factory::true_value()));
__ bind(&end_of_expand_frame);
__ push(r0);
__ stop("ArmCodeGenerator::GenerateExpandFrame - unreachable");
}
@ -4008,7 +3924,6 @@ void ArmCodeGenerator::GenerateIsNonNegativeSmi(ZoneList<Expression*>* args) {
}
// This should generate code that performs a charCodeAt() call or returns
// undefined in order to trigger the slow case, Runtime_StringCharCodeAt.
// It is not yet implemented on ARM, so it always goes to the slow case.
@ -4019,11 +3934,23 @@ void ArmCodeGenerator::GenerateFastCharCodeAt(ZoneList<Expression*>* args) {
}
// This is used in the implementation of apply on ia32 but it is not
// used on ARM yet.
void ArmCodeGenerator::GenerateIsArray(ZoneList<Expression*>* args) {
__ stop("ArmCodeGenerator::GenerateIsArray");
ASSERT(args->length() == 1);
Load(args->at(0));
Label answer;
// We need the CC bits to come out as not_equal in the case where the
// object is a smi. This can't be done with the usual test opcode so
// we use XOR to get the right CC bits.
__ pop(r0);
__ and_(r1, r0, Operand(kSmiTagMask));
__ eor(r1, r1, Operand(kSmiTagMask), SetCC);
__ b(ne, &answer);
// It is a heap object - get the map.
__ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
__ ldrb(r1, FieldMemOperand(r1, Map::kInstanceTypeOffset));
// Check if the object is a JS array or not.
__ cmp(r1, Operand(JS_ARRAY_TYPE));
__ bind(&answer);
cc_reg_ = eq;
}
@ -4045,11 +3972,11 @@ void ArmCodeGenerator::GenerateArgumentsLength(ZoneList<Expression*>* args) {
void ArmCodeGenerator::GenerateArgumentsAccess(ZoneList<Expression*>* args) {
ASSERT(args->length() == 1);
// Load the key onto the stack and set register r1 to the formal
// parameters count for the currently executing function.
// Satisfy contract with ArgumentsAccessStub:
// Load the key into r1 and the formal parameters count into r0.
Load(args->at(0));
__ pop(r0);
__ mov(r1, Operand(Smi::FromInt(scope_->num_parameters())));
__ pop(r1);
__ mov(r0, Operand(Smi::FromInt(scope_->num_parameters())));
// Call the shared stub to get to arguments[key].
ArgumentsAccessStub stub(false);
@ -4073,46 +4000,12 @@ void ArmCodeGenerator::GenerateObjectEquals(ZoneList<Expression*>* args) {
void ArmCodeGenerator::GenerateShiftDownAndTailCall(
ZoneList<Expression*>* args) {
// r0 = number of arguments
ASSERT(args->length() == 1);
Load(args->at(0));
__ pop(r0);
__ mov(r0, Operand(r0, LSR, kSmiTagSize));
// Get the 'this' function and exit the frame without returning.
__ ldr(r1, MemOperand(pp, JavaScriptFrameConstants::kFunctionOffset));
ExitJSFrame(DO_NOT_RETURN);
// return address in lr
// Move arguments one element down the stack.
Label move;
Label moved;
__ sub(r2, r0, Operand(0), SetCC);
__ b(eq, &moved);
__ bind(&move);
__ sub(ip, r2, Operand(1));
__ ldr(r3, MemOperand(sp, ip, LSL, kPointerSizeLog2));
__ str(r3, MemOperand(sp, r2, LSL, kPointerSizeLog2));
__ sub(r2, r2, Operand(1), SetCC);
__ b(ne, &move);
__ bind(&moved);
// Remove the TOS (copy of last argument)
__ pop();
// Jump (tail-call) to the function in register r1.
__ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
__ ldr(r1, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
__ ldr(r1, FieldMemOperand(r1, SharedFunctionInfo::kCodeOffset));
__ add(pc, r1, Operand(Code::kHeaderSize - kHeapObjectTag));
return;
__ stop("ArmCodeGenerator::GenerateShiftDownAndTailCall - unreachable");
}
void ArmCodeGenerator::VisitCallRuntime(CallRuntime* node) {
if (CheckForInlineRuntimeCall(node))
return;
if (CheckForInlineRuntimeCall(node)) return;
ZoneList<Expression*>* args = node->arguments();
Comment cmnt(masm_, "[ CallRuntime");
@ -4167,7 +4060,7 @@ void ArmCodeGenerator::VisitUnaryOperation(UnaryOperation* node) {
Load(property->obj());
Load(property->key());
__ mov(r0, Operand(1)); // not counting receiver
__ InvokeBuiltin("DELETE", 1, CALL_JS);
__ InvokeBuiltin(Builtins::DELETE, CALL_JS);
} else if (variable != NULL) {
Slot* slot = variable->slot();
@ -4176,7 +4069,7 @@ void ArmCodeGenerator::VisitUnaryOperation(UnaryOperation* node) {
__ mov(r0, Operand(variable->name()));
__ push(r0);
__ mov(r0, Operand(1)); // not counting receiver
__ InvokeBuiltin("DELETE", 1, CALL_JS);
__ InvokeBuiltin(Builtins::DELETE, CALL_JS);
} else if (slot != NULL && slot->type() == Slot::LOOKUP) {
// lookup the context holding the named variable
@ -4189,7 +4082,7 @@ void ArmCodeGenerator::VisitUnaryOperation(UnaryOperation* node) {
__ mov(r0, Operand(variable->name()));
__ push(r0);
__ mov(r0, Operand(1)); // not counting receiver
__ InvokeBuiltin("DELETE", 1, CALL_JS);
__ InvokeBuiltin(Builtins::DELETE, CALL_JS);
} else {
// Default: Result of deleting non-global, not dynamically
@ -4237,7 +4130,7 @@ void ArmCodeGenerator::VisitUnaryOperation(UnaryOperation* node) {
__ push(r0);
__ mov(r0, Operand(0)); // not counting receiver
__ InvokeBuiltin("BIT_NOT", 0, CALL_JS);
__ InvokeBuiltin(Builtins::BIT_NOT, CALL_JS);
__ b(&continue_label);
__ bind(&smi_label);
@ -4260,7 +4153,7 @@ void ArmCodeGenerator::VisitUnaryOperation(UnaryOperation* node) {
__ b(eq, &continue_label);
__ push(r0);
__ mov(r0, Operand(0)); // not counting receiver
__ InvokeBuiltin("TO_NUMBER", 0, CALL_JS);
__ InvokeBuiltin(Builtins::TO_NUMBER, CALL_JS);
__ bind(&continue_label);
break;
}
@ -4658,13 +4551,13 @@ void ArmCodeGenerator::VisitCompareOperation(CompareOperation* node) {
case Token::IN:
__ mov(r0, Operand(1)); // not counting receiver
__ InvokeBuiltin("IN", 1, CALL_JS);
__ InvokeBuiltin(Builtins::IN, CALL_JS);
__ push(r0);
break;
case Token::INSTANCEOF:
__ mov(r0, Operand(1)); // not counting receiver
__ InvokeBuiltin("INSTANCE_OF", 1, CALL_JS);
__ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_JS);
__ push(r0);
break;
@ -4683,14 +4576,31 @@ void ArmCodeGenerator::RecordStatementPosition(Node* node) {
}
void ArmCodeGenerator::EnterJSFrame(int argc) {
__ EnterJSFrame(argc);
void ArmCodeGenerator::EnterJSFrame() {
#if defined(DEBUG)
{ Label done, fail;
__ tst(r1, Operand(kSmiTagMask));
__ b(eq, &fail);
__ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
__ ldrb(r2, FieldMemOperand(r2, Map::kInstanceTypeOffset));
__ cmp(r2, Operand(JS_FUNCTION_TYPE));
__ b(eq, &done);
__ bind(&fail);
__ stop("ArmCodeGenerator::EnterJSFrame - r1 not a function");
__ bind(&done);
}
#endif // DEBUG
__ stm(db_w, sp, r1.bit() | cp.bit() | fp.bit() | lr.bit());
__ add(fp, sp, Operand(2 * kPointerSize)); // Adjust FP to point to saved FP.
}
void ArmCodeGenerator::ExitJSFrame(ExitJSFlag flag) {
JSExitStub stub(flag);
__ CallJSExitStub(&stub);
void ArmCodeGenerator::ExitJSFrame() {
// Drop the execution stack down to the frame pointer and restore the caller
// frame pointer and return address.
__ mov(sp, fp);
__ ldm(ia_w, sp, fp.bit() | lr.bit());
}

View File

@ -56,7 +56,6 @@ class Decoder {
: converter_(converter),
out_buffer_(out_buffer),
out_buffer_pos_(0) {
ASSERT(out_buffer_size_ > 0);
out_buffer_[out_buffer_pos_] = '\0';
}
@ -96,7 +95,6 @@ class Decoder {
// Append the ch to the output buffer.
void Decoder::PrintChar(const char ch) {
ASSERT(out_buffer_pos_ < out_buffer_size_);
out_buffer_[out_buffer_pos_++] = ch;
}
@ -430,7 +428,6 @@ void Decoder::Format(Instr* instr, const char* format) {
}
cur = *format++;
}
ASSERT(out_buffer_pos_ < out_buffer_size_);
out_buffer_[out_buffer_pos_] = '\0';
}

View File

@ -36,22 +36,16 @@ namespace v8 { namespace internal {
StackFrame::Type StackFrame::ComputeType(State* state) {
ASSERT(state->fp != NULL);
if (state->pp == NULL) {
if (Memory::Address_at(state->fp +
EntryFrameConstants::kConstructMarkOffset) != 0) {
return ENTRY_CONSTRUCT;
} else {
return ENTRY;
}
} else if (StandardFrame::IsArgumentsAdaptorFrame(state->fp)) {
if (StandardFrame::IsArgumentsAdaptorFrame(state->fp)) {
return ARGUMENTS_ADAPTOR;
} else if (
Memory::Object_at(state->fp +
StandardFrameConstants::kFunctionOffset)->IsSmi()) {
return INTERNAL;
} else {
return JAVA_SCRIPT;
}
// The marker and function offsets overlap. If the marker isn't a
// smi then the frame is a JavaScript frame -- and the marker is
// really the function.
const int offset = StandardFrameConstants::kMarkerOffset;
Object* marker = Memory::Object_at(state->fp + offset);
if (!marker->IsSmi()) return JAVA_SCRIPT;
return static_cast<StackFrame::Type>(Smi::cast(marker)->value());
}
@ -69,7 +63,6 @@ StackFrame::Type ExitFrame::GetStateForFramePointer(Address fp, State* state) {
// Fill in the state.
state->sp = sp;
state->fp = fp;
state->pp = fp + ExitFrameConstants::kPPDisplacement;
state->pc_address = reinterpret_cast<Address*>(sp - 1 * kPointerSize);
return type;
}
@ -81,43 +74,49 @@ void ExitFrame::Iterate(ObjectVisitor* v) const {
int JavaScriptFrame::GetProvidedParametersCount() const {
const int offset = JavaScriptFrameConstants::kArgsLengthOffset;
int result = Memory::int_at(fp() + offset);
// We never remove extra parameters provided on the stack; we only
// fill in undefined values for parameters not provided.
ASSERT(0 <= result && result <= ComputeParametersCount());
return result;
return ComputeParametersCount();
}
Address JavaScriptFrame::GetCallerStackPointer() const {
return state_.pp;
int arguments;
if (Heap::gc_state() != Heap::NOT_IN_GC) {
// The arguments for cooked frames are traversed as if they were
// expression stack elements of the calling frame. The reason for
// this rather strange decision is that we cannot access the
// function during mark-compact GCs when the stack is cooked.
// In fact accessing heap objects (like function->shared() below)
// at all during GC is problematic.
arguments = 0;
} else {
// Compute the number of arguments by getting the number of formal
// parameters of the function. We must remember to take the
// receiver into account (+1).
JSFunction* function = JSFunction::cast(this->function());
arguments = function->shared()->formal_parameter_count() + 1;
}
const int offset = StandardFrameConstants::kCallerSPOffset;
return fp() + offset + (arguments * kPointerSize);
}
Address ArgumentsAdaptorFrame::GetCallerStackPointer() const {
// Argument adaptor frames aren't used on ARM (yet).
UNIMPLEMENTED();
return 0;
const int arguments = Smi::cast(GetExpression(0))->value();
const int offset = StandardFrameConstants::kCallerSPOffset;
return fp() + offset + (arguments + 1) * kPointerSize;
}
Address InternalFrame::GetCallerStackPointer() const {
return state_.pp;
// Internal frames have no arguments. The stack pointer of the
// caller is at a fixed offset from the frame pointer.
return fp() + StandardFrameConstants::kCallerSPOffset;
}
Code* JavaScriptFrame::FindCode() const {
const int offset = StandardFrameConstants::kCodeOffset;
Object* code = Memory::Object_at(fp() + offset);
if (code == NULL) {
// The code object isn't set; find it and set it.
code = Heap::FindCodeObject(pc());
ASSERT(!code->IsFailure());
Memory::Object_at(fp() + offset) = code;
}
ASSERT(code != NULL);
return Code::cast(code);
JSFunction* function = JSFunction::cast(this->function());
return function->shared()->code();
}

View File

@ -93,8 +93,7 @@ class StackHandlerConstants : public AllStatic {
class EntryFrameConstants : public AllStatic {
public:
static const int kCallerFPOffset = -2 * kPointerSize;
static const int kConstructMarkOffset = -1 * kPointerSize;
static const int kCallerFPOffset = -3 * kPointerSize;
};
@ -110,29 +109,23 @@ class ExitFrameConstants : public AllStatic {
// Let the parameters pointer for exit frames point just below the
// frame structure on the stack.
static const int kPPDisplacement = 4 * kPointerSize;
static const int kPPDisplacement = 3 * kPointerSize;
// The caller fields are below the frame pointer on the stack.
static const int kCallerPPOffset = +0 * kPointerSize;
static const int kCallerFPOffset = +1 * kPointerSize;
static const int kCallerPCOffset = +3 * kPointerSize;
static const int kCallerFPOffset = +0 * kPointerSize;
static const int kCallerPPOffset = +1 * kPointerSize;
static const int kCallerPCOffset = +2 * kPointerSize;
};
class StandardFrameConstants : public AllStatic {
public:
static const int kExpressionsOffset = -4 * kPointerSize;
static const int kCodeOffset = -3 * kPointerSize;
static const int kContextOffset = -2 * kPointerSize;
static const int kCallerPPOffset = 0 * kPointerSize;
static const int kCallerFPOffset = +1 * kPointerSize;
static const int kCallerPCOffset = +3 * kPointerSize;
// TODO(1233523): This is - of course - faked. The ARM port does not
// yet pass the callee function in a register, but the
// StackFrame::ComputeType code uses the field to figure out if a
// frame is a real JavaScript frame or an internal frame.
static const int kFunctionOffset = kContextOffset;
static const int kExpressionsOffset = -3 * kPointerSize;
static const int kMarkerOffset = -2 * kPointerSize;
static const int kContextOffset = -1 * kPointerSize;
static const int kCallerFPOffset = 0 * kPointerSize;
static const int kCallerPCOffset = +1 * kPointerSize;
static const int kCallerSPOffset = +2 * kPointerSize;
};
@ -140,34 +133,32 @@ class JavaScriptFrameConstants : public AllStatic {
public:
// FP-relative.
static const int kLocal0Offset = StandardFrameConstants::kExpressionsOffset;
static const int kArgsLengthOffset = -1 * kPointerSize;
// 0 * kPointerSize : StandardFrameConstants::kCallerPPOffset
// 1 * kPointersize : StandardFrameConstents::kCallerFPOffset
static const int kSPOnExitOffset = +2 * kPointerSize;
// 3 * kPointerSize : StandardFrameConstants::kCallerPCOffset
static const int kSavedRegistersOffset = +4 * kPointerSize;
static const int kSavedRegistersOffset = +2 * kPointerSize;
static const int kFunctionOffset = StandardFrameConstants::kMarkerOffset;
// PP-relative.
static const int kParam0Offset = -2 * kPointerSize;
static const int kReceiverOffset = -1 * kPointerSize;
static const int kFunctionOffset = 0 * kPointerSize;
};
class ArgumentsAdaptorFrameConstants : public AllStatic {
public:
static const int kLengthOffset = StandardFrameConstants::kExpressionsOffset;
};
class InternalFrameConstants : public AllStatic {
public:
static const int kCodeOffset = StandardFrameConstants::kCodeOffset;
static const int kCodeOffset = StandardFrameConstants::kExpressionsOffset;
};
inline Address StandardFrame::caller_pp() const {
return Memory::Address_at(fp() + StandardFrameConstants::kCallerPPOffset);
}
inline Object* JavaScriptFrame::function() const {
const int offset = JavaScriptFrameConstants::kFunctionOffset;
return Memory::Object_at(pp() + offset);
Object* result = Memory::Object_at(fp() + offset);
ASSERT(result->IsJSFunction());
return result;
}

View File

@ -254,9 +254,6 @@ StackFrame::Type ExitFrame::GetCallerState(State* state) const {
// Setup the caller state.
state->sp = pp();
state->fp = Memory::Address_at(fp() + ExitFrameConstants::kCallerFPOffset);
#ifdef USE_OLD_CALLING_CONVENTIONS
state->pp = Memory::Address_at(fp() + ExitFrameConstants::kCallerPPOffset);
#endif
state->pc_address
= reinterpret_cast<Address*>(fp() + ExitFrameConstants::kCallerPCOffset);
return ComputeType(state);
@ -293,9 +290,6 @@ int StandardFrame::ComputeExpressionsCount() const {
StackFrame::Type StandardFrame::GetCallerState(State* state) const {
state->sp = caller_sp();
state->fp = caller_fp();
#ifdef USE_OLD_CALLING_CONVENTIONS
state->pp = caller_pp();
#endif
state->pc_address = reinterpret_cast<Address*>(ComputePCAddress(fp()));
return ComputeType(state);
}

View File

@ -164,9 +164,6 @@ class StackFrame BASE_EMBEDDED {
struct State {
Address sp;
Address fp;
#ifdef USE_OLD_CALLING_CONVENTIONS
Address pp;
#endif
Address* pc_address;
};
@ -187,13 +184,9 @@ class StackFrame BASE_EMBEDDED {
// Compute the stack frame type for the given state.
static Type ComputeType(State* state);
protected:
// TODO(1233523): Once the ARM code uses the new calling
// conventions, we should be able to make state_ private again.
State state_;
private:
const StackFrameIterator* iterator_;
State state_;
// Get the type and the state of the calling frame.
virtual Type GetCallerState(State* state) const = 0;
@ -338,9 +331,6 @@ class StandardFrame: public StackFrame {
// Accessors.
inline Address caller_sp() const;
inline Address caller_fp() const;
#ifdef USE_OLD_CALLING_CONVENTIONS
inline Address caller_pp() const;
#endif
inline Address caller_pc() const;
// Computes the address of the PC field in the standard frame given

View File

@ -44,13 +44,6 @@ typedef unsigned __int64 uint64_t;
#endif
// TODO(1233523): Get rid of this code that conditionally introduces a
// macro to allow us to check for platforms that use the old
// non-adapted arguments calling conventions.
#if defined(ARM) || defined(__arm__) || defined(__thumb__)
#define USE_OLD_CALLING_CONVENTIONS
#endif
namespace v8 { namespace internal {
// Support for alternative bool type. This is only enabled if the code is

View File

@ -407,17 +407,15 @@ void CallIC::Generate(MacroAssembler* masm,
// -- lr: return address
// -----------------------------------
// Setup number of arguments for EnterJSFrame.
__ mov(r0, Operand(argc));
// Get the receiver of the function from the stack into r1.
__ ldr(r1, MemOperand(sp, argc * kPointerSize));
__ EnterJSFrame(0);
__ pop(); // remove the code slot
// Get the receiver of the function from the stack.
__ ldr(r2, MemOperand(sp, argc * kPointerSize));
// Get the name of the function to call from the stack.
__ ldr(r1, MemOperand(sp, (argc + 1) * kPointerSize));
__ EnterInternalFrame();
// Push the receiver and the name of the function.
__ ldr(r0, MemOperand(pp, 0));
__ mov(r2, Operand(0)); // code slot == 0
__ stm(db_w, sp, r0.bit() | r1.bit() | r2.bit());
__ stm(db_w, sp, r1.bit() | r2.bit());
// Call the entry.
__ mov(r0, Operand(2));
@ -429,7 +427,7 @@ void CallIC::Generate(MacroAssembler* masm,
// Move result to r1.
__ mov(r1, Operand(r0));
__ ExitJSFrame(DO_NOT_RETURN);
__ ExitInternalFrame();
// Patch the function on the stack; 1 ~ receiver.
__ str(r1, MemOperand(sp, (argc + 1) * kPointerSize));

View File

@ -518,7 +518,7 @@ void CallIC::Generate(MacroAssembler* masm,
__ mov(ebx, Operand(esp, (argc + 2) * kPointerSize));
// Enter an internal frame.
__ EnterFrame(StackFrame::INTERNAL);
__ EnterInternalFrame();
// Push the receiver and the name of the function.
__ push(Operand(edx));
@ -532,7 +532,7 @@ void CallIC::Generate(MacroAssembler* masm,
// Move result to edi and exit the internal frame.
__ mov(Operand(edi), eax);
__ ExitFrame(StackFrame::INTERNAL);
__ ExitInternalFrame();
// Invoke the function.
ParameterCount actual(argc);

View File

@ -249,94 +249,28 @@ void MacroAssembler::RecordWrite(Register object, Register offset,
}
void MacroAssembler::EnterJSFrame(int argc) {
// Generate code entering a JS function called from a JS function
// stack: receiver, arguments
// r0: number of arguments (not including function, nor receiver)
// r1: preserved
// sp: stack pointer
// fp: frame pointer
// cp: callee's context
// pp: caller's parameter pointer
// lr: return address
void MacroAssembler::EnterInternalFrame() {
// r0-r3: preserved
int type = StackFrame::INTERNAL;
// compute parameter pointer before making changes
// ip = sp + kPointerSize*(args_len+1); // +1 for receiver
add(ip, sp, Operand(r0, LSL, kPointerSizeLog2));
add(ip, ip, Operand(kPointerSize));
// push extra parameters if we don't have enough
// (this can only happen if argc > 0 to begin with)
if (argc > 0) {
Label loop, done;
// assume enough arguments to be the most common case
sub(r2, r0, Operand(argc), SetCC); // number of missing arguments
b(ge, &done); // enough arguments
// not enough arguments
mov(r3, Operand(Factory::undefined_value()));
bind(&loop);
push(r3);
add(r2, r2, Operand(1), SetCC);
b(lt, &loop);
bind(&done);
}
mov(r3, Operand(r0)); // args_len to be saved
mov(r2, Operand(cp)); // context to be saved
// push in reverse order: context (r2), args_len (r3), caller_pp, caller_fp,
// sp_on_exit (ip == pp, may be patched on exit), return address
stm(db_w, sp, r2.bit() | r3.bit() | pp.bit() | fp.bit() |
ip.bit() | lr.bit());
// Setup new frame pointer.
add(fp, sp, Operand(-StandardFrameConstants::kContextOffset));
mov(pp, Operand(ip)); // setup new parameter pointer
mov(r0, Operand(0)); // spare slot to store caller code object during GC
push(r0);
// r1: preserved
stm(db_w, sp, cp.bit() | fp.bit() | lr.bit());
mov(ip, Operand(Smi::FromInt(type)));
push(ip);
mov(ip, Operand(0));
push(ip); // Push an empty code cache slot.
add(fp, sp, Operand(3 * kPointerSize)); // Adjust FP to point to saved FP.
}
void MacroAssembler::ExitJSFrame(ExitJSFlag flag) {
// r0: result
// sp: stack pointer
// fp: frame pointer
// pp: parameter pointer
void MacroAssembler::ExitInternalFrame() {
// r0: preserved
// r1: preserved
// r2: preserved
if (flag == DO_NOT_RETURN) {
add(r3, fp, Operand(JavaScriptFrameConstants::kSavedRegistersOffset));
}
if (flag == DO_NOT_RETURN) {
// restore sp as caller_sp (not as pp)
str(r3, MemOperand(fp, JavaScriptFrameConstants::kSPOnExitOffset));
}
if (flag == DO_NOT_RETURN && generating_stub()) {
// If we're generating a stub, we need to preserve the link
// register to be able to return to the place the stub was called
// from.
mov(ip, Operand(lr));
}
mov(sp, Operand(fp)); // respect ABI stack constraint
ldm(ia, sp, pp.bit() | fp.bit() | sp.bit() |
((flag == RETURN) ? pc.bit() : lr.bit()));
if (flag == DO_NOT_RETURN && generating_stub()) {
// Return to the place where the stub was called without
// clobbering the value of the link register.
mov(pc, Operand(ip));
}
// r0: result
// sp: points to function arg (if return) or to last arg (if no return)
// fp: restored frame pointer
// pp: restored parameter pointer
// Drop the execution stack down to the frame pointer and restore the caller
// frame pointer and return address.
mov(sp, fp);
ldm(ia_w, sp, fp.bit() | lr.bit());
}
@ -346,12 +280,57 @@ void MacroAssembler::InvokePrologue(const ParameterCount& expected,
Register code_reg,
Label* done,
InvokeFlag flag) {
if (actual.is_immediate()) {
mov(r0, Operand(actual.immediate())); // Push the number of arguments.
} else {
if (!actual.reg().is(r0)) {
mov(r0, Operand(actual.reg()));
bool definitely_matches = false;
Label regular_invoke;
// Check whether the expected and actual arguments count match. If not,
// setup registers according to contract with ArgumentsAdaptorTrampoline:
// r0: actual arguments count
// r1: function (passed through to callee)
// r2: expected arguments count
// r3: callee code entry
// The code below is made a lot easier because the calling code already sets
// up actual and expected registers according to the contract if values are
// passed in registers.
ASSERT(actual.is_immediate() || actual.reg().is(r0));
ASSERT(expected.is_immediate() || expected.reg().is(r2));
ASSERT((!code_constant.is_null() && code_reg.is(no_reg)) || code_reg.is(r3));
if (expected.is_immediate()) {
ASSERT(actual.is_immediate());
if (expected.immediate() == actual.immediate()) {
definitely_matches = true;
} else {
mov(r0, Operand(actual.immediate()));
mov(r2, Operand(expected.immediate()));
}
} else {
if (actual.is_immediate()) {
cmp(expected.reg(), Operand(actual.immediate()));
b(eq, &regular_invoke);
mov(r0, Operand(actual.immediate()));
} else {
cmp(expected.reg(), Operand(actual.reg()));
b(eq, &regular_invoke);
}
}
if (!definitely_matches) {
if (!code_constant.is_null()) {
mov(r3, Operand(code_constant));
add(r3, r3, Operand(Code::kHeaderSize - kHeapObjectTag));
}
Handle<Code> adaptor =
Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
if (flag == CALL_FUNCTION) {
Call(adaptor, code_target);
b(done);
} else {
Jump(adaptor, code_target);
}
bind(&regular_invoke);
}
}
@ -402,18 +381,8 @@ void MacroAssembler::InvokeFunction(Register fun,
// Contract with called JS functions requires that function is passed in r1.
ASSERT(fun.is(r1));
Register code_reg = r3;
Register expected_reg = r2;
// Make sure that the code and expected registers do not collide with the
// actual register being passed in.
if (actual.is_reg()) {
if (actual.reg().is(code_reg)) {
code_reg = r4;
} else if (actual.reg().is(expected_reg)) {
expected_reg = r4;
}
}
Register code_reg = r3;
ldr(code_reg, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
@ -507,7 +476,7 @@ void MacroAssembler::PushTryHandler(CodeLocation try_location,
mov(r0, Operand(Smi::FromInt(StackHandler::kCodeNotPresent))); // new TOS
push(r0);
} else {
// Must preserve r0-r3, r5-r7 are available.
// Must preserve r0-r4, r5-r7 are available.
ASSERT(try_location == IN_JS_ENTRY);
// The parameter pointer is meaningless here and fp does not point to a JS
// frame. So we save NULL for both pp and fp. We expect the code throwing an
@ -688,33 +657,54 @@ void MacroAssembler::JumpToBuiltin(const ExternalReference& builtin) {
}
void MacroAssembler::InvokeBuiltin(const char* name,
int argc,
InvokeJSFlags flags) {
Handle<String> symbol = Factory::LookupAsciiSymbol(name);
Object* object = Top::security_context_builtins()->GetProperty(*symbol);
bool unresolved = true;
Code* code = Builtins::builtin(Builtins::Illegal);
Handle<Code> MacroAssembler::ResolveBuiltin(Builtins::JavaScript id,
bool* resolved) {
// Contract with compiled functions is that the function is passed in r1.
int builtins_offset =
JSBuiltinsObject::kJSBuiltinsOffset + (id * kPointerSize);
ldr(r1, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
ldr(r1, FieldMemOperand(r1, GlobalObject::kBuiltinsOffset));
ldr(r1, FieldMemOperand(r1, builtins_offset));
if (object->IsJSFunction()) {
Handle<JSFunction> function(JSFunction::cast(object));
if (function->is_compiled() || CompileLazy(function, CLEAR_EXCEPTION)) {
code = function->code();
unresolved = false;
}
}
return Builtins::GetCode(id, resolved);
}
void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
InvokeJSFlags flags) {
bool resolved;
Handle<Code> code = ResolveBuiltin(id, &resolved);
if (flags == CALL_JS) {
Call(Handle<Code>(code), code_target);
Call(code, code_target);
} else {
ASSERT(flags == JUMP_JS);
Jump(Handle<Code>(code), code_target);
Jump(code, code_target);
}
if (unresolved) {
if (!resolved) {
const char* name = Builtins::GetName(id);
int argc = Builtins::GetArgumentsCount(id);
uint32_t flags =
Bootstrapper::FixupFlagsArgumentsCount::encode(argc) |
Bootstrapper::FixupFlagsIsPCRelative::encode(false);
Bootstrapper::FixupFlagsIsPCRelative::encode(true);
Unresolved entry = { pc_offset() - sizeof(Instr), flags, name };
unresolved_.Add(entry);
}
}
void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
bool resolved;
Handle<Code> code = ResolveBuiltin(id, &resolved);
mov(target, Operand(code));
if (!resolved) {
const char* name = Builtins::GetName(id);
int argc = Builtins::GetArgumentsCount(id);
uint32_t flags =
Bootstrapper::FixupFlagsArgumentsCount::encode(argc) |
Bootstrapper::FixupFlagsIsPCRelative::encode(true);
Unresolved entry = { pc_offset() - sizeof(Instr), flags, name };
unresolved_.Add(entry);
}

View File

@ -98,21 +98,13 @@ class MacroAssembler: public Assembler {
// ---------------------------------------------------------------------------
// Activation frames
void EnterJSFrame(int argc);
void ExitJSFrame(ExitJSFlag flag);
void EnterInternalFrame();
void ExitInternalFrame();
// ---------------------------------------------------------------------------
// JavaScript invokes
// Helper functions for generating invokes.
void InvokePrologue(const ParameterCount& expected,
const ParameterCount& actual,
Handle<Code> code_constant,
Register code_reg,
Label* done,
InvokeFlag flag);
// Invoke the JavaScript function code by either calling or jumping.
void InvokeCode(Register code,
const ParameterCount& expected,
@ -131,6 +123,7 @@ class MacroAssembler: public Assembler {
const ParameterCount& actual,
InvokeFlag flag);
// ---------------------------------------------------------------------------
// Debugger Support
@ -200,7 +193,11 @@ class MacroAssembler: public Assembler {
// Invoke specified builtin JavaScript function. Adds an entry to
// the unresolved list if the name does not resolve.
void InvokeBuiltin(const char* name, int argc, InvokeJSFlags flags);
void InvokeBuiltin(Builtins::JavaScript id, InvokeJSFlags flags);
// Store the code object for the given builtin in the target register and
// setup the function in r1.
void GetBuiltinEntry(Register target, Builtins::JavaScript id);
struct Unresolved {
int pc;
@ -233,6 +230,18 @@ class MacroAssembler: public Assembler {
List<Unresolved> unresolved_;
bool generating_stub_;
bool allow_stub_calls_;
// Helper functions for generating invokes.
void InvokePrologue(const ParameterCount& expected,
const ParameterCount& actual,
Handle<Code> code_constant,
Register code_reg,
Label* done,
InvokeFlag flag);
// Get the code for the given builtin. Returns if able to resolve
// the function in the 'resolved' flag.
Handle<Code> ResolveBuiltin(Builtins::JavaScript id, bool* resolved);
};

View File

@ -319,21 +319,20 @@ void MacroAssembler::FCmp() {
}
void MacroAssembler::EnterFrame(StackFrame::Type type) {
ASSERT(type != StackFrame::JAVA_SCRIPT);
void MacroAssembler::EnterInternalFrame() {
int type = StackFrame::INTERNAL;
push(ebp);
mov(ebp, Operand(esp));
push(esi);
push(Immediate(Smi::FromInt(type)));
if (type == StackFrame::INTERNAL) {
push(Immediate(0));
}
push(Immediate(0)); // Push an empty code cache slot.
}
void MacroAssembler::ExitFrame(StackFrame::Type type) {
ASSERT(type != StackFrame::JAVA_SCRIPT);
void MacroAssembler::ExitInternalFrame() {
if (FLAG_debug_code) {
StackFrame::Type type = StackFrame::INTERNAL;
cmp(Operand(ebp, StandardFrameConstants::kMarkerOffset),
Immediate(Smi::FromInt(type)));
Check(equal, "stack frame types must match");
@ -727,24 +726,8 @@ Handle<Code> MacroAssembler::ResolveBuiltin(Builtins::JavaScript id,
JSBuiltinsObject::kJSBuiltinsOffset + (id * kPointerSize);
mov(edi, FieldOperand(edx, builtins_offset));
Code* code = Builtins::builtin(Builtins::Illegal);
*resolved = false;
if (Top::security_context() != NULL) {
Object* object = Top::security_context_builtins()->javascript_builtin(id);
if (object->IsJSFunction()) {
Handle<JSFunction> function(JSFunction::cast(object));
// Make sure the number of parameters match the formal parameter count.
ASSERT(function->shared()->formal_parameter_count() ==
Builtins::GetArgumentsCount(id));
if (function->is_compiled() || CompileLazy(function, CLEAR_EXCEPTION)) {
code = function->code();
*resolved = true;
}
}
}
return Handle<Code>(code);
return Builtins::GetCode(id, resolved);
}

View File

@ -88,8 +88,8 @@ class MacroAssembler: public Assembler {
// Enter or exit a stack frame of the given type. Cannot be used to
// construct or leave JavaScript frames.
void EnterFrame(StackFrame::Type type);
void ExitFrame(StackFrame::Type type);
void EnterInternalFrame();
void ExitInternalFrame();
// ---------------------------------------------------------------------------
@ -120,10 +120,6 @@ class MacroAssembler: public Assembler {
// Store the code object for the given builtin in the target register.
void GetBuiltinEntry(Register target, Builtins::JavaScript id);
// Get the code for the given builtin. Returns if able to resolve
// the function in the 'resolved' flag.
Handle<Code> ResolveBuiltin(Builtins::JavaScript id, bool* resolved);
// Expression support
void Set(Register dst, const Immediate& x);
void Set(const Operand& dst, const Immediate& x);
@ -251,6 +247,10 @@ class MacroAssembler: public Assembler {
const Operand& code_operand,
Label* done,
InvokeFlag flag);
// Get the code for the given builtin. Returns if able to resolve
// the function in the 'resolved' flag.
Handle<Code> ResolveBuiltin(Builtins::JavaScript id, bool* resolved);
};

View File

@ -159,22 +159,9 @@ static Object* Runtime_CreateArrayLiteral(Arguments args) {
// literal.
ASSERT(args.length() == 2);
CONVERT_CHECKED(FixedArray, elements, args[0]);
#ifdef USE_OLD_CALLING_CONVENTIONS
ASSERT(args[1]->IsTheHole());
// TODO(1332579): Pass in the literals array from the function once
// the new calling convention is in place on ARM. Currently, we
// retrieve the array constructor from the global context. This is
// a security problem since the global object might have been
// reinitialized and the array constructor from the global context
// might be from a context that we are not allowed to access.
JSFunction* constructor =
JSFunction::cast(Top::context()->global_context()->array_function());
#else
CONVERT_CHECKED(FixedArray, literals, args[1]);
const int kArrayFunIndex = JSFunction::kLiteralArrayFunctionIndex;
JSFunction* constructor = JSFunction::cast(literals->get(kArrayFunIndex));
#endif
// Create the JSArray.
Object* object = Heap::AllocateJSObject(constructor);

View File

@ -40,8 +40,9 @@ namespace assembler { namespace arm {
using ::v8::internal::Object;
using ::v8::internal::PrintF;
using ::v8:: internal::ReadLine;
using ::v8:: internal::DeleteArray;
using ::v8::internal::OS;
using ::v8::internal::ReadLine;
using ::v8::internal::DeleteArray;
DEFINE_bool(trace_sim, false, "trace simulator execution");
@ -1392,7 +1393,7 @@ void Simulator::InstructionDecode(Instr* instr) {
}
DEFINE_int(stop_sim_at, -1, "Simulator stop after x number of instructions");
DEFINE_int(stop_sim_at, 0, "Simulator stop after x number of instructions");
//
@ -1400,16 +1401,30 @@ void Simulator::execute() {
// Get the PC to simulate. Cannot use the accessor here as we need the
// raw PC value and not the one used as input to arithmetic instructions.
int program_counter = get_pc();
while (program_counter != end_sim_pc) {
Instr* instr = reinterpret_cast<Instr*>(program_counter);
icount_++;
if (icount_ == FLAG_stop_sim_at) {
Debugger dbg(this);
dbg.Debug();
} else {
if (FLAG_stop_sim_at == 0) {
// Fast version of the dispatch loop without checking whether the simulator
// should be stopping at a particular executed instruction.
while (program_counter != end_sim_pc) {
Instr* instr = reinterpret_cast<Instr*>(program_counter);
icount_++;
InstructionDecode(instr);
program_counter = get_pc();
}
} else {
// FLAG_stop_sim_at is at the non-default value. Stop in the debugger when
// we reach the particular instuction count.
while (program_counter != end_sim_pc) {
Instr* instr = reinterpret_cast<Instr*>(program_counter);
icount_++;
if (icount_ == FLAG_stop_sim_at) {
Debugger dbg(this);
dbg.Debug();
} else {
InstructionDecode(instr);
}
program_counter = get_pc();
}
program_counter = get_pc();
}
}

View File

@ -154,25 +154,34 @@ void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
Object* StubCompiler::CompileLazyCompile(Code::Flags flags) {
// ----------- S t a t e -------------
// -- r1: function
// -- lr: return address
// -----------------------------------
HandleScope scope;
// Enter the JS frame but don't add additional arguments.
__ EnterJSFrame(0);
// Enter an internal frame.
__ EnterInternalFrame();
// Push the function on the stack and call the runtime function.
__ ldr(r0, MemOperand(pp, 0));
__ push(r0);
// Preserve the function.
__ push(r1);
// Push the function on the stack as the argument to the runtime function.
__ push(r1);
__ CallRuntime(Runtime::kLazyCompile, 1);
// Move result to r1 and restore number of arguments.
__ mov(r1, Operand(r0));
__ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kArgsLengthOffset));
// Calculate the entry point.
__ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag));
__ ExitJSFrame(DO_NOT_RETURN);
// Restore saved function.
__ pop(r1);
// Tear down temporary frame.
__ ExitInternalFrame();
// Do a tail-call of the compiled function.
__ add(r1, r1, Operand(Code::kHeaderSize - kHeapObjectTag));
__ Jump(r1);
__ Jump(r2);
return GetCodeWithFlags(flags);
}
@ -202,30 +211,23 @@ Object* CallStubCompiler::CompileCallField(Object* object,
// Get the properties array of the holder and get the function from the field.
int offset = index * kPointerSize + Array::kHeaderSize;
__ ldr(r3, FieldMemOperand(reg, JSObject::kPropertiesOffset));
__ ldr(r3, FieldMemOperand(r3, offset));
__ ldr(r1, FieldMemOperand(reg, JSObject::kPropertiesOffset));
__ ldr(r1, FieldMemOperand(r1, offset));
// Check that the function really is a function.
__ tst(r3, Operand(kSmiTagMask));
__ tst(r1, Operand(kSmiTagMask));
__ b(eq, &miss);
// Get the map.
__ ldr(r2, FieldMemOperand(r3, HeapObject::kMapOffset));
__ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
__ ldrb(r2, FieldMemOperand(r2, Map::kInstanceTypeOffset));
__ cmp(r2, Operand(JS_FUNCTION_TYPE));
__ b(ne, &miss);
// TODO(1233523): remove r0 after changing Jump to InvokeCode
// Setup argument length register.
__ mov(r0, Operand(argc));
// Patch the function on the stack; 1 ~ receiver.
__ str(r3, MemOperand(sp, (argc + 1) * kPointerSize));
__ str(r1, MemOperand(sp, (argc + 1) * kPointerSize));
// Setup the context and jump to the call code of the function (tail call).
__ ldr(cp, FieldMemOperand(r3, JSFunction::kContextOffset));
__ ldr(r2, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
__ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kCodeOffset));
__ add(r2, r2, Operand(Code::kHeaderSize - kHeapObjectTag));
__ Jump(r2);
// Invoke the function.
__ InvokeFunction(r1, arguments(), JUMP_FUNCTION);
// Handle call cache miss.
__ bind(&miss);
@ -330,11 +332,11 @@ Object* CallStubCompiler::CompileCallConstant(Object* object,
}
// Get the function and setup the context.
__ mov(r3, Operand(Handle<JSFunction>(function)));
__ ldr(cp, FieldMemOperand(r3, JSFunction::kContextOffset));
__ mov(r1, Operand(Handle<JSFunction>(function)));
__ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
// Patch the function on the stack; 1 ~ receiver.
__ str(r3, MemOperand(sp, (argc + 1) * kPointerSize));
__ str(r1, MemOperand(sp, (argc + 1) * kPointerSize));
// Jump to the cached code (tail call).
Handle<Code> code(function->code());

View File

@ -467,7 +467,7 @@ Object* StubCompiler::CompileLazyCompile(Code::Flags flags) {
HandleScope scope;
// Enter an internal frame.
__ EnterFrame(StackFrame::INTERNAL);
__ EnterInternalFrame();
// Push a copy of the function onto the stack.
__ push(edi);
@ -476,7 +476,7 @@ Object* StubCompiler::CompileLazyCompile(Code::Flags flags) {
__ CallRuntime(Runtime::kLazyCompile, 1);
__ pop(edi);
__ ExitFrame(StackFrame::INTERNAL);
__ ExitInternalFrame();
// Do a tail-call of the compiled function.
__ lea(ecx, FieldOperand(eax, Code::kHeaderSize));
@ -666,7 +666,7 @@ Object* CallStubCompiler::CompileCallInterceptor(Object* object,
__ CheckMaps(JSObject::cast(object), edx, holder, ebx, ecx, &miss);
// Enter an internal frame.
__ EnterFrame(StackFrame::INTERNAL);
__ EnterInternalFrame();
// Push arguments on the expression stack.
__ push(edx); // receiver
@ -687,7 +687,7 @@ Object* CallStubCompiler::CompileCallInterceptor(Object* object,
__ mov(edx, Operand(ebp, (argc + 2) * kPointerSize)); // receiver
// Exit frame.
__ ExitFrame(StackFrame::INTERNAL);
__ ExitInternalFrame();
// Check that the function really is a function.
__ test(edi, Immediate(kSmiTagMask));