x64 code generation for construct calls, declaring global variables, and runtime calls.

We could not handle functions with no explicit return statement, so I
added support for that as well.  The place was hard to find because the
code had been left out of the code generator with no TODO comment.  We
need to remember to leave a comment whenever we omit code while porting
something. :-)

Review URL: http://codereview.chromium.org/146029

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@2257 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
ager@chromium.org 2009-06-24 08:28:42 +00:00
parent f66ea38c0b
commit 5b4bacdd69
7 changed files with 287 additions and 44 deletions


@ -77,7 +77,7 @@ Operand::Operand(Register base, int32_t disp): rex_(0) {
len_ = 1;
if (base.is(rsp) || base.is(r12)) {
// SIB byte is needed to encode (rsp + offset) or (r12 + offset).
set_sib(kTimes1, rsp, base);
set_sib(times_1, rsp, base);
}
if (disp == 0 && !base.is(rbp) && !base.is(r13)) {
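For context, the special cases above follow from the x64 ModRM/SIB encoding rules: a base register whose low three bits are 0b100 (rsp, r12) can only be encoded through a SIB byte, and one whose low three bits are 0b101 (rbp, r13) cannot be encoded with an implicit zero displacement. A minimal standalone sketch of those predicates, using the standard x64 register numbering; the helper names are illustrative and not part of the V8 assembler:

// Illustrative sketch only; register codes: rsp = 4, rbp = 5, r12 = 12, r13 = 13.
constexpr bool RequiresSibByte(int base_code) {
  // A ModRM r/m field of 0b100 is reserved to signal a SIB byte.
  return (base_code & 0x7) == 0x4;  // rsp or r12
}

constexpr bool RequiresExplicitDisp(int base_code) {
  // A ModRM r/m field of 0b101 with mod 00 means RIP-relative, not [rbp].
  return (base_code & 0x7) == 0x5;  // rbp or r13
}

static_assert(RequiresSibByte(4) && RequiresSibByte(12), "rsp and r12 need a SIB byte");
static_assert(RequiresExplicitDisp(5) && RequiresExplicitDisp(13), "rbp and r13 need a displacement");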


@ -278,12 +278,12 @@ class Immediate BASE_EMBEDDED {
// Machine instruction Operands
enum ScaleFactor {
kTimes1 = 0,
kTimes2 = 1,
kTimes4 = 2,
kTimes8 = 3,
kTimesIntSize = kTimes4,
kTimesPointerSize = kTimes8
times_1 = 0,
times_2 = 1,
times_4 = 2,
times_8 = 3,
times_int_size = times_4,
times_pointer_size = times_8
};
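For reference, these enumerators are the x64 SIB scale field, i.e. a shift count applied to the index register: an Operand(base, index, scale, disp) denotes the address base + (index << scale) + disp, so times_8 (and therefore times_pointer_size) scales by 8 bytes. A small self-contained sketch of that arithmetic, with the enum values copied from the diff for illustration:

#include <cstdint>

enum ScaleFactor { times_1 = 0, times_2 = 1, times_4 = 2, times_8 = 3 };

// Effective address computed by an x64 [base + index*scale + disp] operand.
constexpr intptr_t EffectiveAddress(intptr_t base, intptr_t index,
                                    ScaleFactor scale, int32_t disp) {
  return base + (index << scale) + disp;
}

static_assert(EffectiveAddress(0x1000, 3, times_8, 16) == 0x1000 + 3 * 8 + 16,
              "times_8 multiplies the index by the pointer size");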


@ -50,10 +50,10 @@ static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
__ push(rdi);
// Preserve the number of arguments on the stack. Must preserve both
// eax and ebx because these registers are used when copying the
// rax and rbx because these registers are used when copying the
// arguments and the receiver.
ASSERT(kSmiTagSize == 1);
__ lea(rcx, Operand(rax, rax, kTimes1, kSmiTag));
__ lea(rcx, Operand(rax, rax, times_1, kSmiTag));
__ push(rcx);
}
@ -71,7 +71,7 @@ static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
ASSERT_EQ(kSmiTagSize, 1 && kSmiTag == 0);
ASSERT_EQ(kPointerSize, (1 << kSmiTagSize) * 4);
__ pop(rcx);
__ lea(rsp, Operand(rsp, rbx, kTimes4, 1 * kPointerSize)); // 1 ~ receiver
__ lea(rsp, Operand(rsp, rbx, times_4, 1 * kPointerSize)); // 1 ~ receiver
__ push(rcx);
}
@ -98,7 +98,7 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
// Copy receiver and all expected arguments.
const int offset = StandardFrameConstants::kCallerSPOffset;
__ lea(rax, Operand(rbp, rax, kTimesPointerSize, offset));
__ lea(rax, Operand(rbp, rax, times_pointer_size, offset));
__ movq(rcx, Immediate(-1)); // account for receiver
Label copy;
@ -117,7 +117,7 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
// Copy receiver and all actual arguments.
const int offset = StandardFrameConstants::kCallerSPOffset;
__ lea(rdi, Operand(rbp, rax, kTimesPointerSize, offset));
__ lea(rdi, Operand(rbp, rax, times_pointer_size, offset));
__ movq(rcx, Immediate(-1)); // account for receiver
Label copy;
@ -167,14 +167,133 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
masm->int3(); // UNIMPLEMENTED.
}
void Builtins::Generate_JSConstructCall(MacroAssembler* masm) {
masm->int3(); // UNIMPLEMENTED.
// ----------- S t a t e -------------
// -- rax: number of arguments
// -- rdi: constructor function
// -----------------------------------
Label non_function_call;
// Check that function is not a smi.
__ testl(rdi, Immediate(kSmiTagMask));
__ j(zero, &non_function_call);
// Check that function is a JSFunction.
__ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
__ j(not_equal, &non_function_call);
// Jump to the function-specific construct stub.
__ movq(rbx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
__ movq(rbx, FieldOperand(rbx, SharedFunctionInfo::kConstructStubOffset));
__ lea(rbx, FieldOperand(rbx, Code::kHeaderSize));
__ jmp(rbx);
// rdi: called object
// rax: number of arguments
__ bind(&non_function_call);
// Set expected number of arguments to zero (not changing rax).
__ movq(rbx, Immediate(0));
__ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
__ Jump(Handle<Code>(builtin(ArgumentsAdaptorTrampoline)),
RelocInfo::CODE_TARGET);
}
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
masm->int3(); // UNIMPLEMENTED.
// Enter a construct frame.
__ EnterConstructFrame();
// Store a smi-tagged arguments count on the stack.
__ shl(rax, Immediate(kSmiTagSize));
__ push(rax);
// Push the function to invoke on the stack.
__ push(rdi);
// Try to allocate the object without transitioning into C code. If any of the
// preconditions is not met, the code bails out to the runtime call.
Label rt_call, allocated;
// TODO(x64): Implement inlined allocation.
// Allocate the new receiver object using the runtime call.
// rdi: function (constructor)
__ bind(&rt_call);
// Must restore rdi (constructor) before calling runtime.
__ movq(rdi, Operand(rsp, 0));
__ push(rdi);
__ CallRuntime(Runtime::kNewObject, 1);
__ movq(rbx, rax); // store result in rbx
// New object allocated.
// rbx: newly allocated object
__ bind(&allocated);
// Retrieve the function from the stack.
__ pop(rdi);
// Retrieve smi-tagged arguments count from the stack.
__ movq(rax, Operand(rsp, 0));
__ shr(rax, Immediate(kSmiTagSize));
// Push the allocated receiver to the stack. We need two copies
// because we may have to return the original one and the calling
// conventions dictate that the called function pops the receiver.
__ push(rbx);
__ push(rbx);
// Setup pointer to last argument.
__ lea(rbx, Operand(rbp, StandardFrameConstants::kCallerSPOffset));
// Copy arguments and receiver to the expression stack.
Label loop, entry;
__ movq(rcx, rax);
__ jmp(&entry);
__ bind(&loop);
__ push(Operand(rbx, rcx, times_pointer_size, 0));
__ bind(&entry);
__ decq(rcx);
__ j(greater_equal, &loop);
// Call the function.
ParameterCount actual(rax);
__ InvokeFunction(rdi, actual, CALL_FUNCTION);
// Restore context from the frame.
__ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
// If the result is an object (in the ECMA sense), we should get rid
// of the receiver and use the result; see ECMA-262 section 13.2.2-7
// on page 74.
Label use_receiver, exit;
// If the result is a smi, it is *not* an object in the ECMA sense.
__ testl(rax, Immediate(kSmiTagMask));
__ j(zero, &use_receiver);
// If the type of the result (stored in its map) is less than
// FIRST_JS_OBJECT_TYPE, it is not an object in the ECMA sense.
__ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rcx);
__ j(greater_equal, &exit);
// Throw away the result of the constructor invocation and use the
// on-stack receiver as the result.
__ bind(&use_receiver);
__ movq(rax, Operand(rsp, 0));
// Restore the arguments count and leave the construct frame.
__ bind(&exit);
__ movq(rbx, Operand(rsp, kPointerSize)); // get arguments count
__ LeaveConstructFrame();
// Remove caller arguments from the stack and return.
ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
__ pop(rcx);
__ lea(rsp, Operand(rsp, rbx, times_4, 1 * kPointerSize)); // 1 ~ receiver
__ push(rcx);
__ ret(0);
}
static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
bool is_construct) {
// Expects five C++ function parameters.
@ -258,7 +377,7 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
__ xor_(rcx, rcx); // Set loop variable to 0.
__ jmp(&entry);
__ bind(&loop);
__ movq(kScratchRegister, Operand(rbx, rcx, kTimesPointerSize, 0));
__ movq(kScratchRegister, Operand(rbx, rcx, times_pointer_size, 0));
__ push(Operand(kScratchRegister, 0)); // dereference handle
__ addq(rcx, Immediate(1));
__ bind(&entry);
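A note on the argument-removal epilogues in this file (LeaveArgumentsAdaptorFrame above and the construct stub's exit path): they drop the caller's arguments by scaling the smi-tagged count. With kSmiTagSize == 1 and kPointerSize == 8, as asserted in the surrounding code, a tagged count equals count * 2, so a times_4 scale yields count * 8 bytes, plus one further slot for the receiver. A small sketch of that arithmetic; the helper name is made up for illustration:

#include <cstdint>

constexpr int kSmiTagSize = 1;   // values asserted in this file
constexpr int kPointerSize = 8;

// What lea(rsp, Operand(rsp, rbx, times_4, 1 * kPointerSize)) adds to rsp
// when rbx holds the smi-tagged argument count (return address held in rcx).
constexpr intptr_t BytesToDrop(intptr_t smi_tagged_argc) {
  return smi_tagged_argc * 4 + 1 * kPointerSize;  // arguments + receiver
}

static_assert(BytesToDrop(3 << kSmiTagSize) == (3 + 1) * kPointerSize,
              "three arguments plus the receiver occupy four stack slots");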


@ -123,10 +123,21 @@ CodeGenerator::CodeGenerator(int buffer_size,
}
void CodeGenerator::DeclareGlobals(Handle<FixedArray> a) {
UNIMPLEMENTED();
void CodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
// Call the runtime to declare the globals. The inevitable call
// will sync frame elements to memory anyway, so we do it eagerly to
// allow us to push the arguments directly into place.
frame_->SyncRange(0, frame_->element_count() - 1);
__ movq(kScratchRegister, pairs, RelocInfo::EMBEDDED_OBJECT);
frame_->EmitPush(kScratchRegister);
frame_->EmitPush(rsi); // The context is the second argument.
frame_->EmitPush(Immediate(Smi::FromInt(is_eval() ? 1 : 0)));
Result ignored = frame_->CallRuntime(Runtime::kDeclareGlobals, 3);
// Return value is ignored.
}
void CodeGenerator::TestCodeGenerator() {
// Compile a function from a string, and run it.
@ -286,10 +297,35 @@ void CodeGenerator::GenCode(FunctionLiteral* function) {
// Ignore the return value.
}
#endif
}
VisitStatements(body);
VisitStatements(body);
// Handle the return from the function.
if (has_valid_frame()) {
// If there is a valid frame, control flow can fall off the end of
// the body. In that case there is an implicit return statement.
ASSERT(!function_return_is_shadowed_);
CodeForReturnPosition(function);
frame_->PrepareForReturn();
Result undefined(Factory::undefined_value());
if (function_return_.is_bound()) {
function_return_.Jump(&undefined);
} else {
function_return_.Bind(&undefined);
GenerateReturnSequence(&undefined);
}
} else if (function_return_.is_linked()) {
// If the return target has dangling jumps to it, then we have not
// yet generated the return sequence. This can happen when (a)
// control does not flow off the end of the body so we did not
// compile an artificial return statement just above, and (b) there
// are return statements in the body but (c) they are all shadowed.
Result return_value;
function_return_.Bind(&return_value);
GenerateReturnSequence(&return_value);
}
}
}
// Adjust for function-level loop nesting.
loop_nesting_ -= function->loop_nesting();
@ -1323,13 +1359,75 @@ void CodeGenerator::VisitCallEval(CallEval* a) {
}
void CodeGenerator::VisitCallNew(CallNew* a) {
UNIMPLEMENTED();
void CodeGenerator::VisitCallNew(CallNew* node) {
Comment cmnt(masm_, "[ CallNew");
CodeForStatementPosition(node);
// According to ECMA-262, section 11.2.2, page 44, the function
// expression in new calls must be evaluated before the
// arguments. This is different from ordinary calls, where the
// actual function to call is resolved after the arguments have been
// evaluated.
// Compute function to call and use the global object as the
// receiver. There is no need to use the global proxy here because
// it will always be replaced with a newly allocated object.
Load(node->expression());
LoadGlobal();
// Push the arguments ("left-to-right") on the stack.
ZoneList<Expression*>* args = node->arguments();
int arg_count = args->length();
for (int i = 0; i < arg_count; i++) {
Load(args->at(i));
}
// Call the construct call builtin that handles allocation and
// constructor invocation.
CodeForSourcePosition(node->position());
Result result = frame_->CallConstructor(arg_count);
// Replace the function on the stack with the result.
frame_->SetElementAt(0, &result);
}
void CodeGenerator::VisitCallRuntime(CallRuntime* a) {
UNIMPLEMENTED();
void CodeGenerator::VisitCallRuntime(CallRuntime* node) {
if (CheckForInlineRuntimeCall(node)) {
return;
}
ZoneList<Expression*>* args = node->arguments();
Comment cmnt(masm_, "[ CallRuntime");
Runtime::Function* function = node->function();
if (function == NULL) {
// Prepare stack for calling JS runtime function.
frame_->Push(node->name());
// Push the builtins object found in the current global object.
__ movq(kScratchRegister, GlobalObject());
__ movq(kScratchRegister,
FieldOperand(kScratchRegister, GlobalObject::kBuiltinsOffset));
frame_->Push(kScratchRegister);
}
// Push the arguments ("left-to-right").
int arg_count = args->length();
for (int i = 0; i < arg_count; i++) {
Load(args->at(i));
}
if (function == NULL) {
// Call the JS runtime function.
Result answer = frame_->CallCallIC(RelocInfo::CODE_TARGET,
arg_count,
loop_nesting_);
frame_->RestoreContextRegister();
frame_->SetElementAt(0, &answer);
} else {
// Call the C runtime function.
Result answer = frame_->CallRuntime(function, arg_count);
frame_->Push(&answer);
}
}
@ -1509,10 +1607,12 @@ void CodeGenerator::VisitCompareOperation(CompareOperation* a) {
UNIMPLEMENTED();
}
void CodeGenerator::VisitThisFunction(ThisFunction* a) {
UNIMPLEMENTED();
void CodeGenerator::VisitThisFunction(ThisFunction* node) {
frame_->PushFunction();
}
void CodeGenerator::GenerateArgumentsAccess(ZoneList<Expression*>* args) {
UNIMPLEMENTED();
}
@ -1706,7 +1806,7 @@ void CodeGenerator::ToBoolean(ControlDestination* dest) {
ASSERT(kSmiTag == 0);
__ testq(value.reg(), value.reg());
dest->false_target()->Branch(zero);
__ testq(value.reg(), Immediate(kSmiTagMask));
__ testl(value.reg(), Immediate(kSmiTagMask));
dest->true_target()->Branch(zero);
// Call the stub for all other cases.
@ -2600,7 +2700,7 @@ void Reference::GetValue(TypeofState typeof_state) {
GetName());
// Check that the receiver is a heap object.
__ testq(receiver.reg(), Immediate(kSmiTagMask));
__ testl(receiver.reg(), Immediate(kSmiTagMask));
deferred->Branch(zero);
__ bind(deferred->patch_site());
@ -2767,11 +2867,11 @@ void ToBooleanStub::Generate(MacroAssembler* masm) {
}
bool CodeGenerator::FoldConstantSmis(Token::Value op, int left, int right) {
return false; // UNIMPLEMENTED.
}
void CodeGenerator::LikelySmiBinaryOperation(Token::Value op,
Result* left,
Result* right,
@ -2851,7 +2951,7 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
__ movq(rdi, Operand(rsp, (argc_ + 2) * kPointerSize));
// Check that the function really is a JavaScript function.
__ testq(rdi, Immediate(kSmiTagMask));
__ testl(rdi, Immediate(kSmiTagMask));
__ j(zero, &slow);
// Goto slow case if we do not have a function.
__ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
@ -2916,7 +3016,7 @@ void ArgumentsAccessStub::GenerateNewObject(MacroAssembler* masm) {
// Patch the arguments.length and the parameters pointer.
__ movq(rcx, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset));
__ movq(Operand(rsp, 1 * kPointerSize), rcx);
__ lea(rdx, Operand(rdx, rcx, kTimes4, kDisplacement));
__ lea(rdx, Operand(rdx, rcx, times_4, kDisplacement));
__ movq(Operand(rsp, 2 * kPointerSize), rdx);
// Do the runtime call to allocate the arguments object.
@ -2954,9 +3054,9 @@ void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
// Shifting code depends on SmiEncoding being equivalent to left shift:
// we multiply by four to get pointer alignment.
ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
__ lea(rbx, Operand(rbp, rax, kTimes4, 0));
__ lea(rbx, Operand(rbp, rax, times_4, 0));
__ neg(rdx);
__ movq(rax, Operand(rbx, rdx, kTimes4, kDisplacement));
__ movq(rax, Operand(rbx, rdx, times_4, kDisplacement));
__ Ret();
// Arguments adaptor case: Check index against actual arguments
@ -2971,9 +3071,9 @@ void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
// Shifting code depends on SmiEncoding being equivalent to left shift:
// we multiply by four to get pointer alignment.
ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
__ lea(rbx, Operand(rbx, rcx, kTimes4, 0));
__ lea(rbx, Operand(rbx, rcx, times_4, 0));
__ neg(rdx);
__ movq(rax, Operand(rbx, rdx, kTimes4, kDisplacement));
__ movq(rax, Operand(rbx, rdx, times_4, kDisplacement));
__ Ret();
// Slow-case: Handle non-smi or out-of-bounds access to arguments
@ -3105,7 +3205,7 @@ void CEntryStub::GenerateCore(MacroAssembler* masm,
Label retry;
// If the returned exception is RETRY_AFTER_GC continue at retry label
ASSERT(Failure::RETRY_AFTER_GC == 0);
__ testq(rax, Immediate(((1 << kFailureTypeTagSize) - 1) << kFailureTagSize));
__ testl(rax, Immediate(((1 << kFailureTypeTagSize) - 1) << kFailureTagSize));
__ j(zero, &retry);
Label continue_exception;
@ -3574,8 +3674,8 @@ void GenericBinaryOpStub::GenerateSmiCode(MacroAssembler* masm, Label* slow) {
// Check for negative zero result.
__ NegativeZeroTest(rax, rcx, slow); // use rcx = x | y
// Tag the result and store it in register rax.
ASSERT(kSmiTagSize == kTimes2); // adjust code if not the case
__ lea(rax, Operand(rax, rax, kTimes1, kSmiTag));
ASSERT(kSmiTagSize == times_2); // adjust code if not the case
__ lea(rax, Operand(rax, rax, times_1, kSmiTag));
break;
case Token::MOD:
@ -3640,8 +3740,8 @@ void GenericBinaryOpStub::GenerateSmiCode(MacroAssembler* masm, Label* slow) {
UNREACHABLE();
}
// Tag the result and store it in register rax.
ASSERT(kSmiTagSize == kTimes2); // adjust code if not the case
__ lea(rax, Operand(rax, rax, kTimes1, kSmiTag));
ASSERT(kSmiTagSize == times_2); // adjust code if not the case
__ lea(rax, Operand(rax, rax, times_1, kSmiTag));
break;
default:
@ -3787,8 +3887,8 @@ void GenericBinaryOpStub::Generate(MacroAssembler* masm) {
__ j(negative, &non_smi_result);
}
// Tag smi result and return.
ASSERT(kSmiTagSize == kTimes2); // adjust code if not the case
__ lea(rax, Operand(rax, rax, kTimes1, kSmiTag));
ASSERT(kSmiTagSize == times_2); // adjust code if not the case
__ lea(rax, Operand(rax, rax, times_1, kSmiTag));
__ ret(2 * kPointerSize);
// All ops except SHR return a signed int32 that we load in a HeapNumber.
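Several sites in this file (and in the IC and virtual frame changes below) switch the smi check from testq to testl; presumably because only the low tag bit participates in the test, the 32-bit form is sufficient and encodes more compactly. A sketch of the predicate that testl(reg, Immediate(kSmiTagMask)) followed by j(zero, ...) implements, using the tag values asserted elsewhere in the diff:

#include <cstdint>

constexpr intptr_t kSmiTag = 0;   // as asserted: kSmiTag == 0, kSmiTagSize == 1
constexpr int kSmiTagSize = 1;
constexpr intptr_t kSmiTagMask = (1 << kSmiTagSize) - 1;  // just the low bit

// A value is a smi when its low tag bit is clear.
constexpr bool IsSmi(intptr_t value) {
  return (value & kSmiTagMask) == kSmiTag;
}

static_assert(IsSmi(42 << kSmiTagSize), "tagged small integers have a clear low bit");
static_assert(!IsSmi(0x7), "values with the low tag bit set are heap object pointers");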


@ -206,7 +206,7 @@ void CallIC::Generate(MacroAssembler* masm,
// Check if the receiver is a global object of some sort.
Label invoke, global;
__ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize)); // receiver
__ testq(rdx, Immediate(kSmiTagMask));
__ testl(rdx, Immediate(kSmiTagMask));
__ j(zero, &invoke);
__ movq(rcx, FieldOperand(rdx, HeapObject::kMapOffset));
__ movzxbq(rcx, FieldOperand(rcx, Map::kInstanceTypeOffset));


@ -756,7 +756,7 @@ void MacroAssembler::EnterExitFrame(StackFrame::Type type) {
// Setup argv in callee-saved register r15. It is reused in LeaveExitFrame,
// so it must be retained across the C-call.
int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
lea(r15, Operand(rbp, rdi, kTimesPointerSize, offset));
lea(r15, Operand(rbp, rdi, times_pointer_size, offset));
#ifdef ENABLE_DEBUGGER_SUPPORT
// Save the state of all registers to the stack from the memory


@ -65,7 +65,7 @@ void VirtualFrame::Enter() {
#ifdef DEBUG
// Verify that rdi contains a JS function. The following code
// relies on rax being available for use.
__ testq(rdi, Immediate(kSmiTagMask));
__ testl(rdi, Immediate(kSmiTagMask));
__ Check(not_zero,
"VirtualFrame::Enter - rdi is not a function (smi check).");
__ CmpObjectType(rdi, JS_FUNCTION_TYPE, rax);
@ -906,6 +906,30 @@ Result VirtualFrame::CallCallIC(RelocInfo::Mode mode,
}
Result VirtualFrame::CallConstructor(int arg_count) {
// Arguments, receiver, and function are on top of the frame. The
// IC expects arg count in rax, function in rdi, and the arguments
// and receiver on the stack.
Handle<Code> ic(Builtins::builtin(Builtins::JSConstructCall));
// Duplicate the function before preparing the frame.
PushElementAt(arg_count + 1);
Result function = Pop();
PrepareForCall(arg_count + 1, arg_count + 1); // Spill args and receiver.
function.ToRegister(rdi);
// Constructors are called with the number of arguments in register
// rax for now. Another option would be to have separate construct
// call trampolines for the different argument counts encountered.
Result num_args = cgen()->allocator()->Allocate(rax);
ASSERT(num_args.is_valid());
__ movq(num_args.reg(), Immediate(arg_count));
function.Unuse();
num_args.Unuse();
return RawCallCodeObject(ic, RelocInfo::CONSTRUCT_CALL);
}
Result VirtualFrame::CallStoreIC() {
// Name, value, and receiver are on top of the frame. The IC
// expects name in rcx, value in rax, and receiver on the stack. It