Complete the full code generator on x64.

Review URL: http://codereview.chromium.org/2078022

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@4686 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
This commit is contained in:
ager@chromium.org 2010-05-20 13:50:09 +00:00
parent b38a0a719a
commit 355d3166e1
9 changed files with 1300 additions and 126 deletions

View File

@ -121,7 +121,7 @@ static Handle<Code> MakeCode(Handle<Context> context, CompilationInfo* info) {
: (shared->is_toplevel() || shared->try_full_codegen());
bool force_full_compiler = false;
#ifdef V8_TARGET_ARCH_IA32
#if defined(V8_TARGET_ARCH_IA32) || defined(V8_TARGET_ARCH_X64)
// On ia32 the full compiler can compile all code, whereas on the other
// platforms the supported constructs are checked by the associated syntax checker. When
// --always-full-compiler is used on ia32 the syntax checker is still in

View File

@ -149,7 +149,7 @@ DEFINE_bool(full_compiler, true, "enable dedicated backend for run-once code")
DEFINE_bool(fast_compiler, false, "enable speculative optimizing backend")
DEFINE_bool(always_full_compiler, false,
"try to use the dedicated run-once backend for all code")
#ifdef V8_TARGET_ARCH_IA32
#if defined(V8_TARGET_ARCH_IA32) || defined(V8_TARGET_ARCH_X64)
DEFINE_bool(force_full_compiler, false,
"force use of the dedicated run-once backend for all code")
#endif

View File

@ -6163,11 +6163,11 @@ void CodeGenerator::GenerateIsObject(ZoneList<Expression*>* args) {
__ mov(map.reg(), FieldOperand(obj.reg(), HeapObject::kMapOffset));
__ movzx_b(map.reg(), FieldOperand(map.reg(), Map::kInstanceTypeOffset));
__ cmp(map.reg(), FIRST_JS_OBJECT_TYPE);
destination()->false_target()->Branch(less);
destination()->false_target()->Branch(below);
__ cmp(map.reg(), LAST_JS_OBJECT_TYPE);
obj.Unuse();
map.Unuse();
destination()->Split(less_equal);
destination()->Split(below_equal);
}
@ -6280,7 +6280,7 @@ void CodeGenerator::GenerateClassOf(ZoneList<Expression*>* args) {
__ mov(obj.reg(), FieldOperand(obj.reg(), HeapObject::kMapOffset));
__ movzx_b(tmp.reg(), FieldOperand(obj.reg(), Map::kInstanceTypeOffset));
__ cmp(tmp.reg(), FIRST_JS_OBJECT_TYPE);
null.Branch(less);
null.Branch(below);
// As long as JS_FUNCTION_TYPE is the last instance type and it is
// right after LAST_JS_OBJECT_TYPE, we can avoid checking for
@ -6869,7 +6869,7 @@ void CodeGenerator::GenerateSwapElements(ZoneList<Expression*>* args) {
// Check that object doesn't require security checks and
// has no indexed interceptor.
__ CmpObjectType(object.reg(), FIRST_JS_OBJECT_TYPE, tmp1.reg());
deferred->Branch(less);
deferred->Branch(below);
__ movzx_b(tmp1.reg(), FieldOperand(tmp1.reg(), Map::kBitFieldOffset));
__ test(tmp1.reg(), Immediate(KeyedLoadIC::kSlowCaseBitFieldMask));
deferred->Branch(not_zero);
@ -8185,11 +8185,11 @@ void CodeGenerator::VisitCompareOperation(CompareOperation* node) {
__ mov(map.reg(), FieldOperand(answer.reg(), HeapObject::kMapOffset));
__ movzx_b(map.reg(), FieldOperand(map.reg(), Map::kInstanceTypeOffset));
__ cmp(map.reg(), FIRST_JS_OBJECT_TYPE);
destination()->false_target()->Branch(less);
destination()->false_target()->Branch(below);
__ cmp(map.reg(), LAST_JS_OBJECT_TYPE);
answer.Unuse();
map.Unuse();
destination()->Split(less_equal);
destination()->Split(below_equal);
} else {
// Uncommon case: typeof testing against a string literal that is
// never returned from the typeof operator.
@ -11586,7 +11586,7 @@ void CompareStub::Generate(MacroAssembler* masm) {
ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
Label first_non_object;
__ cmp(ecx, FIRST_JS_OBJECT_TYPE);
__ j(less, &first_non_object);
__ j(below, &first_non_object);
// Return non-zero (eax is not zero)
Label return_not_equal;
@ -11603,7 +11603,7 @@ void CompareStub::Generate(MacroAssembler* masm) {
__ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
__ cmp(ecx, FIRST_JS_OBJECT_TYPE);
__ j(greater_equal, &return_not_equal);
__ j(above_equal, &return_not_equal);
// Check for oddballs: true, false, null, undefined.
__ cmp(ecx, ODDBALL_TYPE);
@ -12251,9 +12251,9 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
__ mov(eax, FieldOperand(eax, HeapObject::kMapOffset)); // eax - object map
__ movzx_b(ecx, FieldOperand(eax, Map::kInstanceTypeOffset)); // ecx - type
__ cmp(ecx, FIRST_JS_OBJECT_TYPE);
__ j(less, &slow, not_taken);
__ j(below, &slow, not_taken);
__ cmp(ecx, LAST_JS_OBJECT_TYPE);
__ j(greater, &slow, not_taken);
__ j(above, &slow, not_taken);
// Get the prototype of the function.
__ mov(edx, Operand(esp, 1 * kPointerSize)); // 1 ~ return address
@ -12281,9 +12281,9 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
__ mov(ecx, FieldOperand(ebx, HeapObject::kMapOffset));
__ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
__ cmp(ecx, FIRST_JS_OBJECT_TYPE);
__ j(less, &slow, not_taken);
__ j(below, &slow, not_taken);
__ cmp(ecx, LAST_JS_OBJECT_TYPE);
__ j(greater, &slow, not_taken);
__ j(above, &slow, not_taken);
// Register mapping:
// eax is object map.

View File

@ -806,8 +806,8 @@ void FullCodeGenerator::EmitDeclaration(Variable* variable,
__ Check(equal, "Unexpected declaration in current context.");
}
if (mode == Variable::CONST) {
__ mov(eax, Immediate(Factory::the_hole_value()));
__ mov(CodeGenerator::ContextOperand(esi, slot->index()), eax);
__ mov(CodeGenerator::ContextOperand(esi, slot->index()),
Immediate(Factory::the_hole_value()));
// No write barrier since the hole value is in old space.
} else if (function != NULL) {
VisitForValue(function, kAccumulator);
@ -823,10 +823,8 @@ void FullCodeGenerator::EmitDeclaration(Variable* variable,
__ push(esi);
__ push(Immediate(variable->name()));
// Declaration nodes are always introduced in one of two modes.
ASSERT(mode == Variable::VAR ||
mode == Variable::CONST);
PropertyAttributes attr =
(mode == Variable::VAR) ? NONE : READ_ONLY;
ASSERT(mode == Variable::VAR || mode == Variable::CONST);
PropertyAttributes attr = (mode == Variable::VAR) ? NONE : READ_ONLY;
__ push(Immediate(Smi::FromInt(attr)));
// Push initial value, if any.
// Note: For variables we must not push an initial value (such as
@ -1068,8 +1066,8 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
__ StackLimitCheck(&stack_limit_hit);
__ bind(&stack_check_done);
// Generate code for the going to the next element by incrementing
// the index (smi) stored on top of the stack.
// Generate code for going to the next element by incrementing the
// index (smi) stored on top of the stack.
__ bind(loop_statement.continue_target());
__ add(Operand(esp, 0 * kPointerSize), Immediate(Smi::FromInt(1)));
__ jmp(&loop);
@ -2031,9 +2029,9 @@ void FullCodeGenerator::EmitIsObject(ZoneList<Expression*>* args) {
__ j(not_zero, if_false);
__ movzx_b(ecx, FieldOperand(ebx, Map::kInstanceTypeOffset));
__ cmp(ecx, FIRST_JS_OBJECT_TYPE);
__ j(less, if_false);
__ j(below, if_false);
__ cmp(ecx, LAST_JS_OBJECT_TYPE);
__ j(less_equal, if_true);
__ j(below_equal, if_true);
__ jmp(if_false);
Apply(context_, if_true, if_false);
@ -2225,7 +2223,7 @@ void FullCodeGenerator::EmitClassOf(ZoneList<Expression*>* args) {
__ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
__ movzx_b(ebx, FieldOperand(eax, Map::kInstanceTypeOffset));
__ cmp(ebx, FIRST_JS_OBJECT_TYPE);
__ j(less, &null);
__ j(below, &null);
// As long as JS_FUNCTION_TYPE is the last instance type and it is
// right after LAST_JS_OBJECT_TYPE, we can avoid checking for

View File

@ -3840,11 +3840,13 @@ void CodeGenerator::GenerateIsObject(ZoneList<Expression*>* args) {
__ testb(FieldOperand(kScratchRegister, Map::kBitFieldOffset),
Immediate(1 << Map::kIsUndetectable));
destination()->false_target()->Branch(not_zero);
__ CmpInstanceType(kScratchRegister, FIRST_JS_OBJECT_TYPE);
destination()->false_target()->Branch(less);
__ CmpInstanceType(kScratchRegister, LAST_JS_OBJECT_TYPE);
__ movzxbq(kScratchRegister,
FieldOperand(kScratchRegister, Map::kInstanceTypeOffset));
__ cmpq(kScratchRegister, Immediate(FIRST_JS_OBJECT_TYPE));
destination()->false_target()->Branch(below);
__ cmpq(kScratchRegister, Immediate(LAST_JS_OBJECT_TYPE));
obj.Unuse();
destination()->Split(less_equal);
destination()->Split(below_equal);
}
@ -4336,7 +4338,7 @@ void CodeGenerator::GenerateRandomHeapNumber(
__ PrepareCallCFunction(0);
__ CallCFunction(ExternalReference::random_uint32_function(), 0);
// Convert 32 random bits in eax to 0.(32 random bits) in a double
// Convert 32 random bits in rax to 0.(32 random bits) in a double
// by computing:
// ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20)).
__ movl(rcx, Immediate(0x49800000)); // 1.0 x 2^20 as single.

File diff suppressed because it is too large. [Load Diff]

View File

@ -747,7 +747,7 @@ void MacroAssembler::SmiSub(Register dst,
void MacroAssembler::SmiSub(Register dst,
Register src1,
Operand const& src2,
const Operand& src2,
Label* on_not_smi_result) {
if (on_not_smi_result == NULL) {
// No overflow checking. Use only when it's known that
@ -866,6 +866,7 @@ void MacroAssembler::SmiAddConstant(Register dst, Register src, Smi* constant) {
void MacroAssembler::SmiAddConstant(const Operand& dst, Smi* constant) {
ASSERT(!dst.is(kScratchRegister));
if (constant->value() != 0) {
Move(kScratchRegister, constant);
addq(dst, kScratchRegister);

View File

@ -360,7 +360,7 @@ class MacroAssembler: public Assembler {
void SmiSub(Register dst,
Register src1,
Operand const& src2,
const Operand& src2,
Label* on_not_smi_result);
// Multiplies smi values and return the result as a smi,

View File

@ -273,7 +273,7 @@ static void CreateTraceCallerFunction(const char* func_name,
// StackTracer uses Top::c_entry_fp as a starting point for stack
// walking.
TEST(CFromJSStackTrace) {
#ifdef V8_HOST_ARCH_IA32
#if defined(V8_HOST_ARCH_IA32) || defined(V8_HOST_ARCH_X64)
// TODO(711) The hack of replacing the inline runtime function
// RandomHeapNumber with GetFrameNumber does not work with the way the full
// compiler generates inline runtime calls.
@ -315,7 +315,7 @@ TEST(CFromJSStackTrace) {
// Top::c_entry_fp value. In this case, StackTracer uses passed frame
// pointer value as a starting point for stack walking.
TEST(PureJSStackTrace) {
#ifdef V8_HOST_ARCH_IA32
#if defined(V8_HOST_ARCH_IA32) || defined(V8_HOST_ARCH_X64)
// TODO(711) The hack of replacing the inline runtime function
// RandomHeapNumber with GetFrameNumber does not work with the way the full
// compiler generates inline runtime calls.