X64 Crank: Implemented DoBranch and all *AndBranch comparisons.

Review URL: http://codereview.chromium.org/6374002

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@6385 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
lrn@chromium.org 2011-01-19 10:17:18 +00:00
parent 8d3b675586
commit e66ceb9079
11 changed files with 732 additions and 68 deletions


@ -1285,11 +1285,11 @@ void LCodeGen::DoCmpID(LCmpID* instr) {
NearLabel done;
Condition cc = TokenToCondition(instr->op(), instr->is_double());
__ mov(ToRegister(result), Handle<Object>(Heap::true_value()));
__ mov(ToRegister(result), Factory::true_value());
__ j(cc, &done);
__ bind(&unordered);
__ mov(ToRegister(result), Handle<Object>(Heap::false_value()));
__ mov(ToRegister(result), Factory::false_value());
__ bind(&done);
}
@ -1320,10 +1320,10 @@ void LCodeGen::DoCmpJSObjectEq(LCmpJSObjectEq* instr) {
Register result = ToRegister(instr->result());
__ cmp(left, Operand(right));
__ mov(result, Handle<Object>(Heap::true_value()));
__ mov(result, Factory::true_value());
NearLabel done;
__ j(equal, &done);
__ mov(result, Handle<Object>(Heap::false_value()));
__ mov(result, Factory::false_value());
__ bind(&done);
}
@ -1348,10 +1348,10 @@ void LCodeGen::DoIsNull(LIsNull* instr) {
__ cmp(reg, Factory::null_value());
if (instr->is_strict()) {
__ mov(result, Handle<Object>(Heap::true_value()));
__ mov(result, Factory::true_value());
NearLabel done;
__ j(equal, &done);
__ mov(result, Handle<Object>(Heap::false_value()));
__ mov(result, Factory::false_value());
__ bind(&done);
} else {
NearLabel true_value, false_value, done;
@ -1368,10 +1368,10 @@ void LCodeGen::DoIsNull(LIsNull* instr) {
__ test(scratch, Immediate(1 << Map::kIsUndetectable));
__ j(not_zero, &true_value);
__ bind(&false_value);
__ mov(result, Handle<Object>(Heap::false_value()));
__ mov(result, Factory::false_value());
__ jmp(&done);
__ bind(&true_value);
__ mov(result, Handle<Object>(Heap::true_value()));
__ mov(result, Factory::true_value());
__ bind(&done);
}
}
@ -1447,11 +1447,11 @@ void LCodeGen::DoIsObject(LIsObject* instr) {
__ j(true_cond, &is_true);
__ bind(&is_false);
__ mov(result, Handle<Object>(Heap::false_value()));
__ mov(result, Factory::false_value());
__ jmp(&done);
__ bind(&is_true);
__ mov(result, Handle<Object>(Heap::true_value()));
__ mov(result, Factory::true_value());
__ bind(&done);
}
@ -1479,10 +1479,10 @@ void LCodeGen::DoIsSmi(LIsSmi* instr) {
ASSERT(instr->hydrogen()->value()->representation().IsTagged());
__ test(input, Immediate(kSmiTagMask));
__ mov(result, Handle<Object>(Heap::true_value()));
__ mov(result, Factory::true_value());
NearLabel done;
__ j(zero, &done);
__ mov(result, Handle<Object>(Heap::false_value()));
__ mov(result, Factory::false_value());
__ bind(&done);
}
@ -1507,7 +1507,6 @@ static InstanceType TestType(HHasInstanceType* instr) {
}
static Condition BranchCondition(HHasInstanceType* instr) {
InstanceType from = instr->from();
InstanceType to = instr->to();
@ -1529,10 +1528,10 @@ void LCodeGen::DoHasInstanceType(LHasInstanceType* instr) {
__ j(zero, &is_false);
__ CmpObjectType(input, TestType(instr->hydrogen()), result);
__ j(NegateCondition(BranchCondition(instr->hydrogen())), &is_false);
__ mov(result, Handle<Object>(Heap::true_value()));
__ mov(result, Factory::true_value());
__ jmp(&done);
__ bind(&is_false);
__ mov(result, Handle<Object>(Heap::false_value()));
__ mov(result, Factory::false_value());
__ bind(&done);
}
@ -1559,12 +1558,12 @@ void LCodeGen::DoHasCachedArrayIndex(LHasCachedArrayIndex* instr) {
Register result = ToRegister(instr->result());
ASSERT(instr->hydrogen()->value()->representation().IsTagged());
__ mov(result, Handle<Object>(Heap::true_value()));
__ mov(result, Factory::true_value());
__ test(FieldOperand(input, String::kHashFieldOffset),
Immediate(String::kContainsCachedArrayIndexMask));
NearLabel done;
__ j(not_zero, &done);
__ mov(result, Handle<Object>(Heap::false_value()));
__ mov(result, Factory::false_value());
__ bind(&done);
}
@ -1653,11 +1652,11 @@ void LCodeGen::DoClassOfTest(LClassOfTest* instr) {
__ j(not_equal, &is_false);
__ bind(&is_true);
__ mov(result, Handle<Object>(Heap::true_value()));
__ mov(result, Factory::true_value());
__ jmp(&done);
__ bind(&is_false);
__ mov(result, Handle<Object>(Heap::false_value()));
__ mov(result, Factory::false_value());
__ bind(&done);
}
@ -3292,11 +3291,11 @@ void LCodeGen::DoTypeofIs(LTypeofIs* instr) {
instr->type_literal());
__ j(final_branch_condition, &true_label);
__ bind(&false_label);
__ mov(result, Handle<Object>(Heap::false_value()));
__ mov(result, Factory::false_value());
__ jmp(&done);
__ bind(&true_label);
__ mov(result, Handle<Object>(Heap::true_value()));
__ mov(result, Factory::true_value());
__ bind(&done);
}
@ -3341,9 +3340,9 @@ Condition LCodeGen::EmitTypeofIs(Label* true_label,
final_branch_condition = below;
} else if (type_name->Equals(Heap::boolean_symbol())) {
__ cmp(input, Handle<Object>(Heap::true_value()));
__ cmp(input, Factory::true_value());
__ j(equal, true_label);
__ cmp(input, Handle<Object>(Heap::false_value()));
__ cmp(input, Factory::false_value());
final_branch_condition = equal;
} else if (type_name->Equals(Heap::undefined_symbol())) {


@ -523,7 +523,8 @@ enum InstanceType {
JS_BUILTINS_OBJECT_TYPE,
JS_GLOBAL_PROXY_TYPE,
JS_ARRAY_TYPE,
JS_REGEXP_TYPE, // LAST_JS_OBJECT_TYPE
JS_REGEXP_TYPE, // LAST_JS_OBJECT_TYPE, FIRST_FUNCTION_CLASS_TYPE
JS_FUNCTION_TYPE,
@ -541,7 +542,10 @@ enum InstanceType {
// function objects are not counted as objects, even though they are
// implemented as such; only values whose typeof is "object" are included.
FIRST_JS_OBJECT_TYPE = JS_VALUE_TYPE,
LAST_JS_OBJECT_TYPE = JS_REGEXP_TYPE
LAST_JS_OBJECT_TYPE = JS_REGEXP_TYPE,
// RegExp objects have [[Class]] "function" because they are callable.
// All types from this type and above are objects with [[Class]] "function".
FIRST_FUNCTION_CLASS_TYPE = JS_REGEXP_TYPE
};
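With the new constant, "does this value have [[Class]] 'function'" reduces to a single unsigned range check on the instance type, which is what the typeof code in the x64 lithium codegen below relies on. A minimal sketch of that check (illustrative only, not code from this commit):

// Everything from JS_REGEXP_TYPE upward is callable and is reported by
// typeof as "function".
static inline bool HasFunctionClass(InstanceType type) {
  return type >= FIRST_FUNCTION_CLASS_TYPE;
}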


@ -707,6 +707,10 @@ class Assembler : public Malloced {
arithmetic_op_32(0x1b, dst, src);
}
void sbbq(Register dst, Register src) {
arithmetic_op(0x1b, dst, src);
}
void cmpb(Register dst, Immediate src) {
immediate_arithmetic_op_8(0x7, dst, src);
}


@ -3248,6 +3248,12 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
}
Register InstanceofStub::left() { return rax; }
Register InstanceofStub::right() { return rdx; }
int CompareStub::MinorKey() {
// Encode the three parameters in a unique 16 bit value. To avoid duplicate
// stubs the never NaN NaN condition is only taken into account if the
@ -4272,22 +4278,119 @@ void StringCompareStub::Generate(MacroAssembler* masm) {
}
void ICCompareStub::GenerateSmis(MacroAssembler* masm) {
UNIMPLEMENTED();
ASSERT(state_ == CompareIC::SMIS);
NearLabel miss;
__ JumpIfNotBothSmi(rdx, rax, &miss);
if (GetCondition() == equal) {
// For equality we do not care about the sign of the result.
__ SmiSub(rax, rax, rdx);
} else {
NearLabel done;
__ SmiSub(rdx, rdx, rax);
__ j(no_overflow, &done);
// Correct sign of result in case of overflow.
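// (Signed overflow means the computed difference has the wrong sign; NOT
// flips the sign bit back, and the caller only looks at the sign of the
// non-zero result.)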
__ SmiNot(rdx, rdx);
__ bind(&done);
__ movq(rax, rdx);
}
__ ret(0);
__ bind(&miss);
GenerateMiss(masm);
}
void ICCompareStub::GenerateHeapNumbers(MacroAssembler* masm) {
UNIMPLEMENTED();
ASSERT(state_ == CompareIC::HEAP_NUMBERS);
NearLabel generic_stub;
NearLabel unordered;
NearLabel miss;
Condition either_smi = masm->CheckEitherSmi(rax, rdx);
__ j(either_smi, &generic_stub);
__ CmpObjectType(rax, HEAP_NUMBER_TYPE, rcx);
__ j(not_equal, &miss);
__ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rcx);
__ j(not_equal, &miss);
// Load left and right operand
__ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
__ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
// Compare operands
__ ucomisd(xmm0, xmm1);
// Don't base result on EFLAGS when a NaN is involved.
__ j(parity_even, &unordered);
// Return a result of -1, 0, or 1, based on EFLAGS.
// Performing mov, because xor would destroy the flag register.
__ movl(rax, Immediate(0));
__ movl(rcx, Immediate(0));
__ setcc(above, rax); // Add one to zero if carry clear and not equal.
__ sbbq(rax, rcx); // Subtract one if below (aka. carry set).
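// rax is now +1 (above), 0 (equal) or -1 (below).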
__ ret(0);
__ bind(&unordered);
CompareStub stub(GetCondition(), strict(), NO_COMPARE_FLAGS);
__ bind(&generic_stub);
__ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);
__ bind(&miss);
GenerateMiss(masm);
}
void ICCompareStub::GenerateObjects(MacroAssembler* masm) {
UNIMPLEMENTED();
ASSERT(state_ == CompareIC::OBJECTS);
NearLabel miss;
Condition either_smi = masm->CheckEitherSmi(rdx, rax);
__ j(either_smi, &miss);
__ CmpObjectType(rax, JS_OBJECT_TYPE, rcx);
__ j(not_equal, &miss, not_taken);
__ CmpObjectType(rdx, JS_OBJECT_TYPE, rcx);
__ j(not_equal, &miss, not_taken);
ASSERT(GetCondition() == equal);
__ subq(rax, rdx);
__ ret(0);
__ bind(&miss);
GenerateMiss(masm);
}
void ICCompareStub::GenerateMiss(MacroAssembler* masm) {
UNIMPLEMENTED();
// Save the registers.
__ pop(rcx);
__ push(rdx);
__ push(rax);
__ push(rcx);
// Call the runtime system in a fresh internal frame.
ExternalReference miss = ExternalReference(IC_Utility(IC::kCompareIC_Miss));
__ EnterInternalFrame();
__ push(rdx);
__ push(rax);
__ Push(Smi::FromInt(op_));
__ CallExternalReference(miss, 3);
__ LeaveInternalFrame();
// Compute the entry point of the rewritten stub.
__ lea(rdi, FieldOperand(rax, Code::kHeaderSize));
// Restore registers.
__ pop(rcx);
__ pop(rax);
__ pop(rdx);
__ push(rcx);
// Do a tail call to the rewritten stub.
__ jmp(rdi);
}
#undef __
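The sign correction in GenerateSmis above is easier to see outside the assembler. A standalone C++ sketch of the same trick (illustrative only, not V8 code; a plain int32_t stands in for the smi):

#include <cstdint>
#include <cstdio>

// Returns a value that is <0, ==0 or >0, like the value the stub leaves in rax.
int32_t CompareBySubtraction(int32_t left, int32_t right) {
  // Raw wrapping subtraction, as the CPU computes it.
  int32_t raw = static_cast<int32_t>(
      static_cast<uint32_t>(left) - static_cast<uint32_t>(right));
  // Widened subtraction, used here only to detect signed overflow.
  int64_t wide = static_cast<int64_t>(left) - static_cast<int64_t>(right);
  if (wide != raw) {
    // Overflow inverted the sign of the difference; NOT flips it back.
    // The magnitude is now meaningless, but callers only look at the sign
    // and at zero, and an overflowed difference is never zero.
    raw = ~raw;
  }
  return raw;
}

int main() {
  printf("%d\n", CompareBySubtraction(INT32_MIN, 1) < 0);   // prints 1
  printf("%d\n", CompareBySubtraction(INT32_MAX, -1) > 0);  // prints 1
  printf("%d\n", CompareBySubtraction(7, 7) == 0);          // prints 1
}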


@ -930,12 +930,86 @@ int LCodeGen::GetNextEmittedBlock(int block) {
void LCodeGen::EmitBranch(int left_block, int right_block, Condition cc) {
Abort("Unimplemented: %s", "EmitBranch");
int next_block = GetNextEmittedBlock(current_block_);
right_block = chunk_->LookupDestination(right_block);
left_block = chunk_->LookupDestination(left_block);
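// Emit as little code as possible: identical targets become a goto, a target
// that is the next emitted block is reached by falling through, and an
// unconditional branch (cc == always) never needs the trailing jump.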
if (right_block == left_block) {
EmitGoto(left_block);
} else if (left_block == next_block) {
__ j(NegateCondition(cc), chunk_->GetAssemblyLabel(right_block));
} else if (right_block == next_block) {
__ j(cc, chunk_->GetAssemblyLabel(left_block));
} else {
__ j(cc, chunk_->GetAssemblyLabel(left_block));
if (cc != always) {
__ jmp(chunk_->GetAssemblyLabel(right_block));
}
}
}
void LCodeGen::DoBranch(LBranch* instr) {
Abort("Unimplemented: %s", "DoBranch");
int true_block = chunk_->LookupDestination(instr->true_block_id());
int false_block = chunk_->LookupDestination(instr->false_block_id());
Representation r = instr->hydrogen()->representation();
if (r.IsInteger32()) {
Register reg = ToRegister(instr->InputAt(0));
__ testl(reg, reg);
EmitBranch(true_block, false_block, not_zero);
} else if (r.IsDouble()) {
XMMRegister reg = ToDoubleRegister(instr->InputAt(0));
__ xorpd(xmm0, xmm0);
__ ucomisd(reg, xmm0);
EmitBranch(true_block, false_block, not_equal);
} else {
ASSERT(r.IsTagged());
Register reg = ToRegister(instr->InputAt(0));
HType type = instr->hydrogen()->type();
if (type.IsBoolean()) {
__ Cmp(reg, Factory::true_value());
EmitBranch(true_block, false_block, equal);
} else if (type.IsSmi()) {
__ SmiCompare(reg, Smi::FromInt(0));
EmitBranch(true_block, false_block, not_equal);
} else {
Label* true_label = chunk_->GetAssemblyLabel(true_block);
Label* false_label = chunk_->GetAssemblyLabel(false_block);
__ Cmp(reg, Factory::undefined_value());
__ j(equal, false_label);
__ Cmp(reg, Factory::true_value());
__ j(equal, true_label);
__ Cmp(reg, Factory::false_value());
__ j(equal, false_label);
__ SmiCompare(reg, Smi::FromInt(0));
__ j(equal, false_label);
__ JumpIfSmi(reg, true_label);
// Test for double values. Plus/minus zero are false. NaN is handled
// in the stub.
NearLabel call_stub;
__ Cmp(FieldOperand(reg, HeapObject::kMapOffset),
Factory::heap_number_map());
__ j(not_equal, &call_stub);
__ movq(kScratchRegister, FieldOperand(reg, HeapNumber::kValueOffset));
__ shl(kScratchRegister, Immediate(1)); // Shift out the sign bit.
__ j(zero, false_label); // Zero or negative zero.
__ jmp(true_label);
// The conversion stub doesn't cause garbage collections so it's
// safe to not record a safepoint after the call.
__ bind(&call_stub);
ToBooleanStub stub;
__ Pushad();
__ push(reg);
__ CallStub(&stub);
__ testq(rax, rax);
__ Popad();
EmitBranch(true_block, false_block, not_zero);
}
}
}
@ -979,7 +1053,7 @@ void LCodeGen::DoGoto(LGoto* instr) {
}
Condition LCodeGen::TokenToCondition(Token::Value op, bool is_unsigned) {
inline Condition LCodeGen::TokenToCondition(Token::Value op, bool is_unsigned) {
Condition cond = no_condition;
switch (op) {
case Token::EQ:
@ -1008,17 +1082,64 @@ Condition LCodeGen::TokenToCondition(Token::Value op, bool is_unsigned) {
void LCodeGen::EmitCmpI(LOperand* left, LOperand* right) {
Abort("Unimplemented: %s", "EmitCmpI");
if (right->IsConstantOperand()) {
int32_t value = ToInteger32(LConstantOperand::cast(right));
if (left->IsRegister()) {
__ cmpl(ToRegister(left), Immediate(value));
} else {
__ cmpl(ToOperand(left), Immediate(value));
}
} else if (right->IsRegister()) {
__ cmpq(ToRegister(left), ToRegister(right));
} else {
__ cmpq(ToRegister(left), ToOperand(right));
}
}
void LCodeGen::DoCmpID(LCmpID* instr) {
Abort("Unimplemented: %s", "DoCmpID");
LOperand* left = instr->InputAt(0);
LOperand* right = instr->InputAt(1);
LOperand* result = instr->result();
NearLabel unordered;
if (instr->is_double()) {
// Don't base result on EFLAGS when a NaN is involved. Instead
// jump to the unordered case, which produces a false value.
__ ucomisd(ToDoubleRegister(left), ToDoubleRegister(right));
__ j(parity_even, &unordered);
} else {
EmitCmpI(left, right);
}
NearLabel done;
Condition cc = TokenToCondition(instr->op(), instr->is_double());
__ LoadRoot(ToRegister(result), Heap::kTrueValueRootIndex);
__ j(cc, &done);
__ bind(&unordered);
__ LoadRoot(ToRegister(result), Heap::kFalseValueRootIndex);
__ bind(&done);
}
void LCodeGen::DoCmpIDAndBranch(LCmpIDAndBranch* instr) {
Abort("Unimplemented: %s", "DoCmpIDAndBranch");
LOperand* left = instr->InputAt(0);
LOperand* right = instr->InputAt(1);
int false_block = chunk_->LookupDestination(instr->false_block_id());
int true_block = chunk_->LookupDestination(instr->true_block_id());
if (instr->is_double()) {
// Don't base result on EFLAGS when a NaN is involved. Instead
// jump to the false block.
__ ucomisd(ToDoubleRegister(left), ToDoubleRegister(right));
__ j(parity_even, chunk_->GetAssemblyLabel(false_block));
} else {
EmitCmpI(left, right);
}
Condition cc = TokenToCondition(instr->op(), instr->is_double());
EmitBranch(true_block, false_block, cc);
}
@ -1028,7 +1149,13 @@ void LCodeGen::DoCmpJSObjectEq(LCmpJSObjectEq* instr) {
void LCodeGen::DoCmpJSObjectEqAndBranch(LCmpJSObjectEqAndBranch* instr) {
Abort("Unimplemented: %s", "DoCmpJSObjectAndBranch");
Register left = ToRegister(instr->InputAt(0));
Register right = ToRegister(instr->InputAt(1));
int false_block = chunk_->LookupDestination(instr->false_block_id());
int true_block = chunk_->LookupDestination(instr->true_block_id());
__ cmpq(left, right);
EmitBranch(true_block, false_block, equal);
}
@ -1038,7 +1165,39 @@ void LCodeGen::DoIsNull(LIsNull* instr) {
void LCodeGen::DoIsNullAndBranch(LIsNullAndBranch* instr) {
Abort("Unimplemented: %s", "DoIsNullAndBranch");
Register reg = ToRegister(instr->InputAt(0));
int false_block = chunk_->LookupDestination(instr->false_block_id());
if (instr->hydrogen()->representation().IsSpecialization() ||
instr->hydrogen()->type().IsSmi()) {
// If the expression is known to be untagged or a smi, then it's definitely
// not null, and it can't be an undetectable object.
// Jump directly to the false block.
EmitGoto(false_block);
return;
}
int true_block = chunk_->LookupDestination(instr->true_block_id());
__ Cmp(reg, Factory::null_value());
if (instr->is_strict()) {
EmitBranch(true_block, false_block, equal);
} else {
Label* true_label = chunk_->GetAssemblyLabel(true_block);
Label* false_label = chunk_->GetAssemblyLabel(false_block);
__ j(equal, true_label);
__ Cmp(reg, Factory::undefined_value());
__ j(equal, true_label);
__ JumpIfSmi(reg, false_label);
// Check for undetectable objects by looking in the bit field in
// the map. The object has already been smi checked.
Register scratch = ToRegister(instr->TempAt(0));
__ movq(scratch, FieldOperand(reg, HeapObject::kMapOffset));
__ testb(FieldOperand(scratch, Map::kBitFieldOffset),
Immediate(1 << Map::kIsUndetectable));
EmitBranch(true_block, false_block, not_zero);
}
}
@ -1047,7 +1206,25 @@ Condition LCodeGen::EmitIsObject(Register input,
Register temp2,
Label* is_not_object,
Label* is_object) {
Abort("Unimplemented: %s", "EmitIsObject");
ASSERT(!input.is(temp1));
ASSERT(!input.is(temp2));
ASSERT(!temp1.is(temp2));
__ JumpIfSmi(input, is_not_object);
__ Cmp(input, Factory::null_value());
__ j(equal, is_object);
__ movq(temp1, FieldOperand(input, HeapObject::kMapOffset));
// Undetectable objects behave like undefined.
__ testb(FieldOperand(temp1, Map::kBitFieldOffset),
Immediate(1 << Map::kIsUndetectable));
__ j(not_zero, is_not_object);
__ movzxbl(temp2, FieldOperand(temp1, Map::kInstanceTypeOffset));
__ cmpb(temp2, Immediate(FIRST_JS_OBJECT_TYPE));
__ j(below, is_not_object);
__ cmpb(temp2, Immediate(LAST_JS_OBJECT_TYPE));
return below_equal;
}
@ -1058,7 +1235,18 @@ void LCodeGen::DoIsObject(LIsObject* instr) {
void LCodeGen::DoIsObjectAndBranch(LIsObjectAndBranch* instr) {
Abort("Unimplemented: %s", "DoIsObjectAndBranch");
Register reg = ToRegister(instr->InputAt(0));
Register temp = ToRegister(instr->TempAt(0));
Register temp2 = ToRegister(instr->TempAt(1));
int true_block = chunk_->LookupDestination(instr->true_block_id());
int false_block = chunk_->LookupDestination(instr->false_block_id());
Label* true_label = chunk_->GetAssemblyLabel(true_block);
Label* false_label = chunk_->GetAssemblyLabel(false_block);
Condition true_cond = EmitIsObject(reg, temp, temp2, false_label, true_label);
EmitBranch(true_block, false_block, true_cond);
}
@ -1068,7 +1256,38 @@ void LCodeGen::DoIsSmi(LIsSmi* instr) {
void LCodeGen::DoIsSmiAndBranch(LIsSmiAndBranch* instr) {
Abort("Unimplemented: %s", "DoIsSmiAndBranch");
int true_block = chunk_->LookupDestination(instr->true_block_id());
int false_block = chunk_->LookupDestination(instr->false_block_id());
Condition is_smi;
if (instr->InputAt(0)->IsRegister()) {
Register input = ToRegister(instr->InputAt(0));
is_smi = masm()->CheckSmi(input);
} else {
Operand input = ToOperand(instr->InputAt(0));
is_smi = masm()->CheckSmi(input);
}
EmitBranch(true_block, false_block, is_smi);
}
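// TestType and BranchCondition fold HHasInstanceType's [from, to] interval
// into a single instance type compare: an exact type tests with equal, an
// interval ending at LAST_TYPE with above_equal, and one starting at
// FIRST_TYPE with below_equal.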
static InstanceType TestType(HHasInstanceType* instr) {
InstanceType from = instr->from();
InstanceType to = instr->to();
if (from == FIRST_TYPE) return to;
ASSERT(from == to || to == LAST_TYPE);
return from;
}
static Condition BranchCondition(HHasInstanceType* instr) {
InstanceType from = instr->from();
InstanceType to = instr->to();
if (from == to) return equal;
if (to == LAST_TYPE) return above_equal;
if (from == FIRST_TYPE) return below_equal;
UNREACHABLE();
return equal;
}
@ -1078,7 +1297,17 @@ void LCodeGen::DoHasInstanceType(LHasInstanceType* instr) {
void LCodeGen::DoHasInstanceTypeAndBranch(LHasInstanceTypeAndBranch* instr) {
Abort("Unimplemented: %s", "DoHasInstanceTypeAndBranch");
Register input = ToRegister(instr->InputAt(0));
int true_block = chunk_->LookupDestination(instr->true_block_id());
int false_block = chunk_->LookupDestination(instr->false_block_id());
Label* false_label = chunk_->GetAssemblyLabel(false_block);
__ JumpIfSmi(input, false_label);
__ CmpObjectType(input, TestType(instr->hydrogen()), kScratchRegister);
EmitBranch(true_block, false_block, BranchCondition(instr->hydrogen()));
}
@ -1089,19 +1318,68 @@ void LCodeGen::DoHasCachedArrayIndex(LHasCachedArrayIndex* instr) {
void LCodeGen::DoHasCachedArrayIndexAndBranch(
LHasCachedArrayIndexAndBranch* instr) {
Abort("Unimplemented: %s", "DoHasCachedArrayIndexAndBranch");
Register input = ToRegister(instr->InputAt(0));
int true_block = chunk_->LookupDestination(instr->true_block_id());
int false_block = chunk_->LookupDestination(instr->false_block_id());
__ testl(FieldOperand(input, String::kHashFieldOffset),
Immediate(String::kContainsCachedArrayIndexMask));
EmitBranch(true_block, false_block, not_equal);
}
// Branches to a label or falls through with the answer in the z flag. Trashes
// the temp registers, but not the input. Only input and temp2 may alias.
// Branches to a label or falls through with the answer in the z flag.
// Trashes the temp register and possibly input (if it and temp are aliased).
void LCodeGen::EmitClassOfTest(Label* is_true,
Label* is_false,
Handle<String>class_name,
Handle<String> class_name,
Register input,
Register temp,
Register temp2) {
Abort("Unimplemented: %s", "EmitClassOfTest");
Register temp) {
__ JumpIfSmi(input, is_false);
__ CmpObjectType(input, FIRST_JS_OBJECT_TYPE, temp);
__ j(below, is_false);
// Map is now in temp.
// Functions have class 'Function'.
__ CmpInstanceType(temp, JS_FUNCTION_TYPE);
if (class_name->IsEqualTo(CStrVector("Function"))) {
__ j(equal, is_true);
} else {
__ j(equal, is_false);
}
// Check if the constructor in the map is a function.
__ movq(temp, FieldOperand(temp, Map::kConstructorOffset));
// As long as JS_FUNCTION_TYPE is the last instance type and it is
// right after LAST_JS_OBJECT_TYPE, we can avoid checking for
// LAST_JS_OBJECT_TYPE.
ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
// Objects with a non-function constructor have class 'Object'.
__ CmpObjectType(temp, JS_FUNCTION_TYPE, kScratchRegister);
if (class_name->IsEqualTo(CStrVector("Object"))) {
__ j(not_equal, is_true);
} else {
__ j(not_equal, is_false);
}
// temp now contains the constructor function. Grab the
// instance class name from there.
__ movq(temp, FieldOperand(temp, JSFunction::kSharedFunctionInfoOffset));
__ movq(temp, FieldOperand(temp,
SharedFunctionInfo::kInstanceClassNameOffset));
// The class name we are testing against is a symbol because it's a literal.
// The name in the constructor is a symbol because of the way the context is
// booted. This routine isn't expected to work for random API-created
// classes and it doesn't have to because you can't access it with natives
// syntax. Since both sides are symbols it is sufficient to use an identity
// comparison.
ASSERT(class_name->IsSymbol());
__ Cmp(temp, class_name);
// End with the answer in the z flag.
}
@ -1111,7 +1389,19 @@ void LCodeGen::DoClassOfTest(LClassOfTest* instr) {
void LCodeGen::DoClassOfTestAndBranch(LClassOfTestAndBranch* instr) {
Abort("Unimplemented: %s", "DoClassOfTestAndBranch");
Register input = ToRegister(instr->InputAt(0));
Register temp = ToRegister(instr->TempAt(0));
Handle<String> class_name = instr->hydrogen()->class_name();
int true_block = chunk_->LookupDestination(instr->true_block_id());
int false_block = chunk_->LookupDestination(instr->false_block_id());
Label* true_label = chunk_->GetAssemblyLabel(true_block);
Label* false_label = chunk_->GetAssemblyLabel(false_block);
EmitClassOfTest(true_label, false_label, class_name, input, temp);
EmitBranch(true_block, false_block, equal);
}
@ -1126,7 +1416,13 @@ void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
void LCodeGen::DoInstanceOfAndBranch(LInstanceOfAndBranch* instr) {
Abort("Unimplemented: %s", "DoInstanceOfAndBranch");
int true_block = chunk_->LookupDestination(instr->true_block_id());
int false_block = chunk_->LookupDestination(instr->false_block_id());
InstanceofStub stub(InstanceofStub::kArgsInRegisters);
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
__ testq(rax, rax);
EmitBranch(true_block, false_block, zero);
}
@ -1142,12 +1438,42 @@ void LCodeGen::DoDeferredLInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
void LCodeGen::DoCmpT(LCmpT* instr) {
Abort("Unimplemented: %s", "DoCmpT");
Token::Value op = instr->op();
Handle<Code> ic = CompareIC::GetUninitialized(op);
CallCode(ic, RelocInfo::CODE_TARGET, instr);
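// The compare IC leaves its result in rax. For GT and LTE the operands were
// passed to the IC in reverse order, so reverse the condition as well.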
Condition condition = TokenToCondition(op, false);
if (op == Token::GT || op == Token::LTE) {
condition = ReverseCondition(condition);
}
NearLabel true_value, done;
__ testq(rax, rax);
__ j(condition, &true_value);
__ LoadRoot(ToRegister(instr->result()), Heap::kFalseValueRootIndex);
__ jmp(&done);
__ bind(&true_value);
__ LoadRoot(ToRegister(instr->result()), Heap::kTrueValueRootIndex);
__ bind(&done);
}
void LCodeGen::DoCmpTAndBranch(LCmpTAndBranch* instr) {
Abort("Unimplemented: %s", "DoCmpTAndBranch");
Token::Value op = instr->op();
int true_block = chunk_->LookupDestination(instr->true_block_id());
int false_block = chunk_->LookupDestination(instr->false_block_id());
Handle<Code> ic = CompareIC::GetUninitialized(op);
CallCode(ic, RelocInfo::CODE_TARGET, instr);
// The compare stub expects compare condition and the input operands
// reversed for GT and LTE.
Condition condition = TokenToCondition(op, false);
if (op == Token::GT || op == Token::LTE) {
condition = ReverseCondition(condition);
}
__ testq(rax, rax);
EmitBranch(true_block, false_block, condition);
}
@ -1494,7 +1820,18 @@ void LCodeGen::DoTypeofIs(LTypeofIs* instr) {
void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) {
Abort("Unimplemented: %s", "DoTypeofIsAndBranch");
Register input = ToRegister(instr->InputAt(0));
int true_block = chunk_->LookupDestination(instr->true_block_id());
int false_block = chunk_->LookupDestination(instr->false_block_id());
Label* true_label = chunk_->GetAssemblyLabel(true_block);
Label* false_label = chunk_->GetAssemblyLabel(false_block);
Condition final_branch_condition = EmitTypeofIs(true_label,
false_label,
input,
instr->type_literal());
EmitBranch(true_block, false_block, final_branch_condition);
}
@ -1502,8 +1839,63 @@ Condition LCodeGen::EmitTypeofIs(Label* true_label,
Label* false_label,
Register input,
Handle<String> type_name) {
Abort("Unimplemented: %s", "EmitTypeofIs");
return no_condition;
Condition final_branch_condition = no_condition;
if (type_name->Equals(Heap::number_symbol())) {
__ JumpIfSmi(input, true_label);
__ Cmp(FieldOperand(input, HeapObject::kMapOffset),
Factory::heap_number_map());
final_branch_condition = equal;
} else if (type_name->Equals(Heap::string_symbol())) {
__ JumpIfSmi(input, false_label);
__ movq(input, FieldOperand(input, HeapObject::kMapOffset));
__ testb(FieldOperand(input, Map::kBitFieldOffset),
Immediate(1 << Map::kIsUndetectable));
__ j(not_zero, false_label);
__ CmpInstanceType(input, FIRST_NONSTRING_TYPE);
final_branch_condition = below;
} else if (type_name->Equals(Heap::boolean_symbol())) {
__ CompareRoot(input, Heap::kTrueValueRootIndex);
__ j(equal, true_label);
__ CompareRoot(input, Heap::kFalseValueRootIndex);
final_branch_condition = equal;
} else if (type_name->Equals(Heap::undefined_symbol())) {
__ CompareRoot(input, Heap::kUndefinedValueRootIndex);
__ j(equal, true_label);
__ JumpIfSmi(input, false_label);
// Check for undetectable objects => true.
__ movq(input, FieldOperand(input, HeapObject::kMapOffset));
__ testb(FieldOperand(input, Map::kBitFieldOffset),
Immediate(1 << Map::kIsUndetectable));
final_branch_condition = not_zero;
} else if (type_name->Equals(Heap::function_symbol())) {
__ JumpIfSmi(input, false_label);
__ CmpObjectType(input, FIRST_FUNCTION_CLASS_TYPE, input);
final_branch_condition = above_equal;
} else if (type_name->Equals(Heap::object_symbol())) {
__ JumpIfSmi(input, false_label);
__ Cmp(input, Factory::null_value());
__ j(equal, true_label);
// Check for undetectable objects => false.
__ testb(FieldOperand(input, Map::kBitFieldOffset),
Immediate(1 << Map::kIsUndetectable));
__ j(not_zero, false_label);
// Check for JS objects that are not RegExp or Function => true.
__ CmpInstanceType(input, FIRST_JS_OBJECT_TYPE);
__ j(below, false_label);
__ CmpInstanceType(input, FIRST_FUNCTION_CLASS_TYPE);
final_branch_condition = below_equal;
} else {
final_branch_condition = never;
__ jmp(false_label);
}
return final_branch_condition;
}


@ -138,8 +138,7 @@ class LCodeGen BASE_EMBEDDED {
Label* if_false,
Handle<String> class_name,
Register input,
Register temporary,
Register temporary2);
Register temporary);
int StackSlotCount() const { return chunk()->spill_slot_count(); }
int ParameterCount() const { return scope()->num_parameters(); }


@ -913,8 +913,103 @@ LInstruction* LChunkBuilder::DoGoto(HGoto* instr) {
LInstruction* LChunkBuilder::DoBranch(HBranch* instr) {
Abort("Unimplemented: %s", "DoBranch");
return NULL;
HValue* v = instr->value();
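// A value that is only emitted at its uses is fused directly into the branch:
// each recognized compare gets a dedicated *AndBranch instruction instead of
// first materializing a boolean result.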
if (v->EmitAtUses()) {
if (v->IsClassOfTest()) {
HClassOfTest* compare = HClassOfTest::cast(v);
ASSERT(compare->value()->representation().IsTagged());
return new LClassOfTestAndBranch(UseTempRegister(compare->value()),
TempRegister());
} else if (v->IsCompare()) {
HCompare* compare = HCompare::cast(v);
Token::Value op = compare->token();
HValue* left = compare->left();
HValue* right = compare->right();
Representation r = compare->GetInputRepresentation();
if (r.IsInteger32()) {
ASSERT(left->representation().IsInteger32());
ASSERT(right->representation().IsInteger32());
return new LCmpIDAndBranch(UseRegisterAtStart(left),
UseOrConstantAtStart(right));
} else if (r.IsDouble()) {
ASSERT(left->representation().IsDouble());
ASSERT(right->representation().IsDouble());
return new LCmpIDAndBranch(UseRegisterAtStart(left),
UseRegisterAtStart(right));
} else {
ASSERT(left->representation().IsTagged());
ASSERT(right->representation().IsTagged());
bool reversed = op == Token::GT || op == Token::LTE;
LOperand* left_operand = UseFixed(left, reversed ? rax : rdx);
LOperand* right_operand = UseFixed(right, reversed ? rdx : rax);
LCmpTAndBranch* result = new LCmpTAndBranch(left_operand,
right_operand);
return MarkAsCall(result, instr);
}
} else if (v->IsIsSmi()) {
HIsSmi* compare = HIsSmi::cast(v);
ASSERT(compare->value()->representation().IsTagged());
return new LIsSmiAndBranch(Use(compare->value()));
} else if (v->IsHasInstanceType()) {
HHasInstanceType* compare = HHasInstanceType::cast(v);
ASSERT(compare->value()->representation().IsTagged());
return new LHasInstanceTypeAndBranch(
UseRegisterAtStart(compare->value()));
} else if (v->IsHasCachedArrayIndex()) {
HHasCachedArrayIndex* compare = HHasCachedArrayIndex::cast(v);
ASSERT(compare->value()->representation().IsTagged());
return new LHasCachedArrayIndexAndBranch(
UseRegisterAtStart(compare->value()));
} else if (v->IsIsNull()) {
HIsNull* compare = HIsNull::cast(v);
ASSERT(compare->value()->representation().IsTagged());
// We only need a temp register for non-strict compare.
LOperand* temp = compare->is_strict() ? NULL : TempRegister();
return new LIsNullAndBranch(UseRegisterAtStart(compare->value()),
temp);
} else if (v->IsIsObject()) {
HIsObject* compare = HIsObject::cast(v);
ASSERT(compare->value()->representation().IsTagged());
LOperand* temp1 = TempRegister();
LOperand* temp2 = TempRegister();
return new LIsObjectAndBranch(UseRegisterAtStart(compare->value()),
temp1,
temp2);
} else if (v->IsCompareJSObjectEq()) {
HCompareJSObjectEq* compare = HCompareJSObjectEq::cast(v);
return new LCmpJSObjectEqAndBranch(UseRegisterAtStart(compare->left()),
UseRegisterAtStart(compare->right()));
} else if (v->IsInstanceOf()) {
HInstanceOf* instance_of = HInstanceOf::cast(v);
LInstanceOfAndBranch* result =
new LInstanceOfAndBranch(
UseFixed(instance_of->left(), InstanceofStub::left()),
UseFixed(instance_of->right(), InstanceofStub::right()));
return MarkAsCall(result, instr);
} else if (v->IsTypeofIs()) {
HTypeofIs* typeof_is = HTypeofIs::cast(v);
return new LTypeofIsAndBranch(UseTempRegister(typeof_is->value()));
} else {
if (v->IsConstant()) {
if (HConstant::cast(v)->handle()->IsTrue()) {
return new LGoto(instr->FirstSuccessor()->block_id());
} else if (HConstant::cast(v)->handle()->IsFalse()) {
return new LGoto(instr->SecondSuccessor()->block_id());
}
}
Abort("Undefined compare before branch");
return NULL;
}
}
return new LBranch(UseRegisterAtStart(v));
}
@ -1124,8 +1219,29 @@ LInstruction* LChunkBuilder::DoPower(HPower* instr) {
LInstruction* LChunkBuilder::DoCompare(HCompare* instr) {
Abort("Unimplemented: %s", "DoCompare");
return NULL;
Token::Value op = instr->token();
Representation r = instr->GetInputRepresentation();
if (r.IsInteger32()) {
ASSERT(instr->left()->representation().IsInteger32());
ASSERT(instr->right()->representation().IsInteger32());
LOperand* left = UseRegisterAtStart(instr->left());
LOperand* right = UseOrConstantAtStart(instr->right());
return DefineAsRegister(new LCmpID(left, right));
} else if (r.IsDouble()) {
ASSERT(instr->left()->representation().IsDouble());
ASSERT(instr->right()->representation().IsDouble());
LOperand* left = UseRegisterAtStart(instr->left());
LOperand* right = UseRegisterAtStart(instr->right());
return DefineAsRegister(new LCmpID(left, right));
} else {
ASSERT(instr->left()->representation().IsTagged());
ASSERT(instr->right()->representation().IsTagged());
bool reversed = (op == Token::GT || op == Token::LTE);
LOperand* left = UseFixed(instr->left(), reversed ? rax : rdx);
LOperand* right = UseFixed(instr->right(), reversed ? rdx : rax);
LCmpT* result = new LCmpT(left, right);
return MarkAsCall(DefineFixed(result, rax), instr);
}
}


@ -788,11 +788,10 @@ class LHasInstanceType: public LTemplateInstruction<1, 1> {
};
class LHasInstanceTypeAndBranch: public LControlInstruction<1, 1> {
class LHasInstanceTypeAndBranch: public LControlInstruction<1, 0> {
public:
LHasInstanceTypeAndBranch(LOperand* value, LOperand* temp) {
explicit LHasInstanceTypeAndBranch(LOperand* value) {
inputs_[0] = value;
temps_[0] = temp;
}
DECLARE_CONCRETE_INSTRUCTION(HasInstanceTypeAndBranch,
@ -840,12 +839,11 @@ class LClassOfTest: public LTemplateInstruction<1, 1, 1> {
};
class LClassOfTestAndBranch: public LControlInstruction<1, 2> {
class LClassOfTestAndBranch: public LControlInstruction<1, 1> {
public:
LClassOfTestAndBranch(LOperand* value, LOperand* temp, LOperand* temp2) {
LClassOfTestAndBranch(LOperand* value, LOperand* temp) {
inputs_[0] = value;
temps_[0] = temp;
temps_[1] = temp2;
}
DECLARE_CONCRETE_INSTRUCTION(ClassOfTestAndBranch,


@ -885,6 +885,13 @@ Condition MacroAssembler::CheckSmi(Register src) {
}
Condition MacroAssembler::CheckSmi(const Operand& src) {
ASSERT_EQ(0, kSmiTag);
testb(src, Immediate(kSmiTagMask));
return zero;
}
Condition MacroAssembler::CheckNonNegativeSmi(Register src) {
ASSERT_EQ(0, kSmiTag);
// Make mask 0x8000000000000001 and test that both bits are zero.
@ -1386,6 +1393,40 @@ void MacroAssembler::Call(Handle<Code> code_object, RelocInfo::Mode rmode) {
}
void MacroAssembler::Pushad() {
push(rax);
push(rcx);
push(rdx);
push(rbx);
// Not pushing rsp or rbp.
push(rsi);
push(rdi);
push(r8);
push(r9);
// r10 is kScratchRegister.
push(r11);
push(r12);
// r13 is kRootRegister.
push(r14);
// r15 is kSmiConstantRegister.
}
void MacroAssembler::Popad() {
pop(r14);
pop(r12);
pop(r11);
pop(r9);
pop(r8);
pop(rdi);
pop(rsi);
pop(rbx);
pop(rdx);
pop(rcx);
pop(rax);
}
void MacroAssembler::PushTryHandler(CodeLocation try_location,
HandlerType type) {
// Adjust this code if not the case.


@ -272,6 +272,7 @@ class MacroAssembler: public Assembler {
// Is the value a tagged smi.
Condition CheckSmi(Register src);
Condition CheckSmi(const Operand& src);
// Is the value a non-negative tagged smi.
Condition CheckNonNegativeSmi(Register src);
@ -590,6 +591,13 @@ class MacroAssembler: public Assembler {
void Call(ExternalReference ext);
void Call(Handle<Code> code_object, RelocInfo::Mode rmode);
// Non-x64 instructions.
// Push/pop all general purpose registers.
// Does not push rsp/rbp nor any of the assembler's special purpose registers
// (kScratchRegister, kSmiConstantRegister, kRootRegister).
void Pushad();
void Popad();
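// A typical use is to preserve all general purpose registers around a stub
// call, e.g. Pushad(); CallStub(&stub); Popad(); (see LCodeGen::DoBranch).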
// Compare object type for heap object.
// Always use unsigned comparisons: above and below, not less and greater.
// Incoming register is heap_object and outgoing register is map.


@ -82,7 +82,7 @@ test-serialize/ContextSerialization: SKIP
test-serialize/ContextDeserialization: SKIP
test-debug/BreakPointReturn: SKIP
test-debug/DebugStepLinearMixedICs: SKIP
test-debug/DebugConditional: SKIP
##############################################################################
[ $arch == arm ]