Convert UnaryOpStub to a HydrogenCodeStub

BUG=
R=danno@chromium.org

Review URL: https://codereview.chromium.org/18712002

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@15506 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
This commit is contained in:
olivf@chromium.org 2013-07-05 09:26:22 +00:00
parent 437f8b0c24
commit 240c7aced9
32 changed files with 630 additions and 1339 deletions

View File

@ -226,8 +226,20 @@ void InternalArrayNArgumentsConstructorStub::InitializeInterfaceDescriptor(
}
// Describes the calling convention of the (Hydrogen) UnaryOpStub on ARM:
// a single register parameter in r0, with UnaryOpIC_Miss as the handler
// invoked when the stub deoptimizes.
void UnaryOpStub::InitializeInterfaceDescriptor(
Isolate* isolate,
CodeStubInterfaceDescriptor* descriptor) {
static Register registers[] = { r0 };  // The operand.
descriptor->register_param_count_ = 1;
descriptor->register_params_ = registers;
// Called on a miss so the IC can record the new type and repatch.
descriptor->deoptimization_handler_ =
FUNCTION_ADDR(UnaryOpIC_Miss);
}
#define __ ACCESS_MASM(masm)
static void EmitIdenticalObjectComparison(MacroAssembler* masm,
Label* slow,
Condition cond);
@ -1289,277 +1301,6 @@ void StoreBufferOverflowStub::Generate(MacroAssembler* masm) {
}
// Emits a debug name of the form "UnaryOpStub_<op>_<overwrite>_<type>",
// e.g. "UnaryOpStub_SUB_Alloc_Smi" (old PlatformCodeStub version).
void UnaryOpStub::PrintName(StringStream* stream) {
const char* op_name = Token::Name(op_);
const char* overwrite_name = NULL;  // Make g++ happy.
switch (mode_) {
case UNARY_NO_OVERWRITE: overwrite_name = "Alloc"; break;
case UNARY_OVERWRITE: overwrite_name = "Overwrite"; break;
}
stream->Add("UnaryOpStub_%s_%s_%s",
op_name,
overwrite_name,
UnaryOpIC::GetName(operand_type_));
}
// TODO(svenpanne): Use virtual functions instead of switch.
// Entry point for stub code generation: dispatches on the recorded operand
// type to the specialized generator for that IC state.
// TODO(svenpanne): Use virtual functions instead of switch.
void UnaryOpStub::Generate(MacroAssembler* masm) {
switch (operand_type_) {
case UnaryOpIC::UNINITIALIZED:
// No type feedback yet; emit code that patches the IC on first use.
GenerateTypeTransition(masm);
break;
case UnaryOpIC::SMI:
GenerateSmiStub(masm);
break;
case UnaryOpIC::NUMBER:
GenerateNumberStub(masm);
break;
case UnaryOpIC::GENERIC:
GenerateGenericStub(masm);
break;
}
}
// Tail-calls the runtime so the IC can record the operand type and patch in
// a more specialized stub. Pushes operand, op, mode and current operand type
// as four smi-tagged arguments.
void UnaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
__ mov(r3, Operand(r0));  // the operand
__ mov(r2, Operand(Smi::FromInt(op_)));
__ mov(r1, Operand(Smi::FromInt(mode_)));
__ mov(r0, Operand(Smi::FromInt(operand_type_)));
__ Push(r3, r2, r1, r0);
// Tail call: the patch routine returns the result directly to the caller.
__ TailCallExternalReference(
ExternalReference(IC_Utility(IC::kUnaryOp_Patch), masm->isolate()), 4, 1);
}
// TODO(svenpanne): Use virtual functions instead of switch.
// Dispatch for the SMI state: only SUB and BIT_NOT have unary stubs.
// TODO(svenpanne): Use virtual functions instead of switch.
void UnaryOpStub::GenerateSmiStub(MacroAssembler* masm) {
switch (op_) {
case Token::SUB:
GenerateSmiStubSub(masm);
break;
case Token::BIT_NOT:
GenerateSmiStubBitNot(masm);
break;
default:
UNREACHABLE();
}
}
// SMI-state negation: fast path negates a smi in place; any non-smi input or
// smi overflow (both labels bound to the same point) triggers a type
// transition to a more general stub.
void UnaryOpStub::GenerateSmiStubSub(MacroAssembler* masm) {
Label non_smi, slow;
GenerateSmiCodeSub(masm, &non_smi, &slow);
__ bind(&non_smi);
__ bind(&slow);
GenerateTypeTransition(masm);
}
// SMI-state bitwise-not: a smi input always yields a smi (no overflow case),
// so only a non-smi input falls through to the type transition.
void UnaryOpStub::GenerateSmiStubBitNot(MacroAssembler* masm) {
Label non_smi;
GenerateSmiCodeBitNot(masm, &non_smi);
__ bind(&non_smi);
GenerateTypeTransition(masm);
}
// Emits the smi fast path for negation. Jumps to |non_smi| if r0 is not a
// smi, to |slow| if the result would not fit in a smi; otherwise returns
// the negated smi in r0.
void UnaryOpStub::GenerateSmiCodeSub(MacroAssembler* masm,
Label* non_smi,
Label* slow) {
__ JumpIfNotSmi(r0, non_smi);
// The result of negating zero or the smallest negative smi is not a smi.
// (clearing all bits but the sign yields 0 exactly for those two inputs).
__ bic(ip, r0, Operand(0x80000000), SetCC);
__ b(eq, slow);
// Return '0 - value'.
__ rsb(r0, r0, Operand::Zero());
__ Ret();
}
// Emits the smi fast path for bitwise-not. Jumps to |non_smi| if r0 is not
// a smi; otherwise returns ~value as a smi in r0. MVN also inverts the smi
// tag bit, so the tag is re-cleared with BIC.
void UnaryOpStub::GenerateSmiCodeBitNot(MacroAssembler* masm,
Label* non_smi) {
__ JumpIfNotSmi(r0, non_smi);
// Flip bits and revert inverted smi-tag.
__ mvn(r0, Operand(r0));
__ bic(r0, r0, Operand(kSmiTagMask));
__ Ret();
}
// TODO(svenpanne): Use virtual functions instead of switch.
// Dispatch for the NUMBER state (smi or heap number operands).
// TODO(svenpanne): Use virtual functions instead of switch.
void UnaryOpStub::GenerateNumberStub(MacroAssembler* masm) {
switch (op_) {
case Token::SUB:
GenerateNumberStubSub(masm);
break;
case Token::BIT_NOT:
GenerateNumberStubBitNot(masm);
break;
default:
UNREACHABLE();
}
}
// NUMBER-state negation: try smi path first, then heap-number path; a
// non-number falls through to the type transition, while smi overflow goes
// straight to the generic builtin fallback (the result is -0 or 2^30,
// both representable only as heap numbers).
void UnaryOpStub::GenerateNumberStubSub(MacroAssembler* masm) {
Label non_smi, slow, call_builtin;
GenerateSmiCodeSub(masm, &non_smi, &call_builtin);
__ bind(&non_smi);
GenerateHeapNumberCodeSub(masm, &slow);
__ bind(&slow);
GenerateTypeTransition(masm);
__ bind(&call_builtin);
GenerateGenericCodeFallback(masm);
}
// NUMBER-state bitwise-not: smi path first, then heap-number path; any
// other input transitions the IC to a more general state.
void UnaryOpStub::GenerateNumberStubBitNot(MacroAssembler* masm) {
Label non_smi, slow;
GenerateSmiCodeBitNot(masm, &non_smi);
__ bind(&non_smi);
GenerateHeapNumberCodeBitNot(masm, &slow);
__ bind(&slow);
GenerateTypeTransition(masm);
}
// Negates a heap number in r0 by flipping the IEEE 754 sign bit in the
// exponent word. In OVERWRITE mode the input object is mutated in place;
// otherwise a fresh heap number is allocated (falling back to the runtime
// if young-space allocation fails) and the value is copied with the sign
// flipped. Jumps to |slow| if r0 is not a heap number.
void UnaryOpStub::GenerateHeapNumberCodeSub(MacroAssembler* masm,
Label* slow) {
EmitCheckForHeapNumber(masm, r0, r1, r6, slow);
// r0 is a heap number. Get a new heap number in r1.
if (mode_ == UNARY_OVERWRITE) {
__ ldr(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset));
__ eor(r2, r2, Operand(HeapNumber::kSignMask));  // Flip sign.
__ str(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset));
} else {
Label slow_allocate_heapnumber, heapnumber_allocated;
__ AllocateHeapNumber(r1, r2, r3, r6, &slow_allocate_heapnumber);
__ jmp(&heapnumber_allocated);
__ bind(&slow_allocate_heapnumber);
{
// Runtime call may GC: preserve the operand across it.
FrameScope scope(masm, StackFrame::INTERNAL);
__ push(r0);
__ CallRuntime(Runtime::kNumberAlloc, 0);
__ mov(r1, Operand(r0));
__ pop(r0);
}
__ bind(&heapnumber_allocated);
// Copy mantissa and sign-flipped exponent into the new number.
__ ldr(r3, FieldMemOperand(r0, HeapNumber::kMantissaOffset));
__ ldr(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset));
__ str(r3, FieldMemOperand(r1, HeapNumber::kMantissaOffset));
__ eor(r2, r2, Operand(HeapNumber::kSignMask));  // Flip sign.
__ str(r2, FieldMemOperand(r1, HeapNumber::kExponentOffset));
__ mov(r0, Operand(r1));
}
__ Ret();
}
// Bitwise-not of a heap number in r0: truncates the double to an int32
// (ECMA ToInt32), inverts it, and returns a smi when the result fits;
// otherwise stores the result in a heap number (reusing r0 in OVERWRITE
// mode, allocating otherwise). Jumps to |slow| if r0 is not a heap number.
void UnaryOpStub::GenerateHeapNumberCodeBitNot(MacroAssembler* masm,
Label* slow) {
EmitCheckForHeapNumber(masm, r0, r1, r6, slow);
// Convert the heap number in r0 to an untagged integer in r1.
__ vldr(d0, FieldMemOperand(r0, HeapNumber::kValueOffset));
__ ECMAToInt32(r1, d0, r2, r3, r4, d1);
// Do the bitwise operation and check if the result fits in a smi.
Label try_float;
__ mvn(r1, Operand(r1));
// CMN sets the N flag if r1 is outside the 31-bit smi value range.
__ cmn(r1, Operand(0x40000000));
__ b(mi, &try_float);
// Tag the result as a smi and we're done.
__ SmiTag(r0, r1);
__ Ret();
// Try to store the result in a heap number.
__ bind(&try_float);
if (mode_ == UNARY_NO_OVERWRITE) {
Label slow_allocate_heapnumber, heapnumber_allocated;
__ AllocateHeapNumber(r0, r3, r4, r6, &slow_allocate_heapnumber);
__ jmp(&heapnumber_allocated);
__ bind(&slow_allocate_heapnumber);
{
// Runtime call may GC: the untagged int in r1 must be smuggled through
// as smi-looking values so the GC does not misinterpret it.
FrameScope scope(masm, StackFrame::INTERNAL);
// Push the lower bit of the result (left shifted to look like a smi).
__ mov(r2, Operand(r1, LSL, 31));
// Push the 31 high bits (bit 0 cleared to look like a smi).
__ bic(r1, r1, Operand(1));
__ Push(r2, r1);
__ CallRuntime(Runtime::kNumberAlloc, 0);
__ Pop(r2, r1);  // Restore the result.
__ orr(r1, r1, Operand(r2, LSR, 31));
}
__ bind(&heapnumber_allocated);
}
// Convert int32 result to double and store it into the heap number in r0.
__ vmov(s0, r1);
__ vcvt_f64_s32(d0, s0);
__ vstr(d0, FieldMemOperand(r0, HeapNumber::kValueOffset));
__ Ret();
}
// TODO(svenpanne): Use virtual functions instead of switch.
// Dispatch for the GENERIC state (any operand type).
// TODO(svenpanne): Use virtual functions instead of switch.
void UnaryOpStub::GenerateGenericStub(MacroAssembler* masm) {
switch (op_) {
case Token::SUB:
GenerateGenericStubSub(masm);
break;
case Token::BIT_NOT:
GenerateGenericStubBitNot(masm);
break;
default:
UNREACHABLE();
}
}
// GENERIC-state negation: smi path, then heap-number path; anything else
// goes to the JavaScript builtin (never transitions further).
void UnaryOpStub::GenerateGenericStubSub(MacroAssembler* masm) {
Label non_smi, slow;
GenerateSmiCodeSub(masm, &non_smi, &slow);
__ bind(&non_smi);
GenerateHeapNumberCodeSub(masm, &slow);
__ bind(&slow);
GenerateGenericCodeFallback(masm);
}
// GENERIC-state bitwise-not: smi path, then heap-number path; anything else
// goes to the JavaScript builtin (never transitions further).
void UnaryOpStub::GenerateGenericStubBitNot(MacroAssembler* masm) {
Label non_smi, slow;
GenerateSmiCodeBitNot(masm, &non_smi);
__ bind(&non_smi);
GenerateHeapNumberCodeBitNot(masm, &slow);
__ bind(&slow);
GenerateGenericCodeFallback(masm);
}
// Slowest path: pushes the operand and tail-calls the corresponding
// JavaScript builtin, which handles all remaining cases.
void UnaryOpStub::GenerateGenericCodeFallback(MacroAssembler* masm) {
// Handle the slow case by jumping to the JavaScript builtin.
__ push(r0);
switch (op_) {
case Token::SUB:
__ InvokeBuiltin(Builtins::UNARY_MINUS, JUMP_FUNCTION);
break;
case Token::BIT_NOT:
__ InvokeBuiltin(Builtins::BIT_NOT, JUMP_FUNCTION);
break;
default:
UNREACHABLE();
}
}
// Generates code to call a C function to do a double operation.
// This code never falls through, but returns with a heap number containing
// the result in r0.

View File

@ -80,71 +80,6 @@ class StoreBufferOverflowStub: public PlatformCodeStub {
};
// Old platform-specific UnaryOpStub (removed by this commit in favor of the
// HydrogenCodeStub version). Implements unary SUB / BIT_NOT as a
// UNARY_OP_IC with hand-written assembly, keyed on op, overwrite mode and
// the runtime-observed operand type.
class UnaryOpStub: public PlatformCodeStub {
public:
UnaryOpStub(Token::Value op,
UnaryOverwriteMode mode,
UnaryOpIC::TypeInfo operand_type = UnaryOpIC::UNINITIALIZED)
: op_(op),
mode_(mode),
operand_type_(operand_type) {
}
private:
Token::Value op_;
// Whether the input object may be mutated in place to hold the result.
UnaryOverwriteMode mode_;
// Operand type information determined at runtime.
UnaryOpIC::TypeInfo operand_type_;
virtual void PrintName(StringStream* stream);
// Bit fields packing the three parameters into the stub's minor key.
class ModeBits: public BitField<UnaryOverwriteMode, 0, 1> {};
class OpBits: public BitField<Token::Value, 1, 7> {};
class OperandTypeInfoBits: public BitField<UnaryOpIC::TypeInfo, 8, 3> {};
Major MajorKey() { return UnaryOp; }
int MinorKey() {
return ModeBits::encode(mode_)
| OpBits::encode(op_)
| OperandTypeInfoBits::encode(operand_type_);
}
// Note: A lot of the helper functions below will vanish when we use virtual
// function instead of switch more often.
void Generate(MacroAssembler* masm);
void GenerateTypeTransition(MacroAssembler* masm);
void GenerateSmiStub(MacroAssembler* masm);
void GenerateSmiStubSub(MacroAssembler* masm);
void GenerateSmiStubBitNot(MacroAssembler* masm);
void GenerateSmiCodeSub(MacroAssembler* masm, Label* non_smi, Label* slow);
void GenerateSmiCodeBitNot(MacroAssembler* masm, Label* slow);
void GenerateNumberStub(MacroAssembler* masm);
void GenerateNumberStubSub(MacroAssembler* masm);
void GenerateNumberStubBitNot(MacroAssembler* masm);
void GenerateHeapNumberCodeSub(MacroAssembler* masm, Label* slow);
void GenerateHeapNumberCodeBitNot(MacroAssembler* masm, Label* slow);
void GenerateGenericStub(MacroAssembler* masm);
void GenerateGenericStubSub(MacroAssembler* masm);
void GenerateGenericStubBitNot(MacroAssembler* masm);
void GenerateGenericCodeFallback(MacroAssembler* masm);
virtual Code::Kind GetCodeKind() const { return Code::UNARY_OP_IC; }
virtual InlineCacheState GetICState() {
return UnaryOpIC::ToState(operand_type_);
}
// Record the operand type on the generated code object so the IC system
// can read it back when patching.
virtual void FinishCode(Handle<Code> code) {
code->set_unary_op_type(operand_type_);
}
};
class StringHelper : public AllStatic {
public:
// Generate code for copying characters using a simple loop. This should only

View File

@ -4366,10 +4366,7 @@ void FullCodeGenerator::EmitUnaryOperation(UnaryOperation* expr,
const char* comment) {
// TODO(svenpanne): Allowing format strings in Comment would be nice here...
Comment cmt(masm_, comment);
bool can_overwrite = expr->expression()->ResultOverwriteAllowed();
UnaryOverwriteMode overwrite =
can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;
UnaryOpStub stub(expr->op(), overwrite);
UnaryOpStub stub(expr->op());
// UnaryOpStub expects the argument to be in the
// accumulator register r0.
VisitForAccumulatorValue(expr->expression());

View File

@ -1998,6 +1998,18 @@ LInstruction* LChunkBuilder::DoCheckHeapObject(HCheckHeapObject* instr) {
}
// Lowers HCheckSmi to LCheckSmi; needs an environment since a failed smi
// check deoptimizes.
LInstruction* LChunkBuilder::DoCheckSmi(HCheckSmi* instr) {
LOperand* value = UseRegisterAtStart(instr->value());
return AssignEnvironment(new(zone()) LCheckSmi(value));
}
// Lowers HIsNumberAndBranch; the value may be a constant, in which case no
// register is consumed.
LInstruction* LChunkBuilder::DoIsNumberAndBranch(HIsNumberAndBranch* instr) {
return new(zone())
LIsNumberAndBranch(UseRegisterOrConstantAtStart(instr->value()));
}
LInstruction* LChunkBuilder::DoCheckInstanceType(HCheckInstanceType* instr) {
LOperand* value = UseRegisterAtStart(instr->value());
LInstruction* result = new(zone()) LCheckInstanceType(value);

View File

@ -118,6 +118,7 @@ class LCodeGen;
V(IsConstructCallAndBranch) \
V(IsObjectAndBranch) \
V(IsStringAndBranch) \
V(IsNumberAndBranch) \
V(IsSmiAndBranch) \
V(IsUndetectableAndBranch) \
V(Label) \
@ -925,6 +926,19 @@ class LIsObjectAndBranch: public LControlInstruction<1, 1> {
};
class LIsNumberAndBranch: public LControlInstruction<1, 0> {
public:
explicit LIsNumberAndBranch(LOperand* value) {
inputs_[0] = value;
}
LOperand* value() { return inputs_[0]; }
DECLARE_CONCRETE_INSTRUCTION(IsNumberAndBranch, "is-number-and-branch")
DECLARE_HYDROGEN_ACCESSOR(IsNumberAndBranch)
};
class LIsStringAndBranch: public LControlInstruction<1, 1> {
public:
LIsStringAndBranch(LOperand* value, LOperand* temp) {

View File

@ -2130,12 +2130,12 @@ int LCodeGen::GetNextEmittedBlock() const {
template<class InstrType>
void LCodeGen::EmitBranch(InstrType instr, Condition cc) {
int right_block = instr->FalseDestination(chunk_);
int left_block = instr->TrueDestination(chunk_);
int right_block = instr->FalseDestination(chunk_);
int next_block = GetNextEmittedBlock();
if (right_block == left_block) {
if (right_block == left_block || cc == al) {
EmitGoto(left_block);
} else if (left_block == next_block) {
__ b(NegateCondition(cc), chunk_->GetAssemblyLabel(right_block));
@ -2153,6 +2153,25 @@ void LCodeGen::DoDebugBreak(LDebugBreak* instr) {
}
// Emits the is-number test: untagged numeric representations are trivially
// numbers (unconditional branch); tagged values are tested for smi, then
// for a heap-number map.
void LCodeGen::DoIsNumberAndBranch(LIsNumberAndBranch* instr) {
Representation r = instr->hydrogen()->value()->representation();
if (r.IsSmiOrInteger32() || r.IsDouble()) {
EmitBranch(instr, al);
} else {
ASSERT(r.IsTagged());
Register reg = ToRegister(instr->value());
HType type = instr->hydrogen()->value()->type();
if (type.IsTaggedNumber()) {
// NOTE(review): no return after this unconditional branch — the smi and
// map checks below are emitted as dead code in this case; presumably
// harmless but worth confirming the missing early-out is intentional.
EmitBranch(instr, al);
}
__ JumpIfSmi(reg, instr->TrueLabel(chunk_));
__ ldr(scratch0(), FieldMemOperand(reg, HeapObject::kMapOffset));
__ CompareRoot(scratch0(), Heap::kHeapNumberMapRootIndex);
EmitBranch(instr, eq);
}
}
void LCodeGen::DoBranch(LBranch* instr) {
Representation r = instr->hydrogen()->value()->representation();
if (r.IsInteger32() || r.IsSmi()) {

View File

@ -763,6 +763,45 @@ Handle<Code> CompareNilICStub::GenerateCode() {
}
// Builds the Hydrogen graph for the new UnaryOpStub. For purely numeric
// feedback it emits the math op directly; otherwise it guards with an
// is-number check and falls back to invoking the JS builtin for
// non-numbers.
template <>
HValue* CodeStubGraphBuilder<UnaryOpStub>::BuildCodeInitializedStub() {
UnaryOpStub* stub = casted_stub();
Handle<Type> type = stub->GetType(graph()->isolate());
HValue* input = GetParameter(0);
// Prevent unwanted HChange being inserted to ensure that the stub
// deopts on newly encountered types.
if (!type->Maybe(Type::Double())) {
input = AddInstruction(new(zone())
HForceRepresentation(input, Representation::Smi()));
}
if (!type->Is(Type::Number())) {
// If we expect to see other things than Numbers, we will create a generic
// stub, which handles all numbers and calls into the runtime for the rest.
IfBuilder if_number(this);
if_number.If<HIsNumberAndBranch>(input);
if_number.Then();
HInstruction* res = BuildUnaryMathOp(input, type, stub->operation());
if_number.Return(AddInstruction(res));
if_number.Else();
// Non-number: call the matching JS builtin (UNARY_MINUS / BIT_NOT).
HValue* function = AddLoadJSBuiltin(stub->ToJSBuiltin(), context());
Add<HPushArgument>(GetParameter(0));
HValue* result = Add<HInvokeFunction>(context(), function, 1);
if_number.Return(result);
if_number.End();
// Both arms return via if_number; this value is never observed.
return graph()->GetConstantUndefined();
}
return AddInstruction(BuildUnaryMathOp(input, type, stub->operation()));
}
// Generates this stub's code via the shared Hydrogen code-stub pipeline.
Handle<Code> UnaryOpStub::GenerateCode() {
return DoGenerateCode(this);
}
template <>
HValue* CodeStubGraphBuilder<ToBooleanStub>::BuildCodeInitializedStub() {
ToBooleanStub* stub = casted_stub();

View File

@ -184,9 +184,79 @@ const char* CodeStub::MajorName(CodeStub::Major major_key,
}
}
// Default base name: the stub's major-key name (subclasses may append to it).
void CodeStub::PrintBaseName(StringStream* stream) {
stream->Add("%s", MajorName(MajorKey(), false));
}
void CodeStub::PrintName(StringStream* stream) {
stream->Add("%s", MajorName(MajorKey(), false));
PrintBaseName(stream);
PrintState(stream);
}
// Maps the stub's token to the JavaScript builtin implementing it.
// Note the leading default: UNREACHABLE() here does not return a value, so
// execution relies on the case ordering below — keep it as written.
Builtins::JavaScript UnaryOpStub::ToJSBuiltin() {
switch (operation_) {
default:
UNREACHABLE();
case Token::SUB:
return Builtins::UNARY_MINUS;
case Token::BIT_NOT:
return Builtins::BIT_NOT;
}
}
// Resolves the builtin for this operation to its JSFunction on the current
// isolate's builtins object.
Handle<JSFunction> UnaryOpStub::ToJSFunction(Isolate* isolate) {
Handle<JSBuiltinsObject> builtins(isolate->js_builtins_object());
Object* builtin = builtins->javascript_builtin(ToJSBuiltin());
return Handle<JSFunction>(JSFunction::cast(builtin), isolate);
}
// Computes the operation's result by calling the JS builtin on |object|.
// Propagates a pending exception as Failure::Exception().
MaybeObject* UnaryOpStub::Result(Handle<Object> object, Isolate* isolate) {
Handle<JSFunction> builtin_function = ToJSFunction(isolate);
bool caught_exception;
Handle<Object> result = Execution::Call(builtin_function, object,
0, NULL, &caught_exception);
if (caught_exception) {
return Failure::Exception();
}
return *result;
}
// Folds the type of a newly observed operand into the stub's state set and
// traces the transition when --trace-ic is on.
void UnaryOpStub::UpdateStatus(Handle<Object> object) {
State old_state(state_);
if (object->IsSmi()) {
state_.Add(SMI);
// Raw-pointer compare against 0 — presumably valid because smi zero is
// tagged as the literal word 0; confirm against the smi tagging scheme.
if (operation_ == Token::SUB && *object == 0) {
// The result (-0) has to be represented as double.
state_.Add(HEAP_NUMBER);
}
} else if (object->IsHeapNumber()) {
state_.Add(HEAP_NUMBER);
} else {
state_.Add(GENERIC);
}
TraceTransition(old_state, state_);
}
// Converts the recorded state set into a Type: GENERIC widens to Any;
// otherwise the union of Smi and/or Double as observed (None if empty).
Handle<Type> UnaryOpStub::GetType(Isolate* isolate) {
if (state_.Contains(GENERIC)) {
return handle(Type::Any(), isolate);
}
Handle<Type> type = handle(Type::None(), isolate);
if (state_.Contains(SMI)) {
type = handle(
Type::Union(type, handle(Type::Smi(), isolate)), isolate);
}
if (state_.Contains(HEAP_NUMBER)) {
type = handle(
Type::Union(type, handle(Type::Double(), isolate)), isolate);
}
return type;
}
@ -275,6 +345,29 @@ void BinaryOpStub::GenerateCallRuntime(MacroAssembler* masm) {
#undef __
// Appends the operation suffix ("Minus" / "Not") to the generic stub name.
void UnaryOpStub::PrintBaseName(StringStream* stream) {
  CodeStub::PrintBaseName(stream);
  switch (operation_) {
    case Token::SUB:
      stream->Add("Minus");
      break;
    case Token::BIT_NOT:
      stream->Add("Not");
      break;
    default:
      // Other tokens leave the base name unchanged, as before.
      break;
  }
}
// Appends the observed-type state set to the stub's printed name.
void UnaryOpStub::PrintState(StringStream* stream) {
state_.Print(stream);
}
// Prints the state set as "(A,B,...)", e.g. "(HeapNumber,Smi)"; an empty
// set prints as "(None)".
void UnaryOpStub::State::Print(StringStream* stream) const {
stream->Add("(");
SimpleListPrinter printer(stream);
if (IsEmpty()) printer.Add("None");
if (Contains(GENERIC)) printer.Add("Generic");
if (Contains(HEAP_NUMBER)) printer.Add("HeapNumber");
if (Contains(SMI)) printer.Add("Smi");
stream->Add(")");
}
void BinaryOpStub::PrintName(StringStream* stream) {
const char* op_name = Token::Name(op_);
const char* overwrite_name;
@ -431,8 +524,9 @@ void ICCompareStub::Generate(MacroAssembler* masm) {
}
void CompareNilICStub::Record(Handle<Object> object) {
void CompareNilICStub::UpdateStatus(Handle<Object> object) {
ASSERT(state_ != State::Generic());
State old_state(state_);
if (object->IsNull()) {
state_.Add(NULL_TYPE);
} else if (object->IsUndefined()) {
@ -446,18 +540,22 @@ void CompareNilICStub::Record(Handle<Object> object) {
} else {
state_.Add(MONOMORPHIC_MAP);
}
TraceTransition(old_state, state_);
}
void CompareNilICStub::State::TraceTransition(State to) const {
template<class StateType>
void HydrogenCodeStub::TraceTransition(StateType from, StateType to) {
#ifdef DEBUG
if (!FLAG_trace_ic) return;
char buffer[100];
NoAllocationStringAllocator allocator(buffer,
static_cast<unsigned>(sizeof(buffer)));
StringStream stream(&allocator);
stream.Add("[CompareNilIC : ");
Print(&stream);
stream.Add("[");
PrintBaseName(&stream);
stream.Add(": ");
from.Print(&stream);
stream.Add("=>");
to.Print(&stream);
stream.Add("]\n");
@ -465,12 +563,14 @@ void CompareNilICStub::State::TraceTransition(State to) const {
#endif
}
// Appends which nil value this IC compares against to the base stub name.
void CompareNilICStub::PrintBaseName(StringStream* stream) {
CodeStub::PrintBaseName(stream);
stream->Add((nil_value_ == kNullValue) ? "(NullValue)":
"(UndefinedValue)");
}
void CompareNilICStub::PrintName(StringStream* stream) {
stream->Add("CompareNilICStub_");
void CompareNilICStub::PrintState(StringStream* stream) {
state_.Print(stream);
stream->Add((nil_value_ == kNullValue) ? "(NullValue|":
"(UndefinedValue|");
}
@ -615,16 +715,15 @@ void CallConstructStub::PrintName(StringStream* stream) {
}
bool ToBooleanStub::Record(Handle<Object> object) {
bool ToBooleanStub::UpdateStatus(Handle<Object> object) {
Types old_types(types_);
bool to_boolean_value = types_.Record(object);
old_types.TraceTransition(types_);
bool to_boolean_value = types_.UpdateStatus(object);
TraceTransition(old_types, types_);
return to_boolean_value;
}
void ToBooleanStub::PrintName(StringStream* stream) {
stream->Add("ToBooleanStub_");
void ToBooleanStub::PrintState(StringStream* stream) {
types_.Print(stream);
}
@ -645,24 +744,7 @@ void ToBooleanStub::Types::Print(StringStream* stream) const {
}
// Old per-stub transition tracer (removed by this commit in favor of the
// shared HydrogenCodeStub::TraceTransition template). Debug-only output
// gated on --trace-ic.
void ToBooleanStub::Types::TraceTransition(Types to) const {
#ifdef DEBUG
if (!FLAG_trace_ic) return;
char buffer[100];
// Format into a fixed stack buffer; NoAllocationStringAllocator never heap-
// allocates, so this is safe to call from contexts that must not allocate.
NoAllocationStringAllocator allocator(buffer,
static_cast<unsigned>(sizeof(buffer)));
StringStream stream(&allocator);
stream.Add("[ToBooleanIC : ");
Print(&stream);
stream.Add("=>");
to.Print(&stream);
stream.Add("]\n");
stream.OutputToStdOut();
#endif
}
bool ToBooleanStub::Types::Record(Handle<Object> object) {
bool ToBooleanStub::Types::UpdateStatus(Handle<Object> object) {
if (object->IsUndefined()) {
Add(UNDEFINED);
return false;

View File

@ -123,8 +123,6 @@ namespace internal {
// Mode to overwrite BinaryExpression values.
enum OverwriteMode { NO_OVERWRITE, OVERWRITE_LEFT, OVERWRITE_RIGHT };
enum UnaryOverwriteMode { UNARY_OVERWRITE, UNARY_NO_OVERWRITE };
// Stub is base classes of all stubs.
class CodeStub BASE_EMBEDDED {
@ -197,6 +195,8 @@ class CodeStub BASE_EMBEDDED {
return -1;
}
virtual void PrintName(StringStream* stream);
protected:
static bool CanUseFPRegisters();
@ -208,6 +208,11 @@ class CodeStub BASE_EMBEDDED {
// a fixed (non-moveable) code object.
virtual bool NeedsImmovableCode() { return false; }
// Returns a name for logging/debugging purposes.
SmartArrayPointer<const char> GetName();
virtual void PrintBaseName(StringStream* stream);
virtual void PrintState(StringStream* stream) { }
private:
// Perform bookkeeping required after code generation when stub code is
// initially generated.
@ -236,10 +241,6 @@ class CodeStub BASE_EMBEDDED {
// If a stub uses a special cache override this.
virtual bool UseSpecialCache() { return false; }
// Returns a name for logging/debugging purposes.
SmartArrayPointer<const char> GetName();
virtual void PrintName(StringStream* stream);
// Computes the key based on major and minor.
uint32_t GetKey() {
ASSERT(static_cast<int>(MajorKey()) < NUMBER_OF_IDS);
@ -354,6 +355,9 @@ class HydrogenCodeStub : public CodeStub {
Handle<Code> GenerateLightweightMissCode(Isolate* isolate);
template<class StateType>
void TraceTransition(StateType from, StateType to);
private:
class MinorKeyBits: public BitField<int, 0, kStubMinorKeyBits - 1> {};
class IsMissBits: public BitField<bool, kStubMinorKeyBits - 1, 1> {};
@ -520,6 +524,73 @@ class FastNewBlockContextStub : public PlatformCodeStub {
};
// New Hydrogen-based UnaryOpStub (replaces the platform-assembly version).
// Implements unary SUB / BIT_NOT as a UNARY_OP_IC whose observed operand
// types (SMI / HEAP_NUMBER / GENERIC) are tracked in an EnumSet and encoded,
// together with the operation token, in the extra IC state.
class UnaryOpStub : public HydrogenCodeStub {
public:
// Stub without type info available -> construct uninitialized
explicit UnaryOpStub(Token::Value operation)
: HydrogenCodeStub(UNINITIALIZED), operation_(operation) { }
// Reconstructs a stub from the extra IC state stored on a code object.
explicit UnaryOpStub(Code::ExtraICState ic_state) :
state_(StateBits::decode(ic_state)),
operation_(OperatorBits::decode(ic_state)) { }
virtual void InitializeInterfaceDescriptor(
Isolate* isolate,
CodeStubInterfaceDescriptor* descriptor);
virtual Code::Kind GetCodeKind() const { return Code::UNARY_OP_IC; }
// GENERIC => MEGAMORPHIC; no feedback yet => PREMONOMORPHIC; else
// MONOMORPHIC (one or more number types observed).
virtual InlineCacheState GetICState() {
if (state_.Contains(GENERIC)) {
return MEGAMORPHIC;
} else if (state_.IsEmpty()) {
return PREMONOMORPHIC;
} else {
return MONOMORPHIC;
}
}
// Packs operation and state into the code object's extra IC state.
virtual Code::ExtraICState GetExtraICState() {
return OperatorBits::encode(operation_) |
StateBits::encode(state_.ToIntegral());
}
Token::Value operation() { return operation_; }
Handle<JSFunction> ToJSFunction(Isolate* isolate);
Builtins::JavaScript ToJSBuiltin();
// Records the type of a newly seen operand into state_.
void UpdateStatus(Handle<Object> object);
// Computes the operation's result by calling the JS builtin.
MaybeObject* Result(Handle<Object> object, Isolate* isolate);
Handle<Code> GenerateCode();
// state_ as a Type lattice element (Smi/Double union, or Any if GENERIC).
Handle<Type> GetType(Isolate* isolate);
protected:
void PrintState(StringStream* stream);
void PrintBaseName(StringStream* stream);
private:
enum UnaryOpType {
SMI,
HEAP_NUMBER,
GENERIC,
NUMBER_OF_TYPES
};
// Set of operand types observed at runtime, packed into a byte.
class State : public EnumSet<UnaryOpType, byte> {
public:
State() : EnumSet<UnaryOpType, byte>() { }
explicit State(byte bits) : EnumSet<UnaryOpType, byte>(bits) { }
void Print(StringStream* stream) const;
};
// Bit-field layout of the extra IC state: low bits = state, then operator.
class StateBits : public BitField<int, 0, NUMBER_OF_TYPES> { };
class OperatorBits : public BitField<Token::Value, NUMBER_OF_TYPES, 8> { };
State state_;
Token::Value operation_;
virtual CodeStub::Major MajorKey() { return UnaryOp; }
// Minor key mirrors the extra IC state (when not a miss stub).
virtual int NotMissMinorKey() { return GetExtraICState(); }
};
class FastCloneShallowArrayStub : public HydrogenCodeStub {
public:
// Maximum length of copied elements array.
@ -1143,7 +1214,6 @@ class CompareNilICStub : public HydrogenCodeStub {
}
void Print(StringStream* stream) const;
void TraceTransition(State to) const;
};
static Handle<Type> StateToType(
@ -1206,14 +1276,15 @@ class CompareNilICStub : public HydrogenCodeStub {
return NilValueField::decode(state);
}
void Record(Handle<Object> object);
void UpdateStatus(Handle<Object> object);
bool IsMonomorphic() const { return state_.Contains(MONOMORPHIC_MAP); }
NilValue GetNilValue() const { return nil_value_; }
State GetState() const { return state_; }
void ClearState() { state_.RemoveAll(); }
virtual void PrintName(StringStream* stream);
virtual void PrintState(StringStream* stream);
virtual void PrintBaseName(StringStream* stream);
private:
friend class CompareNilIC;
@ -2009,8 +2080,7 @@ class ToBooleanStub: public HydrogenCodeStub {
byte ToByte() const { return ToIntegral(); }
void Print(StringStream* stream) const;
void TraceTransition(Types to) const;
bool Record(Handle<Object> object);
bool UpdateStatus(Handle<Object> object);
bool NeedsMap() const;
bool CanBeUndetectable() const;
bool IsGeneric() const { return ToIntegral() == Generic().ToIntegral(); }
@ -2023,7 +2093,7 @@ class ToBooleanStub: public HydrogenCodeStub {
explicit ToBooleanStub(Code::ExtraICState state)
: types_(static_cast<byte>(state)) { }
bool Record(Handle<Object> object);
bool UpdateStatus(Handle<Object> object);
Types GetTypes() { return types_; }
virtual Handle<Code> GenerateCode();
@ -2032,7 +2102,7 @@ class ToBooleanStub: public HydrogenCodeStub {
CodeStubInterfaceDescriptor* descriptor);
virtual Code::Kind GetCodeKind() const { return Code::TO_BOOLEAN_IC; }
virtual void PrintName(StringStream* stream);
virtual void PrintState(StringStream* stream);
virtual bool SometimesSetsUpAFrame() { return false; }

View File

@ -3078,6 +3078,11 @@ HType HCheckHeapObject::CalculateInferredType() {
}
// A value that passes the smi check is, by definition, a smi.
HType HCheckSmi::CalculateInferredType() {
return HType::Smi();
}
HType HPhi::CalculateInferredType() {
HType result = HType::Uninitialized();
for (int i = 0; i < OperandCount(); ++i) {

View File

@ -92,6 +92,7 @@ class LChunkBuilder;
V(CheckHeapObject) \
V(CheckInstanceType) \
V(CheckMaps) \
V(CheckSmi) \
V(CheckPrototypeMaps) \
V(ClampToUint8) \
V(ClassOfTestAndBranch) \
@ -128,6 +129,7 @@ class LChunkBuilder;
V(InvokeFunction) \
V(IsConstructCallAndBranch) \
V(IsObjectAndBranch) \
V(IsNumberAndBranch) \
V(IsStringAndBranch) \
V(IsSmiAndBranch) \
V(IsUndetectableAndBranch) \
@ -2934,6 +2936,49 @@ class HCheckInstanceType: public HUnaryOperation {
};
// Checks that its input is a smi, deoptimizing otherwise; the output is the
// same value with Smi representation. GVN-able and removable when the input
// is statically known to be a smi.
class HCheckSmi: public HUnaryOperation {
public:
explicit HCheckSmi(HValue* value) : HUnaryOperation(value) {
set_representation(Representation::Smi());
SetFlag(kUseGVN);
}
virtual Representation RequiredInputRepresentation(int index) {
return Representation::Tagged();
}
virtual HType CalculateInferredType();
// The check is redundant (and removed) if the input's type is already Smi.
virtual HValue* Canonicalize() {
HType value_type = value()->type();
if (value_type.IsSmi()) {
return NULL;
}
return this;
}
DECLARE_CONCRETE_INSTRUCTION(CheckSmi)
protected:
// All HCheckSmi instances perform the same check, so GVN may merge them.
virtual bool DataEquals(HValue* other) { return true; }
};
// Branches on whether its input is a number (smi or heap number). Accepts
// any input representation; untagged numeric representations are trivially
// true at code-gen time.
class HIsNumberAndBranch: public HUnaryControlInstruction {
public:
explicit HIsNumberAndBranch(HValue* value)
: HUnaryControlInstruction(value, NULL, NULL) {
SetFlag(kFlexibleRepresentation);
}
virtual Representation RequiredInputRepresentation(int index) {
return Representation::None();
}
DECLARE_CONCRETE_INSTRUCTION(IsNumberAndBranch)
};
class HCheckHeapObject: public HUnaryOperation {
public:
explicit HCheckHeapObject(HValue* value) : HUnaryOperation(value) {

View File

@ -1005,6 +1005,17 @@ HReturn* HGraphBuilder::AddReturn(HValue* value) {
}
// Inserts a soft deopt at the current block (hoisted in this commit from
// HOptimizedGraphBuilder so code stubs can use it too). No-op under
// --always-opt or if the block already deoptimizes.
void HGraphBuilder::AddSoftDeoptimize() {
isolate()->counters()->soft_deopts_requested()->Increment();
if (FLAG_always_opt) return;
if (current_block()->IsDeoptimizing()) return;
Add<HSoftDeoptimize>();
isolate()->counters()->soft_deopts_inserted()->Increment();
current_block()->MarkAsDeoptimizing();
graph()->set_has_soft_deoptimize(true);
}
HBasicBlock* HGraphBuilder::CreateBasicBlock(HEnvironment* env) {
HBasicBlock* b = graph()->CreateBasicBlock();
b->SetInitialEnvironment(env);
@ -1654,6 +1665,39 @@ HValue* HGraphBuilder::BuildCloneShallowArray(HContext* context,
}
// Builds the Hydrogen instruction for a numeric unary op: SUB becomes a
// multiply by -1 (with observed representations from |type|), BIT_NOT
// becomes HBitNot. A None type inserts a soft deopt to gather feedback.
// Shared by the full compiler graph builder and the UnaryOpStub.
HInstruction* HGraphBuilder::BuildUnaryMathOp(
HValue* input, Handle<Type> type, Token::Value operation) {
// We only handle the numeric cases here
type = handle(
Type::Intersect(type, handle(Type::Number(), isolate())), isolate());
switch (operation) {
default:
UNREACHABLE();
case Token::SUB: {
HInstruction* instr =
HMul::New(zone(), environment()->LookupContext(),
input, graph()->GetConstantMinus1());
Representation rep = Representation::FromType(type);
if (type->Is(Type::None())) {
// No feedback: deopt softly so the next tier sees real types.
AddSoftDeoptimize();
}
// HMul::New may constant-fold; only annotate a real binary operation.
if (instr->IsBinaryOperation()) {
HBinaryOperation* binop = HBinaryOperation::cast(instr);
binop->set_observed_input_representation(1, rep);
binop->set_observed_input_representation(2, rep);
}
return instr;
}
case Token::BIT_NOT:
if (type->Is(Type::None())) {
AddSoftDeoptimize();
}
return new(zone()) HBitNot(input);
}
}
void HGraphBuilder::BuildCompareNil(
HValue* value,
Handle<Type> type,
@ -1909,6 +1953,18 @@ HStoreNamedField* HGraphBuilder::AddStoreMapConstant(HValue *object,
}
// Emits loads that fetch a JavaScript builtin function:
// global object -> builtins object -> function slot for |builtin|.
HValue* HGraphBuilder::AddLoadJSBuiltin(Builtins::JavaScript builtin,
HContext* context) {
HGlobalObject* global_object = Add<HGlobalObject>(context);
HObjectAccess access = HObjectAccess::ForJSObjectOffset(
GlobalObject::kBuiltinsOffset);
HValue* builtins = AddLoad(global_object, access);
HObjectAccess function_access = HObjectAccess::ForJSObjectOffset(
JSBuiltinsObject::OffsetOfFunctionWithId(builtin));
return AddLoad(builtins, function_access);
}
HOptimizedGraphBuilder::HOptimizedGraphBuilder(CompilationInfo* info)
: HGraphBuilder(info),
function_state_(NULL),
@ -4067,17 +4123,6 @@ void HOptimizedGraphBuilder::PushAndAdd(HInstruction* instr) {
}
// Removed by this commit: identical body now lives on the HGraphBuilder
// base class so stub builders can share it.
void HOptimizedGraphBuilder::AddSoftDeoptimize() {
isolate()->counters()->soft_deopts_requested()->Increment();
if (FLAG_always_opt) return;
if (current_block()->IsDeoptimizing()) return;
Add<HSoftDeoptimize>();
isolate()->counters()->soft_deopts_inserted()->Increment();
current_block()->MarkAsDeoptimizing();
graph()->set_has_soft_deoptimize(true);
}
template <class Instruction>
HInstruction* HOptimizedGraphBuilder::PreProcessCall(Instruction* call) {
int count = call->argument_count();
@ -8292,18 +8337,8 @@ void HOptimizedGraphBuilder::VisitTypeof(UnaryOperation* expr) {
void HOptimizedGraphBuilder::VisitSub(UnaryOperation* expr) {
CHECK_ALIVE(VisitForValue(expr->expression()));
HValue* value = Pop();
HValue* context = environment()->LookupContext();
HInstruction* instr =
HMul::New(zone(), context, value, graph()->GetConstantMinus1());
Handle<Type> operand_type = expr->expression()->lower_type();
Representation rep = ToRepresentation(operand_type);
if (operand_type->Is(Type::None())) {
AddSoftDeoptimize();
}
if (instr->IsBinaryOperation()) {
HBinaryOperation::cast(instr)->set_observed_input_representation(1, rep);
HBinaryOperation::cast(instr)->set_observed_input_representation(2, rep);
}
HInstruction* instr = BuildUnaryMathOp(value, operand_type, Token::SUB);
return ast_context()->ReturnInstruction(instr, expr->id());
}
@ -8312,10 +8347,7 @@ void HOptimizedGraphBuilder::VisitBitNot(UnaryOperation* expr) {
CHECK_ALIVE(VisitForValue(expr->expression()));
HValue* value = Pop();
Handle<Type> operand_type = expr->expression()->lower_type();
if (operand_type->Is(Type::None())) {
AddSoftDeoptimize();
}
HInstruction* instr = new(zone()) HBitNot(value);
HInstruction* instr = BuildUnaryMathOp(value, operand_type, Token::BIT_NOT);
return ast_context()->ReturnInstruction(instr, expr->id());
}
@ -8369,7 +8401,7 @@ HInstruction* HOptimizedGraphBuilder::BuildIncrement(
CountOperation* expr) {
// The input to the count operation is on top of the expression stack.
TypeInfo info = expr->type();
Representation rep = ToRepresentation(info);
Representation rep = Representation::FromType(info);
if (rep.IsNone() || rep.IsTagged()) {
rep = Representation::Smi();
}
@ -8677,9 +8709,10 @@ HInstruction* HOptimizedGraphBuilder::BuildBinaryOperation(
Handle<Type> right_type = expr->right()->lower_type();
Handle<Type> result_type = expr->lower_type();
Maybe<int> fixed_right_arg = expr->fixed_right_arg();
Representation left_rep = ToRepresentation(left_type);
Representation right_rep = ToRepresentation(right_type);
Representation result_rep = ToRepresentation(result_type);
Representation left_rep = Representation::FromType(left_type);
Representation right_rep = Representation::FromType(right_type);
Representation result_rep = Representation::FromType(result_type);
if (left_type->Is(Type::None())) {
AddSoftDeoptimize();
// TODO(rossberg): we should be able to get rid of non-continuous defaults.
@ -8907,26 +8940,6 @@ void HOptimizedGraphBuilder::VisitArithmeticExpression(BinaryOperation* expr) {
}
// TODO(rossberg): this should die eventually.
Representation HOptimizedGraphBuilder::ToRepresentation(TypeInfo info) {
if (info.IsUninitialized()) return Representation::None();
// TODO(verwaest): Return Smi rather than Integer32.
if (info.IsSmi()) return Representation::Integer32();
if (info.IsInteger32()) return Representation::Integer32();
if (info.IsDouble()) return Representation::Double();
if (info.IsNumber()) return Representation::Double();
return Representation::Tagged();
}
Representation HOptimizedGraphBuilder::ToRepresentation(Handle<Type> type) {
if (type->Is(Type::None())) return Representation::None();
if (type->Is(Type::Signed32())) return Representation::Integer32();
if (type->Is(Type::Number())) return Representation::Double();
return Representation::Tagged();
}
void HOptimizedGraphBuilder::HandleLiteralCompareTypeof(CompareOperation* expr,
HTypeof* typeof_expr,
Handle<String> check) {
@ -9019,9 +9032,9 @@ void HOptimizedGraphBuilder::VisitCompareOperation(CompareOperation* expr) {
Handle<Type> left_type = expr->left()->lower_type();
Handle<Type> right_type = expr->right()->lower_type();
Handle<Type> combined_type = expr->combined_type();
Representation combined_rep = ToRepresentation(combined_type);
Representation left_rep = ToRepresentation(left_type);
Representation right_rep = ToRepresentation(right_type);
Representation combined_rep = Representation::FromType(combined_type);
Representation left_rep = Representation::FromType(left_type);
Representation right_rep = Representation::FromType(right_type);
CHECK_ALIVE(VisitForValue(expr->left()));
CHECK_ALIVE(VisitForValue(expr->right()));
@ -9150,8 +9163,8 @@ void HOptimizedGraphBuilder::VisitCompareOperation(CompareOperation* expr) {
result->set_position(expr->position());
return ast_context()->ReturnInstruction(result, expr->id());
} else {
// TODO(verwaest): Remove once ToRepresentation properly returns Smi when
// the IC measures Smi.
// TODO(verwaest): Remove once Representation::FromType properly
// returns Smi when the IC measures Smi.
if (left_type->Is(Type::Smi())) left_rep = Representation::Smi();
if (right_type->Is(Type::Smi())) right_rep = Representation::Smi();
HCompareIDAndBranch* result =

View File

@ -1129,6 +1129,10 @@ class HGraphBuilder {
HLoadNamedField* AddLoadFixedArrayLength(HValue *object);
HValue* AddLoadJSBuiltin(Builtins::JavaScript builtin, HContext* context);
void AddSoftDeoptimize();
class IfBuilder {
public:
explicit IfBuilder(HGraphBuilder* builder,
@ -1409,6 +1413,9 @@ class HGraphBuilder {
ElementsKind kind,
int length);
HInstruction* BuildUnaryMathOp(
HValue* value, Handle<Type> type, Token::Value token);
void BuildCompareNil(
HValue* value,
Handle<Type> type,
@ -1495,8 +1502,6 @@ class HOptimizedGraphBuilder: public HGraphBuilder, public AstVisitor {
bool inline_bailout() { return inline_bailout_; }
void AddSoftDeoptimize();
void Bailout(const char* reason);
HBasicBlock* CreateJoin(HBasicBlock* first,
@ -1676,9 +1681,6 @@ class HOptimizedGraphBuilder: public HGraphBuilder, public AstVisitor {
// to push them as outgoing parameters.
template <class Instruction> HInstruction* PreProcessCall(Instruction* call);
static Representation ToRepresentation(TypeInfo info);
static Representation ToRepresentation(Handle<Type> type);
void SetUpScope(Scope* scope);
virtual void VisitStatements(ZoneList<Statement*>* statements);

View File

@ -230,6 +230,17 @@ void ToBooleanStub::InitializeInterfaceDescriptor(
}
void UnaryOpStub::InitializeInterfaceDescriptor(
Isolate* isolate,
CodeStubInterfaceDescriptor* descriptor) {
static Register registers[] = { eax };
descriptor->register_param_count_ = 1;
descriptor->register_params_ = registers;
descriptor->deoptimization_handler_ =
FUNCTION_ADDR(UnaryOpIC_Miss);
}
#define __ ACCESS_MASM(masm)
@ -759,325 +770,6 @@ static void ConvertHeapNumberToInt32(MacroAssembler* masm,
}
void UnaryOpStub::PrintName(StringStream* stream) {
const char* op_name = Token::Name(op_);
const char* overwrite_name = NULL; // Make g++ happy.
switch (mode_) {
case UNARY_NO_OVERWRITE: overwrite_name = "Alloc"; break;
case UNARY_OVERWRITE: overwrite_name = "Overwrite"; break;
}
stream->Add("UnaryOpStub_%s_%s_%s",
op_name,
overwrite_name,
UnaryOpIC::GetName(operand_type_));
}
// TODO(svenpanne): Use virtual functions instead of switch.
void UnaryOpStub::Generate(MacroAssembler* masm) {
switch (operand_type_) {
case UnaryOpIC::UNINITIALIZED:
GenerateTypeTransition(masm);
break;
case UnaryOpIC::SMI:
GenerateSmiStub(masm);
break;
case UnaryOpIC::NUMBER:
GenerateNumberStub(masm);
break;
case UnaryOpIC::GENERIC:
GenerateGenericStub(masm);
break;
}
}
void UnaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
__ pop(ecx); // Save return address.
__ push(eax); // the operand
__ push(Immediate(Smi::FromInt(op_)));
__ push(Immediate(Smi::FromInt(mode_)));
__ push(Immediate(Smi::FromInt(operand_type_)));
__ push(ecx); // Push return address.
// Patch the caller to an appropriate specialized stub and return the
// operation result to the caller of the stub.
__ TailCallExternalReference(
ExternalReference(IC_Utility(IC::kUnaryOp_Patch), masm->isolate()), 4, 1);
}
// TODO(svenpanne): Use virtual functions instead of switch.
void UnaryOpStub::GenerateSmiStub(MacroAssembler* masm) {
switch (op_) {
case Token::SUB:
GenerateSmiStubSub(masm);
break;
case Token::BIT_NOT:
GenerateSmiStubBitNot(masm);
break;
default:
UNREACHABLE();
}
}
void UnaryOpStub::GenerateSmiStubSub(MacroAssembler* masm) {
Label non_smi, undo, slow;
GenerateSmiCodeSub(masm, &non_smi, &undo, &slow,
Label::kNear, Label::kNear, Label::kNear);
__ bind(&undo);
GenerateSmiCodeUndo(masm);
__ bind(&non_smi);
__ bind(&slow);
GenerateTypeTransition(masm);
}
void UnaryOpStub::GenerateSmiStubBitNot(MacroAssembler* masm) {
Label non_smi;
GenerateSmiCodeBitNot(masm, &non_smi);
__ bind(&non_smi);
GenerateTypeTransition(masm);
}
void UnaryOpStub::GenerateSmiCodeSub(MacroAssembler* masm,
Label* non_smi,
Label* undo,
Label* slow,
Label::Distance non_smi_near,
Label::Distance undo_near,
Label::Distance slow_near) {
// Check whether the value is a smi.
__ JumpIfNotSmi(eax, non_smi, non_smi_near);
// We can't handle -0 with smis, so use a type transition for that case.
__ test(eax, eax);
__ j(zero, slow, slow_near);
// Try optimistic subtraction '0 - value', saving operand in eax for undo.
__ mov(edx, eax);
__ Set(eax, Immediate(0));
__ sub(eax, edx);
__ j(overflow, undo, undo_near);
__ ret(0);
}
void UnaryOpStub::GenerateSmiCodeBitNot(
MacroAssembler* masm,
Label* non_smi,
Label::Distance non_smi_near) {
// Check whether the value is a smi.
__ JumpIfNotSmi(eax, non_smi, non_smi_near);
// Flip bits and revert inverted smi-tag.
__ not_(eax);
__ and_(eax, ~kSmiTagMask);
__ ret(0);
}
void UnaryOpStub::GenerateSmiCodeUndo(MacroAssembler* masm) {
__ mov(eax, edx);
}
// TODO(svenpanne): Use virtual functions instead of switch.
void UnaryOpStub::GenerateNumberStub(MacroAssembler* masm) {
switch (op_) {
case Token::SUB:
GenerateNumberStubSub(masm);
break;
case Token::BIT_NOT:
GenerateNumberStubBitNot(masm);
break;
default:
UNREACHABLE();
}
}
void UnaryOpStub::GenerateNumberStubSub(MacroAssembler* masm) {
Label non_smi, undo, slow, call_builtin;
GenerateSmiCodeSub(masm, &non_smi, &undo, &call_builtin, Label::kNear);
__ bind(&non_smi);
GenerateHeapNumberCodeSub(masm, &slow);
__ bind(&undo);
GenerateSmiCodeUndo(masm);
__ bind(&slow);
GenerateTypeTransition(masm);
__ bind(&call_builtin);
GenerateGenericCodeFallback(masm);
}
void UnaryOpStub::GenerateNumberStubBitNot(
MacroAssembler* masm) {
Label non_smi, slow;
GenerateSmiCodeBitNot(masm, &non_smi, Label::kNear);
__ bind(&non_smi);
GenerateHeapNumberCodeBitNot(masm, &slow);
__ bind(&slow);
GenerateTypeTransition(masm);
}
void UnaryOpStub::GenerateHeapNumberCodeSub(MacroAssembler* masm,
Label* slow) {
__ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
__ cmp(edx, masm->isolate()->factory()->heap_number_map());
__ j(not_equal, slow);
if (mode_ == UNARY_OVERWRITE) {
__ xor_(FieldOperand(eax, HeapNumber::kExponentOffset),
Immediate(HeapNumber::kSignMask)); // Flip sign.
} else {
__ mov(edx, eax);
// edx: operand
Label slow_allocate_heapnumber, heapnumber_allocated;
__ AllocateHeapNumber(eax, ebx, ecx, &slow_allocate_heapnumber);
__ jmp(&heapnumber_allocated, Label::kNear);
__ bind(&slow_allocate_heapnumber);
{
FrameScope scope(masm, StackFrame::INTERNAL);
__ push(edx);
__ CallRuntime(Runtime::kNumberAlloc, 0);
__ pop(edx);
}
__ bind(&heapnumber_allocated);
// eax: allocated 'empty' number
__ mov(ecx, FieldOperand(edx, HeapNumber::kExponentOffset));
__ xor_(ecx, HeapNumber::kSignMask); // Flip sign.
__ mov(FieldOperand(eax, HeapNumber::kExponentOffset), ecx);
__ mov(ecx, FieldOperand(edx, HeapNumber::kMantissaOffset));
__ mov(FieldOperand(eax, HeapNumber::kMantissaOffset), ecx);
}
__ ret(0);
}
void UnaryOpStub::GenerateHeapNumberCodeBitNot(MacroAssembler* masm,
Label* slow) {
__ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
__ cmp(edx, masm->isolate()->factory()->heap_number_map());
__ j(not_equal, slow);
// Convert the heap number in eax to an untagged integer in ecx.
IntegerConvert(masm, eax, CpuFeatures::IsSupported(SSE3), slow);
// Do the bitwise operation and check if the result fits in a smi.
Label try_float;
__ not_(ecx);
__ cmp(ecx, 0xc0000000);
__ j(sign, &try_float, Label::kNear);
// Tag the result as a smi and we're done.
STATIC_ASSERT(kSmiTagSize == 1);
__ lea(eax, Operand(ecx, times_2, kSmiTag));
__ ret(0);
// Try to store the result in a heap number.
__ bind(&try_float);
if (mode_ == UNARY_NO_OVERWRITE) {
Label slow_allocate_heapnumber, heapnumber_allocated;
__ mov(ebx, eax);
__ AllocateHeapNumber(eax, edx, edi, &slow_allocate_heapnumber);
__ jmp(&heapnumber_allocated);
__ bind(&slow_allocate_heapnumber);
{
FrameScope scope(masm, StackFrame::INTERNAL);
// Push the original HeapNumber on the stack. The integer value can't
// be stored since it's untagged and not in the smi range (so we can't
// smi-tag it). We'll recalculate the value after the GC instead.
__ push(ebx);
__ CallRuntime(Runtime::kNumberAlloc, 0);
// New HeapNumber is in eax.
__ pop(edx);
}
// IntegerConvert uses ebx and edi as scratch registers.
// This conversion won't go slow-case.
IntegerConvert(masm, edx, CpuFeatures::IsSupported(SSE3), slow);
__ not_(ecx);
__ bind(&heapnumber_allocated);
}
if (CpuFeatures::IsSupported(SSE2)) {
CpuFeatureScope use_sse2(masm, SSE2);
__ cvtsi2sd(xmm0, ecx);
__ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
} else {
__ push(ecx);
__ fild_s(Operand(esp, 0));
__ pop(ecx);
__ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
}
__ ret(0);
}
// TODO(svenpanne): Use virtual functions instead of switch.
void UnaryOpStub::GenerateGenericStub(MacroAssembler* masm) {
switch (op_) {
case Token::SUB:
GenerateGenericStubSub(masm);
break;
case Token::BIT_NOT:
GenerateGenericStubBitNot(masm);
break;
default:
UNREACHABLE();
}
}
void UnaryOpStub::GenerateGenericStubSub(MacroAssembler* masm) {
Label non_smi, undo, slow;
GenerateSmiCodeSub(masm, &non_smi, &undo, &slow, Label::kNear);
__ bind(&non_smi);
GenerateHeapNumberCodeSub(masm, &slow);
__ bind(&undo);
GenerateSmiCodeUndo(masm);
__ bind(&slow);
GenerateGenericCodeFallback(masm);
}
void UnaryOpStub::GenerateGenericStubBitNot(MacroAssembler* masm) {
Label non_smi, slow;
GenerateSmiCodeBitNot(masm, &non_smi, Label::kNear);
__ bind(&non_smi);
GenerateHeapNumberCodeBitNot(masm, &slow);
__ bind(&slow);
GenerateGenericCodeFallback(masm);
}
void UnaryOpStub::GenerateGenericCodeFallback(MacroAssembler* masm) {
// Handle the slow case by jumping to the corresponding JavaScript builtin.
__ pop(ecx); // pop return address.
__ push(eax);
__ push(ecx); // push return address
switch (op_) {
case Token::SUB:
__ InvokeBuiltin(Builtins::UNARY_MINUS, JUMP_FUNCTION);
break;
case Token::BIT_NOT:
__ InvokeBuiltin(Builtins::BIT_NOT, JUMP_FUNCTION);
break;
default:
UNREACHABLE();
}
}
void BinaryOpStub::Initialize() {
platform_specific_bit_ = CpuFeatures::IsSupported(SSE3);
}

View File

@ -86,80 +86,6 @@ class StoreBufferOverflowStub: public PlatformCodeStub {
};
class UnaryOpStub: public PlatformCodeStub {
public:
UnaryOpStub(Token::Value op,
UnaryOverwriteMode mode,
UnaryOpIC::TypeInfo operand_type = UnaryOpIC::UNINITIALIZED)
: op_(op),
mode_(mode),
operand_type_(operand_type) {
}
private:
Token::Value op_;
UnaryOverwriteMode mode_;
// Operand type information determined at runtime.
UnaryOpIC::TypeInfo operand_type_;
virtual void PrintName(StringStream* stream);
class ModeBits: public BitField<UnaryOverwriteMode, 0, 1> {};
class OpBits: public BitField<Token::Value, 1, 7> {};
class OperandTypeInfoBits: public BitField<UnaryOpIC::TypeInfo, 8, 3> {};
Major MajorKey() { return UnaryOp; }
int MinorKey() {
return ModeBits::encode(mode_)
| OpBits::encode(op_)
| OperandTypeInfoBits::encode(operand_type_);
}
// Note: A lot of the helper functions below will vanish when we use virtual
// function instead of switch more often.
void Generate(MacroAssembler* masm);
void GenerateTypeTransition(MacroAssembler* masm);
void GenerateSmiStub(MacroAssembler* masm);
void GenerateSmiStubSub(MacroAssembler* masm);
void GenerateSmiStubBitNot(MacroAssembler* masm);
void GenerateSmiCodeSub(MacroAssembler* masm,
Label* non_smi,
Label* undo,
Label* slow,
Label::Distance non_smi_near = Label::kFar,
Label::Distance undo_near = Label::kFar,
Label::Distance slow_near = Label::kFar);
void GenerateSmiCodeBitNot(MacroAssembler* masm,
Label* non_smi,
Label::Distance non_smi_near = Label::kFar);
void GenerateSmiCodeUndo(MacroAssembler* masm);
void GenerateNumberStub(MacroAssembler* masm);
void GenerateNumberStubSub(MacroAssembler* masm);
void GenerateNumberStubBitNot(MacroAssembler* masm);
void GenerateHeapNumberCodeSub(MacroAssembler* masm, Label* slow);
void GenerateHeapNumberCodeBitNot(MacroAssembler* masm, Label* slow);
void GenerateGenericStub(MacroAssembler* masm);
void GenerateGenericStubSub(MacroAssembler* masm);
void GenerateGenericStubBitNot(MacroAssembler* masm);
void GenerateGenericCodeFallback(MacroAssembler* masm);
virtual Code::Kind GetCodeKind() const { return Code::UNARY_OP_IC; }
virtual InlineCacheState GetICState() {
return UnaryOpIC::ToState(operand_type_);
}
virtual void FinishCode(Handle<Code> code) {
code->set_unary_op_type(operand_type_);
}
};
class StringHelper : public AllStatic {
public:
// Generate code for copying characters using a simple loop. This should only

View File

@ -4364,10 +4364,7 @@ void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
void FullCodeGenerator::EmitUnaryOperation(UnaryOperation* expr,
const char* comment) {
Comment cmt(masm_, comment);
bool can_overwrite = expr->expression()->ResultOverwriteAllowed();
UnaryOverwriteMode overwrite =
can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;
UnaryOpStub stub(expr->op(), overwrite);
UnaryOpStub stub(expr->op());
// UnaryOpStub expects the argument to be in the
// accumulator register eax.
VisitForAccumulatorValue(expr->expression());

View File

@ -2036,6 +2036,16 @@ void LCodeGen::DoArithmeticD(LArithmeticD* instr) {
}
void LCodeGen::DoNegateNoSSE2D(LNegateNoSSE2D* instr) {
__ push(Immediate(-1));
__ fild_s(Operand(esp, 0));
__ add(esp, Immediate(kPointerSize));
__ fmulp();
CurrentInstructionReturnsX87Result();
}
void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
ASSERT(ToRegister(instr->context()).is(esi));
ASSERT(ToRegister(instr->left()).is(edx));
@ -2058,12 +2068,12 @@ int LCodeGen::GetNextEmittedBlock() const {
template<class InstrType>
void LCodeGen::EmitBranch(InstrType instr, Condition cc) {
int right_block = instr->FalseDestination(chunk_);
int left_block = instr->TrueDestination(chunk_);
int right_block = instr->FalseDestination(chunk_);
int next_block = GetNextEmittedBlock();
if (right_block == left_block) {
if (right_block == left_block || cc == no_condition) {
EmitGoto(left_block);
} else if (left_block == next_block) {
__ j(NegateCondition(cc), chunk_->GetAssemblyLabel(right_block));
@ -2076,6 +2086,25 @@ void LCodeGen::EmitBranch(InstrType instr, Condition cc) {
}
void LCodeGen::DoIsNumberAndBranch(LIsNumberAndBranch* instr) {
Representation r = instr->hydrogen()->value()->representation();
if (r.IsSmiOrInteger32() || r.IsDouble()) {
EmitBranch(instr, no_condition);
} else {
ASSERT(r.IsTagged());
Register reg = ToRegister(instr->value());
HType type = instr->hydrogen()->value()->type();
if (type.IsTaggedNumber()) {
EmitBranch(instr, no_condition);
}
__ JumpIfSmi(reg, instr->TrueLabel(chunk_));
__ cmp(FieldOperand(reg, HeapObject::kMapOffset),
factory()->heap_number_map());
EmitBranch(instr, equal);
}
}
void LCodeGen::DoBranch(LBranch* instr) {
Representation r = instr->hydrogen()->value()->representation();
if (r.IsSmiOrInteger32()) {

View File

@ -1578,7 +1578,17 @@ LInstruction* LChunkBuilder::DoMul(HMul* instr) {
}
return DefineSameAsFirst(mul);
} else if (instr->representation().IsDouble()) {
return DoArithmeticD(Token::MUL, instr);
if (CpuFeatures::IsSafeForSnapshot(SSE2)) {
return DoArithmeticD(Token::MUL, instr);
}
ASSERT(instr->right()->IsConstant() &&
static_cast<HConstant*>(instr->right())->DoubleValue() == -1);
// TODO(olivf) This is currently just a hack to support the UnaryOp Minus
// Stub. This will go away once we can use more than one X87 register,
// thus fully support binary instructions without SSE2.
LOperand* left = UseX87TopOfStack(instr->left());
LNegateNoSSE2D* result = new(zone()) LNegateNoSSE2D(left);
return DefineX87TOS(result);
} else {
ASSERT(instr->representation().IsSmiOrTagged());
return DoArithmeticT(Token::MUL, instr);
@ -2038,6 +2048,18 @@ LInstruction* LChunkBuilder::DoCheckHeapObject(HCheckHeapObject* instr) {
}
LInstruction* LChunkBuilder::DoCheckSmi(HCheckSmi* instr) {
LOperand* value = UseRegisterAtStart(instr->value());
return AssignEnvironment(new(zone()) LCheckSmi(value));
}
LInstruction* LChunkBuilder::DoIsNumberAndBranch(HIsNumberAndBranch* instr) {
return new(zone())
LIsNumberAndBranch(UseRegisterOrConstantAtStart(instr->value()));
}
LInstruction* LChunkBuilder::DoCheckInstanceType(HCheckInstanceType* instr) {
LOperand* value = UseRegisterAtStart(instr->value());
LOperand* temp = TempRegister();

View File

@ -114,6 +114,7 @@ class LCodeGen;
V(IsObjectAndBranch) \
V(IsStringAndBranch) \
V(IsSmiAndBranch) \
V(IsNumberAndBranch) \
V(IsUndetectableAndBranch) \
V(Label) \
V(LazyBailout) \
@ -142,6 +143,7 @@ class LCodeGen;
V(MathTan) \
V(ModI) \
V(MulI) \
V(NegateNoSSE2D) \
V(NumberTagD) \
V(NumberTagI) \
V(NumberTagU) \
@ -659,6 +661,18 @@ class LMathFloorOfDiv: public LTemplateInstruction<1, 2, 1> {
};
class LNegateNoSSE2D: public LTemplateInstruction<1, 1, 0> {
public:
explicit LNegateNoSSE2D(LOperand* value) {
inputs_[0] = value;
}
LOperand* value() { return inputs_[0]; }
DECLARE_CONCRETE_INSTRUCTION(NegateNoSSE2D, "negate-no-sse2-d")
};
class LMulI: public LTemplateInstruction<1, 2, 1> {
public:
LMulI(LOperand* left, LOperand* right, LOperand* temp) {
@ -883,6 +897,19 @@ class LIsObjectAndBranch: public LControlInstruction<1, 1> {
};
class LIsNumberAndBranch: public LControlInstruction<1, 0> {
public:
explicit LIsNumberAndBranch(LOperand* value) {
inputs_[0] = value;
}
LOperand* value() { return inputs_[0]; }
DECLARE_CONCRETE_INSTRUCTION(IsNumberAndBranch, "is-number-and-branch")
DECLARE_HYDROGEN_ACCESSOR(IsNumberAndBranch)
};
class LIsStringAndBranch: public LControlInstruction<1, 1> {
public:
LIsStringAndBranch(LOperand* value, LOperand* temp) {

151
src/ic.cc
View File

@ -2401,86 +2401,6 @@ RUNTIME_FUNCTION(MaybeObject*, KeyedStoreIC_MissForceGeneric) {
}
void UnaryOpIC::patch(Code* code) {
set_target(code);
}
const char* UnaryOpIC::GetName(TypeInfo type_info) {
switch (type_info) {
case UNINITIALIZED: return "Uninitialized";
case SMI: return "Smi";
case NUMBER: return "Number";
case GENERIC: return "Generic";
default: return "Invalid";
}
}
UnaryOpIC::State UnaryOpIC::ToState(TypeInfo type_info) {
switch (type_info) {
case UNINITIALIZED:
return v8::internal::UNINITIALIZED;
case SMI:
case NUMBER:
return MONOMORPHIC;
case GENERIC:
return v8::internal::GENERIC;
}
UNREACHABLE();
return v8::internal::UNINITIALIZED;
}
Handle<Type> UnaryOpIC::TypeInfoToType(TypeInfo type_info, Isolate* isolate) {
switch (type_info) {
case UNINITIALIZED:
return handle(Type::None(), isolate);
case SMI:
return handle(Type::Smi(), isolate);
case NUMBER:
return handle(Type::Number(), isolate);
case GENERIC:
return handle(Type::Any(), isolate);
}
UNREACHABLE();
return handle(Type::Any(), isolate);
}
UnaryOpIC::TypeInfo UnaryOpIC::GetTypeInfo(Handle<Object> operand) {
v8::internal::TypeInfo operand_type =
v8::internal::TypeInfo::FromValue(operand);
if (operand_type.IsSmi()) {
return SMI;
} else if (operand_type.IsNumber()) {
return NUMBER;
} else {
return GENERIC;
}
}
UnaryOpIC::TypeInfo UnaryOpIC::ComputeNewType(
TypeInfo current_type,
TypeInfo previous_type) {
switch (previous_type) {
case UNINITIALIZED:
return current_type;
case SMI:
return (current_type == GENERIC) ? GENERIC : NUMBER;
case NUMBER:
return GENERIC;
case GENERIC:
// We should never do patching if we are in GENERIC state.
UNREACHABLE();
return GENERIC;
}
UNREACHABLE();
return GENERIC;
}
void BinaryOpIC::patch(Code* code) {
set_target(code);
}
@ -2558,57 +2478,24 @@ void BinaryOpIC::StubInfoToType(int minor_key,
}
RUNTIME_FUNCTION(MaybeObject*, UnaryOp_Patch) {
ASSERT(args.length() == 4);
MaybeObject* UnaryOpIC::Transition(Handle<Object> object) {
Code::ExtraICState extra_ic_state = target()->extended_extra_ic_state();
UnaryOpStub stub(extra_ic_state);
stub.UpdateStatus(object);
Handle<Code> code = stub.GetCode(isolate());
set_target(*code);
return stub.Result(object, isolate());
}
RUNTIME_FUNCTION(MaybeObject*, UnaryOpIC_Miss) {
HandleScope scope(isolate);
Handle<Object> operand = args.at<Object>(0);
Token::Value op = static_cast<Token::Value>(args.smi_at(1));
UnaryOverwriteMode mode = static_cast<UnaryOverwriteMode>(args.smi_at(2));
UnaryOpIC::TypeInfo previous_type =
static_cast<UnaryOpIC::TypeInfo>(args.smi_at(3));
UnaryOpIC::TypeInfo type = UnaryOpIC::GetTypeInfo(operand);
type = UnaryOpIC::ComputeNewType(type, previous_type);
UnaryOpStub stub(op, mode, type);
Handle<Code> code = stub.GetCode(isolate);
if (!code.is_null()) {
if (FLAG_trace_ic) {
PrintF("[UnaryOpIC in ");
JavaScriptFrame::PrintTop(isolate, stdout, false, true);
PrintF(" %s => %s #%s @ %p]\n",
UnaryOpIC::GetName(previous_type),
UnaryOpIC::GetName(type),
Token::Name(op),
static_cast<void*>(*code));
}
UnaryOpIC ic(isolate);
ic.patch(*code);
}
Handle<JSBuiltinsObject> builtins(isolate->js_builtins_object());
Object* builtin = NULL; // Initialization calms down the compiler.
switch (op) {
case Token::SUB:
builtin = builtins->javascript_builtin(Builtins::UNARY_MINUS);
break;
case Token::BIT_NOT:
builtin = builtins->javascript_builtin(Builtins::BIT_NOT);
break;
default:
UNREACHABLE();
}
Handle<JSFunction> builtin_function(JSFunction::cast(builtin), isolate);
bool caught_exception;
Handle<Object> result = Execution::Call(builtin_function, operand, 0, NULL,
&caught_exception);
if (caught_exception) {
return Failure::Exception();
}
return *result;
Handle<Object> object = args.at<Object>(0);
UnaryOpIC ic(isolate);
return ic.Transition(object);
}
@ -3069,9 +2956,7 @@ MaybeObject* CompareNilIC::CompareNil(Handle<Object> object) {
// types must be supported as a result of the miss.
bool already_monomorphic = stub.IsMonomorphic();
CompareNilICStub::State old_state = stub.GetState();
stub.Record(object);
old_state.TraceTransition(stub.GetState());
stub.UpdateStatus(object);
NilValue nil = stub.GetNilValue();
@ -3108,7 +2993,7 @@ RUNTIME_FUNCTION(MaybeObject*, Unreachable) {
MaybeObject* ToBooleanIC::ToBoolean(Handle<Object> object,
Code::ExtraICState extra_ic_state) {
ToBooleanStub stub(extra_ic_state);
bool to_boolean_value = stub.Record(object);
bool to_boolean_value = stub.UpdateStatus(object);
Handle<Code> code = stub.GetCode(isolate());
set_target(*code);
return Smi::FromInt(to_boolean_value ? 1 : 0);

View File

@ -57,7 +57,6 @@ namespace internal {
ICU(LoadPropertyWithInterceptorForCall) \
ICU(KeyedLoadPropertyWithInterceptor) \
ICU(StoreInterceptorProperty) \
ICU(UnaryOp_Patch) \
ICU(BinaryOp_Patch) \
ICU(CompareIC_Miss) \
ICU(CompareNilIC_Miss) \
@ -681,28 +680,9 @@ class KeyedStoreIC: public StoreIC {
class UnaryOpIC: public IC {
public:
// sorted: increasingly more unspecific (ignoring UNINITIALIZED)
// TODO(svenpanne) Using enums+switch is an antipattern, use a class instead.
enum TypeInfo {
UNINITIALIZED,
SMI,
NUMBER,
GENERIC
};
explicit UnaryOpIC(Isolate* isolate) : IC(EXTRA_CALL_FRAME, isolate) { }
static Handle<Type> TypeInfoToType(TypeInfo info, Isolate* isolate);
explicit UnaryOpIC(Isolate* isolate) : IC(NO_EXTRA_FRAME, isolate) { }
void patch(Code* code);
static const char* GetName(TypeInfo type_info);
static State ToState(TypeInfo type_info);
static TypeInfo GetTypeInfo(Handle<Object> operand);
static TypeInfo ComputeNewType(TypeInfo type, TypeInfo previous);
MUST_USE_RESULT MaybeObject* Transition(Handle<Object> object);
};
@ -838,6 +818,7 @@ void PatchInlinedSmiCode(Address address, InlinedSmiCheck check);
DECLARE_RUNTIME_FUNCTION(MaybeObject*, KeyedLoadIC_MissFromStubFailure);
DECLARE_RUNTIME_FUNCTION(MaybeObject*, KeyedStoreIC_MissFromStubFailure);
DECLARE_RUNTIME_FUNCTION(MaybeObject*, UnaryOpIC_Miss);
DECLARE_RUNTIME_FUNCTION(MaybeObject*, CompareNilIC_Miss);
DECLARE_RUNTIME_FUNCTION(MaybeObject*, ToBooleanIC_Miss);

View File

@ -10610,6 +10610,7 @@ const char* Code::StubType2String(StubType type) {
void Code::PrintExtraICState(FILE* out, Kind kind, ExtraICState extra) {
PrintF(out, "extra_ic_state = ");
const char* name = NULL;
switch (kind) {
case CALL_IC:
@ -10627,9 +10628,9 @@ void Code::PrintExtraICState(FILE* out, Kind kind, ExtraICState extra) {
break;
}
if (name != NULL) {
PrintF(out, "extra_ic_state = %s\n", name);
PrintF(out, "%s\n", name);
} else {
PrintF(out, "extra_ic_state = %d\n", extra);
PrintF(out, "%d\n", extra);
}
}
@ -10638,7 +10639,8 @@ void Code::Disassemble(const char* name, FILE* out) {
PrintF(out, "kind = %s\n", Kind2String(kind()));
if (is_inline_cache_stub()) {
PrintF(out, "ic_state = %s\n", ICState2String(ic_state()));
PrintExtraICState(out, kind(), extra_ic_state());
PrintExtraICState(out, kind(), needs_extended_extra_ic_state(kind()) ?
extended_extra_ic_state() : extra_ic_state());
if (ic_state() == MONOMORPHIC) {
PrintF(out, "type = %s\n", StubType2String(type()));
}

View File

@ -4567,7 +4567,8 @@ class Code: public HeapObject {
// TODO(danno): This is a bit of a hack right now since there are still
// clients of this API that pass "extra" values in for argc. These clients
// should be retrofitted to used ExtendedExtraICState.
return kind == COMPARE_NIL_IC || kind == TO_BOOLEAN_IC;
return kind == COMPARE_NIL_IC || kind == TO_BOOLEAN_IC ||
kind == UNARY_OP_IC;
}
inline StubType type(); // Only valid for monomorphic IC stubs.

View File

@ -55,6 +55,8 @@ namespace v8 {
namespace internal {
class Smi;
class Type;
class TypeInfo;
// Type of properties.
// Order of properties is significant.
@ -101,6 +103,10 @@ class Representation {
static Representation FromKind(Kind kind) { return Representation(kind); }
// TODO(rossberg): this should die eventually.
static Representation FromType(TypeInfo info);
static Representation FromType(Handle<Type> type);
bool Equals(const Representation& other) const {
return kind_ == other.kind_;
}

View File

@ -395,8 +395,7 @@ Handle<Type> TypeFeedbackOracle::UnaryType(TypeFeedbackId id) {
}
Handle<Code> code = Handle<Code>::cast(object);
ASSERT(code->is_unary_op_stub());
return UnaryOpIC::TypeInfoToType(
static_cast<UnaryOpIC::TypeInfo>(code->unary_op_type()), isolate());
return UnaryOpStub(code->extra_ic_state()).GetType(isolate());
}
@ -698,4 +697,16 @@ void TypeFeedbackOracle::SetInfo(TypeFeedbackId ast_id, Object* target) {
#endif
}
Representation Representation::FromType(TypeInfo info) {
if (info.IsUninitialized()) return Representation::None();
// TODO(verwaest): Return Smi rather than Integer32.
if (info.IsSmi()) return Representation::Integer32();
if (info.IsInteger32()) return Representation::Integer32();
if (info.IsDouble()) return Representation::Double();
if (info.IsNumber()) return Representation::Double();
return Representation::Tagged();
}
} } // namespace v8::internal

View File

@ -476,4 +476,13 @@ Type* Type::Optional(Handle<Type> type) {
: Union(type, Undefined()->handle_via_isolate_of(*type));
}
Representation Representation::FromType(Handle<Type> type) {
if (type->Is(Type::None())) return Representation::None();
if (type->Is(Type::Signed32())) return Representation::Integer32();
if (type->Is(Type::Number())) return Representation::Double();
return Representation::Tagged();
}
} } // namespace v8::internal

View File

@ -222,7 +222,18 @@ void ToBooleanStub::InitializeInterfaceDescriptor(
descriptor->deoptimization_handler_ =
FUNCTION_ADDR(ToBooleanIC_Miss);
descriptor->SetMissHandler(
ExternalReference(IC_Utility(IC::kToBooleanIC_Miss), isolate));
ExternalReference(IC_Utility(IC::kToBooleanIC_Miss), isolate));
}
void UnaryOpStub::InitializeInterfaceDescriptor(
Isolate* isolate,
CodeStubInterfaceDescriptor* descriptor) {
static Register registers[] = { rax };
descriptor->register_param_count_ = 1;
descriptor->register_params_ = registers;
descriptor->deoptimization_handler_ =
FUNCTION_ADDR(UnaryOpIC_Miss);
}
@ -643,259 +654,6 @@ void IntegerConvert(MacroAssembler* masm,
}
void UnaryOpStub::Generate(MacroAssembler* masm) {
switch (operand_type_) {
case UnaryOpIC::UNINITIALIZED:
GenerateTypeTransition(masm);
break;
case UnaryOpIC::SMI:
GenerateSmiStub(masm);
break;
case UnaryOpIC::NUMBER:
GenerateNumberStub(masm);
break;
case UnaryOpIC::GENERIC:
GenerateGenericStub(masm);
break;
}
}
void UnaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
__ pop(rcx); // Save return address.
__ push(rax); // the operand
__ Push(Smi::FromInt(op_));
__ Push(Smi::FromInt(mode_));
__ Push(Smi::FromInt(operand_type_));
__ push(rcx); // Push return address.
// Patch the caller to an appropriate specialized stub and return the
// operation result to the caller of the stub.
__ TailCallExternalReference(
ExternalReference(IC_Utility(IC::kUnaryOp_Patch), masm->isolate()), 4, 1);
}
// TODO(svenpanne): Use virtual functions instead of switch.
void UnaryOpStub::GenerateSmiStub(MacroAssembler* masm) {
switch (op_) {
case Token::SUB:
GenerateSmiStubSub(masm);
break;
case Token::BIT_NOT:
GenerateSmiStubBitNot(masm);
break;
default:
UNREACHABLE();
}
}
void UnaryOpStub::GenerateSmiStubSub(MacroAssembler* masm) {
Label slow;
GenerateSmiCodeSub(masm, &slow, &slow, Label::kNear, Label::kNear);
__ bind(&slow);
GenerateTypeTransition(masm);
}
void UnaryOpStub::GenerateSmiStubBitNot(MacroAssembler* masm) {
Label non_smi;
GenerateSmiCodeBitNot(masm, &non_smi, Label::kNear);
__ bind(&non_smi);
GenerateTypeTransition(masm);
}
void UnaryOpStub::GenerateSmiCodeSub(MacroAssembler* masm,
Label* non_smi,
Label* slow,
Label::Distance non_smi_near,
Label::Distance slow_near) {
Label done;
__ JumpIfNotSmi(rax, non_smi, non_smi_near);
__ SmiNeg(rax, rax, &done, Label::kNear);
__ jmp(slow, slow_near);
__ bind(&done);
__ ret(0);
}
// Applies bitwise-not to the smi in rax and returns; jumps to |non_smi|
// if rax is not a smi. Unlike negation, smi bitwise-not cannot fail.
void UnaryOpStub::GenerateSmiCodeBitNot(MacroAssembler* masm,
                                        Label* non_smi,
                                        Label::Distance non_smi_near) {
  __ JumpIfNotSmi(rax, non_smi, non_smi_near);
  __ SmiNot(rax, rax);
  __ ret(0);
}
// TODO(svenpanne): Use virtual functions instead of switch.
// Dispatches NUMBER-state code generation on the operation token.
void UnaryOpStub::GenerateNumberStub(MacroAssembler* masm) {
  if (op_ == Token::SUB) {
    GenerateNumberStubSub(masm);
  } else if (op_ == Token::BIT_NOT) {
    GenerateNumberStubBitNot(masm);
  } else {
    UNREACHABLE();  // Only SUB and BIT_NOT are unary-op stub operations.
  }
}
// NUMBER-state code for Token::SUB: smi fast path first, then the heap
// number path. A smi whose negation is not a smi goes straight to the
// generic builtin fallback; a non-number operand transitions the IC.
void UnaryOpStub::GenerateNumberStubSub(MacroAssembler* masm) {
  Label non_smi, slow, call_builtin;
  GenerateSmiCodeSub(masm, &non_smi, &call_builtin, Label::kNear);
  __ bind(&non_smi);
  GenerateHeapNumberCodeSub(masm, &slow);
  __ bind(&slow);
  GenerateTypeTransition(masm);
  __ bind(&call_builtin);
  GenerateGenericCodeFallback(masm);
}
// NUMBER-state code for Token::BIT_NOT: smi fast path first, then the
// heap number path; a non-number operand transitions the IC.
void UnaryOpStub::GenerateNumberStubBitNot(
    MacroAssembler* masm) {
  Label non_smi, slow;
  GenerateSmiCodeBitNot(masm, &non_smi, Label::kNear);
  __ bind(&non_smi);
  GenerateHeapNumberCodeBitNot(masm, &slow);
  __ bind(&slow);
  GenerateTypeTransition(masm);
}
// Negates the heap number in rax, or jumps to |slow| if rax does not hold
// a heap number. Double negation only requires flipping the IEEE 754 sign
// bit (bit 63), so no floating-point arithmetic is emitted.
void UnaryOpStub::GenerateHeapNumberCodeSub(MacroAssembler* masm,
                                            Label* slow) {
  // Check if the operand is a heap number.
  __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
                 Heap::kHeapNumberMapRootIndex);
  __ j(not_equal, slow);

  // Operand is a float, negate its value by flipping the sign bit.
  if (mode_ == UNARY_OVERWRITE) {
    // The operand may be mutated in place: xor the sign bit directly into
    // the number's value field.
    __ Set(kScratchRegister, 0x01);
    __ shl(kScratchRegister, Immediate(63));
    __ xor_(FieldOperand(rax, HeapNumber::kValueOffset), kScratchRegister);
  } else {
    // Allocate a heap number before calculating the answer,
    // so we don't have an untagged double around during GC.
    Label slow_allocate_heapnumber, heapnumber_allocated;
    __ AllocateHeapNumber(rcx, rbx, &slow_allocate_heapnumber);
    __ jmp(&heapnumber_allocated);

    __ bind(&slow_allocate_heapnumber);
    {
      // Inline allocation failed: call the runtime allocator, preserving
      // the operand (rax) across the call.
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ push(rax);
      __ CallRuntime(Runtime::kNumberAlloc, 0);
      __ movq(rcx, rax);
      __ pop(rax);
    }
    __ bind(&heapnumber_allocated);
    // rcx: allocated 'empty' number

    // Copy the double value to the new heap number, flipping the sign.
    __ movq(rdx, FieldOperand(rax, HeapNumber::kValueOffset));
    __ Set(kScratchRegister, 0x01);
    __ shl(kScratchRegister, Immediate(63));
    __ xor_(rdx, kScratchRegister);  // Flip sign.
    __ movq(FieldOperand(rcx, HeapNumber::kValueOffset), rdx);
    __ movq(rax, rcx);
  }
  __ ret(0);
}
// Applies BIT_NOT to the heap number in rax, or jumps to |slow| if rax is
// not a heap number. The number is first truncated to a 32-bit integer;
// the result always fits in a smi on x64 (32-bit payload), so no
// allocation is needed.
void UnaryOpStub::GenerateHeapNumberCodeBitNot(MacroAssembler* masm,
                                               Label* slow) {
  // Check if the operand is a heap number.
  __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
                 Heap::kHeapNumberMapRootIndex);
  __ j(not_equal, slow);

  // Convert the heap number in rax to an untagged integer, also in rax.
  IntegerConvert(masm, rax, rax);

  // Do the bitwise operation and smi tag the result.
  __ notl(rax);
  __ Integer32ToSmi(rax, rax);
  __ ret(0);
}
// TODO(svenpanne): Use virtual functions instead of switch.
// Dispatches GENERIC-state code generation on the operation token.
void UnaryOpStub::GenerateGenericStub(MacroAssembler* masm) {
  if (op_ == Token::SUB) {
    GenerateGenericStubSub(masm);
  } else if (op_ == Token::BIT_NOT) {
    GenerateGenericStubBitNot(masm);
  } else {
    UNREACHABLE();  // Only SUB and BIT_NOT are unary-op stub operations.
  }
}
// GENERIC-state code for Token::SUB: smi fast path, heap number path,
// and finally the JavaScript builtin fallback for all other operands.
void UnaryOpStub::GenerateGenericStubSub(MacroAssembler* masm) {
  Label non_smi, slow;
  GenerateSmiCodeSub(masm, &non_smi, &slow, Label::kNear);
  __ bind(&non_smi);
  GenerateHeapNumberCodeSub(masm, &slow);
  __ bind(&slow);
  GenerateGenericCodeFallback(masm);
}
// GENERIC-state code for Token::BIT_NOT: smi fast path, heap number path,
// and finally the JavaScript builtin fallback for all other operands.
void UnaryOpStub::GenerateGenericStubBitNot(MacroAssembler* masm) {
  Label non_smi, slow;
  GenerateSmiCodeBitNot(masm, &non_smi, Label::kNear);
  __ bind(&non_smi);
  GenerateHeapNumberCodeBitNot(masm, &slow);
  __ bind(&slow);
  GenerateGenericCodeFallback(masm);
}
// Slowest path: re-push the operand beneath the return address and tail
// jump to the corresponding JavaScript builtin, which can handle
// arbitrary inputs.
void UnaryOpStub::GenerateGenericCodeFallback(MacroAssembler* masm) {
  // Handle the slow case by jumping to the JavaScript builtin.
  __ pop(rcx);  // pop return address
  __ push(rax);
  __ push(rcx);  // push return address
  switch (op_) {
    case Token::SUB:
      __ InvokeBuiltin(Builtins::UNARY_MINUS, JUMP_FUNCTION);
      break;
    case Token::BIT_NOT:
      __ InvokeBuiltin(Builtins::BIT_NOT, JUMP_FUNCTION);
      break;
    default:
      UNREACHABLE();
  }
}
// Formats a human-readable stub name of the form
// "UnaryOpStub_<op>_<Alloc|Overwrite>_<operand type>" for tracing.
void UnaryOpStub::PrintName(StringStream* stream) {
  // UnaryOverwriteMode has exactly two values.
  const char* overwrite_name =
      (mode_ == UNARY_OVERWRITE) ? "Overwrite" : "Alloc";
  stream->Add("UnaryOpStub_%s_%s_%s",
              Token::Name(op_),
              overwrite_name,
              UnaryOpIC::GetName(operand_type_));
}
void BinaryOpStub::Initialize() {}

View File

@ -81,77 +81,6 @@ class StoreBufferOverflowStub: public PlatformCodeStub {
};
// Platform code stub implementing the unary operations Token::SUB and
// Token::BIT_NOT with IC-style type feedback: operand_type_ starts out
// UNINITIALIZED and is refined at runtime via GenerateTypeTransition.
class UnaryOpStub: public PlatformCodeStub {
 public:
  UnaryOpStub(Token::Value op,
              UnaryOverwriteMode mode,
              UnaryOpIC::TypeInfo operand_type = UnaryOpIC::UNINITIALIZED)
      : op_(op),
        mode_(mode),
        operand_type_(operand_type) {
  }

 private:
  Token::Value op_;         // Token::SUB or Token::BIT_NOT.
  UnaryOverwriteMode mode_; // Whether the operand may be mutated in place.
  // Operand type information determined at runtime.
  UnaryOpIC::TypeInfo operand_type_;

  virtual void PrintName(StringStream* stream);

  // Bit fields packing the three stub parameters into MinorKey().
  class ModeBits: public BitField<UnaryOverwriteMode, 0, 1> {};
  class OpBits: public BitField<Token::Value, 1, 7> {};
  class OperandTypeInfoBits: public BitField<UnaryOpIC::TypeInfo, 8, 3> {};

  Major MajorKey() { return UnaryOp; }
  int MinorKey() {
    return ModeBits::encode(mode_)
           | OpBits::encode(op_)
           | OperandTypeInfoBits::encode(operand_type_);
  }

  // Note: A lot of the helper functions below will vanish when we use virtual
  // function instead of switch more often.
  void Generate(MacroAssembler* masm);
  void GenerateTypeTransition(MacroAssembler* masm);
  // SMI-state generators and their shared inline-code helpers.
  void GenerateSmiStub(MacroAssembler* masm);
  void GenerateSmiStubSub(MacroAssembler* masm);
  void GenerateSmiStubBitNot(MacroAssembler* masm);
  void GenerateSmiCodeSub(MacroAssembler* masm,
                          Label* non_smi,
                          Label* slow,
                          Label::Distance non_smi_near = Label::kFar,
                          Label::Distance slow_near = Label::kFar);
  void GenerateSmiCodeBitNot(MacroAssembler* masm,
                             Label* non_smi,
                             Label::Distance non_smi_near);
  // NUMBER-state generators (smi or heap number operands).
  void GenerateNumberStub(MacroAssembler* masm);
  void GenerateNumberStubSub(MacroAssembler* masm);
  void GenerateNumberStubBitNot(MacroAssembler* masm);
  void GenerateHeapNumberCodeSub(MacroAssembler* masm, Label* slow);
  void GenerateHeapNumberCodeBitNot(MacroAssembler* masm, Label* slow);
  // GENERIC-state generators with the JavaScript builtin fallback.
  void GenerateGenericStub(MacroAssembler* masm);
  void GenerateGenericStubSub(MacroAssembler* masm);
  void GenerateGenericStubBitNot(MacroAssembler* masm);
  void GenerateGenericCodeFallback(MacroAssembler* masm);

  virtual Code::Kind GetCodeKind() const { return Code::UNARY_OP_IC; }

  virtual InlineCacheState GetICState() {
    return UnaryOpIC::ToState(operand_type_);
  }

  // Record the operand type on the generated code object so the IC system
  // can read the stub's state back.
  virtual void FinishCode(Handle<Code> code) {
    code->set_unary_op_type(operand_type_);
  }
};
class StringHelper : public AllStatic {
public:
// Generate code for copying characters using a simple loop. This should only

View File

@ -4353,10 +4353,7 @@ void FullCodeGenerator::EmitUnaryOperation(UnaryOperation* expr,
const char* comment) {
// TODO(svenpanne): Allowing format strings in Comment would be nice here...
Comment cmt(masm_, comment);
bool can_overwrite = expr->expression()->ResultOverwriteAllowed();
UnaryOverwriteMode overwrite =
can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;
UnaryOpStub stub(expr->op(), overwrite);
UnaryOpStub stub(expr->op());
// UnaryOpStub expects the argument to be in the
// accumulator register rax.
VisitForAccumulatorValue(expr->expression());

View File

@ -1834,12 +1834,12 @@ int LCodeGen::GetNextEmittedBlock() const {
template<class InstrType>
void LCodeGen::EmitBranch(InstrType instr, Condition cc) {
int right_block = instr->FalseDestination(chunk_);
int left_block = instr->TrueDestination(chunk_);
int right_block = instr->FalseDestination(chunk_);
int next_block = GetNextEmittedBlock();
if (right_block == left_block) {
if (right_block == left_block || cc == no_condition) {
EmitGoto(left_block);
} else if (left_block == next_block) {
__ j(NegateCondition(cc), chunk_->GetAssemblyLabel(right_block));
@ -1859,6 +1859,25 @@ void LCodeGen::DoDebugBreak(LDebugBreak* instr) {
}
// Emits the branch for IsNumberAndBranch: the value is a number if its
// representation is already untagged numeric, if its static HType proves
// it, or if the dynamic checks (smi tag, heap-number map) succeed.
void LCodeGen::DoIsNumberAndBranch(LIsNumberAndBranch* instr) {
  Representation r = instr->hydrogen()->value()->representation();
  if (r.IsSmiOrInteger32() || r.IsDouble()) {
    // Untagged numeric representations are numbers by construction.
    EmitBranch(instr, no_condition);
  } else {
    ASSERT(r.IsTagged());
    Register reg = ToRegister(instr->value());
    HType type = instr->hydrogen()->value()->type();
    if (type.IsTaggedNumber()) {
      // Statically proven to be a number: branch unconditionally and
      // return so the (unreachable) dynamic checks below are not emitted
      // after the branch.
      EmitBranch(instr, no_condition);
      return;
    }
    // Dynamic checks: smis are numbers; otherwise compare the map against
    // the heap-number map.
    __ JumpIfSmi(reg, instr->TrueLabel(chunk_));
    __ CompareRoot(FieldOperand(reg, HeapObject::kMapOffset),
                   Heap::kHeapNumberMapRootIndex);
    EmitBranch(instr, equal);
  }
}
void LCodeGen::DoBranch(LBranch* instr) {
Representation r = instr->hydrogen()->value()->representation();
if (r.IsInteger32()) {

View File

@ -1911,6 +1911,18 @@ LInstruction* LChunkBuilder::DoCheckHeapObject(HCheckHeapObject* instr) {
}
// Lowers HCheckSmi to an LCheckSmi that can deoptimize, with its input in
// a register.
LInstruction* LChunkBuilder::DoCheckSmi(HCheckSmi* instr) {
  return AssignEnvironment(
      new(zone()) LCheckSmi(UseRegisterAtStart(instr->value())));
}
// Lowers HIsNumberAndBranch; the tested value may be a register or a
// constant operand.
LInstruction* LChunkBuilder::DoIsNumberAndBranch(HIsNumberAndBranch* instr) {
  LOperand* value = UseRegisterOrConstantAtStart(instr->value());
  return new(zone()) LIsNumberAndBranch(value);
}
LInstruction* LChunkBuilder::DoCheckInstanceType(HCheckInstanceType* instr) {
LOperand* value = UseRegisterAtStart(instr->value());
LCheckInstanceType* result = new(zone()) LCheckInstanceType(value);

View File

@ -120,6 +120,7 @@ class LCodeGen;
V(IsObjectAndBranch) \
V(IsStringAndBranch) \
V(IsSmiAndBranch) \
V(IsNumberAndBranch) \
V(IsUndetectableAndBranch) \
V(Label) \
V(LazyBailout) \
@ -866,6 +867,19 @@ class LIsObjectAndBranch: public LControlInstruction<1, 0> {
};
// Control instruction with one input and no temps that branches on
// whether its value operand is a number (see the corresponding
// DoIsNumberAndBranch code generator).
class LIsNumberAndBranch: public LControlInstruction<1, 0> {
 public:
  explicit LIsNumberAndBranch(LOperand* value) {
    inputs_[0] = value;
  }

  // The operand under test.
  LOperand* value() { return inputs_[0]; }

  DECLARE_CONCRETE_INSTRUCTION(IsNumberAndBranch, "is-number-and-branch")
  DECLARE_HYDROGEN_ACCESSOR(IsNumberAndBranch)
};
class LIsStringAndBranch: public LControlInstruction<1, 1> {
public:
explicit LIsStringAndBranch(LOperand* value, LOperand* temp) {