diff --git a/src/arm/code-stubs-arm.cc b/src/arm/code-stubs-arm.cc index 47df2a353e..ef286bbe01 100644 --- a/src/arm/code-stubs-arm.cc +++ b/src/arm/code-stubs-arm.cc @@ -3181,18 +3181,43 @@ void SubStringStub::Generate(MacroAssembler* masm) { void ToNumberStub::Generate(MacroAssembler* masm) { // The ToNumber stub takes one argument in r0. - Label check_heap_number, call_builtin; - __ JumpIfNotSmi(r0, &check_heap_number); + Label not_smi; + __ JumpIfNotSmi(r0, &not_smi); __ Ret(); + __ bind(&not_smi); - __ bind(&check_heap_number); + Label not_heap_number; __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset)); - __ CompareRoot(r1, Heap::kHeapNumberMapRootIndex); - __ b(ne, &call_builtin); + __ ldrb(r1, FieldMemOperand(r1, Map::kInstanceTypeOffset)); + // r0: object + // r1: instance type. + __ cmp(r1, Operand(HEAP_NUMBER_TYPE)); + __ b(ne, &not_heap_number); __ Ret(); + __ bind(&not_heap_number); - __ bind(&call_builtin); - __ push(r0); + Label not_string, slow_string; + __ cmp(r1, Operand(FIRST_NONSTRING_TYPE)); + __ b(hs, &not_string); + // Check if string has a cached array index. + __ ldr(r2, FieldMemOperand(r0, String::kHashFieldOffset)); + __ tst(r2, Operand(String::kContainsCachedArrayIndexMask)); + __ b(ne, &slow_string); + __ IndexFromHash(r2, r0); + __ Ret(); + __ bind(&slow_string); + __ push(r0); // Push argument. + __ TailCallRuntime(Runtime::kStringToNumber, 1, 1); + __ bind(&not_string); + + Label not_oddball; + __ cmp(r1, Operand(ODDBALL_TYPE)); + __ b(ne, &not_oddball); + __ ldr(r0, FieldMemOperand(r0, Oddball::kToNumberOffset)); + __ Ret(); + __ bind(&not_oddball); + + __ push(r0); // Push argument. 
__ InvokeBuiltin(Builtins::TO_NUMBER, JUMP_FUNCTION); } diff --git a/src/arm64/code-stubs-arm64.cc b/src/arm64/code-stubs-arm64.cc index 6392640741..e773b531a1 100644 --- a/src/arm64/code-stubs-arm64.cc +++ b/src/arm64/code-stubs-arm64.cc @@ -3887,16 +3887,43 @@ void SubStringStub::Generate(MacroAssembler* masm) { void ToNumberStub::Generate(MacroAssembler* masm) { // The ToNumber stub takes one argument in x0. - Label check_heap_number, call_builtin; - __ JumpIfNotSmi(x0, &check_heap_number); + Label not_smi; + __ JumpIfNotSmi(x0, &not_smi); __ Ret(); + __ Bind(&not_smi); - __ bind(&check_heap_number); - __ JumpIfNotHeapNumber(x0, &call_builtin); + Label not_heap_number; + __ Ldr(x1, FieldMemOperand(x0, HeapObject::kMapOffset)); + __ Ldrb(x1, FieldMemOperand(x1, Map::kInstanceTypeOffset)); + // x0: object + // x1: instance type + __ Cmp(x1, HEAP_NUMBER_TYPE); + __ B(ne, &not_heap_number); __ Ret(); + __ Bind(&not_heap_number); - __ bind(&call_builtin); - __ push(x0); + Label not_string, slow_string; + __ Cmp(x1, FIRST_NONSTRING_TYPE); + __ B(hs, &not_string); + // Check if string has a cached array index. + __ Ldr(x2, FieldMemOperand(x0, String::kHashFieldOffset)); + __ Tst(x2, Operand(String::kContainsCachedArrayIndexMask)); + __ B(ne, &slow_string); + __ IndexFromHash(x2, x0); + __ Ret(); + __ Bind(&slow_string); + __ Push(x0); // Push argument. + __ TailCallRuntime(Runtime::kStringToNumber, 1, 1); + __ Bind(&not_string); + + Label not_oddball; + __ Cmp(x1, ODDBALL_TYPE); + __ B(ne, &not_oddball); + __ Ldr(x0, FieldMemOperand(x0, Oddball::kToNumberOffset)); + __ Ret(); + __ Bind(&not_oddball); + + __ Push(x0); // Push argument. 
__ InvokeBuiltin(Builtins::TO_NUMBER, JUMP_FUNCTION); } diff --git a/src/compiler/js-generic-lowering.cc b/src/compiler/js-generic-lowering.cc index 3275256a28..4886442314 100644 --- a/src/compiler/js-generic-lowering.cc +++ b/src/compiler/js-generic-lowering.cc @@ -232,12 +232,6 @@ void JSGenericLowering::LowerJSToBoolean(Node* node) { void JSGenericLowering::LowerJSToNumber(Node* node) { Callable callable = CodeFactory::ToNumber(isolate()); - // TODO(mstarzinger): Embedding the context this way prevents sharing of code - // across native contexts. - if (!info_context_constant_.is_set()) { - info_context_constant_.set(jsgraph()->HeapConstant(info()->context())); - } - node->ReplaceInput(1, info_context_constant_.get()); ReplaceWithStubCall(node, callable, FlagsForNode(node)); } diff --git a/src/compiler/js-generic-lowering.h b/src/compiler/js-generic-lowering.h index c3049883cd..eb234b84ff 100644 --- a/src/compiler/js-generic-lowering.h +++ b/src/compiler/js-generic-lowering.h @@ -60,7 +60,6 @@ class JSGenericLowering : public Reducer { private: CompilationInfo* info_; - SetOncePointer<Node> info_context_constant_; JSGraph* jsgraph_; Linkage* linkage_; }; diff --git a/src/ia32/code-stubs-ia32.cc b/src/ia32/code-stubs-ia32.cc index 8db05a16e8..b75ae3a531 100644 --- a/src/ia32/code-stubs-ia32.cc +++ b/src/ia32/code-stubs-ia32.cc @@ -3227,18 +3227,45 @@ void SubStringStub::Generate(MacroAssembler* masm) { void ToNumberStub::Generate(MacroAssembler* masm) { // The ToNumber stub takes one argument in eax. 
- Label check_heap_number, call_builtin; - __ JumpIfNotSmi(eax, &check_heap_number, Label::kNear); + Label not_smi; + __ JumpIfNotSmi(eax, &not_smi, Label::kNear); __ Ret(); + __ bind(&not_smi); - __ bind(&check_heap_number); + Label not_heap_number; __ CompareMap(eax, masm->isolate()->factory()->heap_number_map()); - __ j(not_equal, &call_builtin, Label::kNear); + __ j(not_equal, &not_heap_number, Label::kNear); __ Ret(); + __ bind(&not_heap_number); - __ bind(&call_builtin); - __ pop(ecx); // Pop return address. - __ push(eax); + Label not_string, slow_string; + __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, edi); + // eax: object + // edi: object map + __ j(above_equal, &not_string, Label::kNear); + // Check if string has a cached array index. + __ test(FieldOperand(eax, String::kHashFieldOffset), + Immediate(String::kContainsCachedArrayIndexMask)); + __ j(not_zero, &slow_string, Label::kNear); + __ mov(eax, FieldOperand(eax, String::kHashFieldOffset)); + __ IndexFromHash(eax, eax); + __ Ret(); + __ bind(&slow_string); + __ pop(ecx); // Pop return address. + __ push(eax); // Push argument. + __ push(ecx); // Push return address. + __ TailCallRuntime(Runtime::kStringToNumber, 1, 1); + __ bind(&not_string); + + Label not_oddball; + __ CmpInstanceType(edi, ODDBALL_TYPE); + __ j(not_equal, &not_oddball, Label::kNear); + __ mov(eax, FieldOperand(eax, Oddball::kToNumberOffset)); + __ Ret(); + __ bind(&not_oddball); + + __ pop(ecx); // Pop return address. + __ push(eax); // Push argument. __ push(ecx); // Push return address. __ InvokeBuiltin(Builtins::TO_NUMBER, JUMP_FUNCTION); } diff --git a/src/x64/code-stubs-x64.cc b/src/x64/code-stubs-x64.cc index da72c29f3b..f327b50085 100644 --- a/src/x64/code-stubs-x64.cc +++ b/src/x64/code-stubs-x64.cc @@ -3169,20 +3169,47 @@ void SubStringStub::Generate(MacroAssembler* masm) { void ToNumberStub::Generate(MacroAssembler* masm) { // The ToNumber stub takes one argument in rax. 
- Label check_heap_number, call_builtin; - __ JumpIfNotSmi(rax, &check_heap_number, Label::kNear); + Label not_smi; + __ JumpIfNotSmi(rax, &not_smi, Label::kNear); __ Ret(); + __ bind(&not_smi); - __ bind(&check_heap_number); + Label not_heap_number; __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset), Heap::kHeapNumberMapRootIndex); - __ j(not_equal, &call_builtin, Label::kNear); + __ j(not_equal, &not_heap_number, Label::kNear); __ Ret(); + __ bind(&not_heap_number); - __ bind(&call_builtin); - __ popq(rcx); // Pop return address. - __ pushq(rax); - __ pushq(rcx); // Push return address. + Label not_string, slow_string; + __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, rdi); + // rax: object + // rdi: object map + __ j(above_equal, &not_string, Label::kNear); + // Check if string has a cached array index. + __ testl(FieldOperand(rax, String::kHashFieldOffset), + Immediate(String::kContainsCachedArrayIndexMask)); + __ j(not_zero, &slow_string, Label::kNear); + __ movl(rax, FieldOperand(rax, String::kHashFieldOffset)); + __ IndexFromHash(rax, rax); + __ Ret(); + __ bind(&slow_string); + __ PopReturnAddressTo(rcx); // Pop return address. + __ Push(rax); // Push argument. + __ PushReturnAddressFrom(rcx); // Push return address. + __ TailCallRuntime(Runtime::kStringToNumber, 1, 1); + __ bind(&not_string); + + Label not_oddball; + __ CmpInstanceType(rdi, ODDBALL_TYPE); + __ j(not_equal, &not_oddball, Label::kNear); + __ movp(rax, FieldOperand(rax, Oddball::kToNumberOffset)); + __ Ret(); + __ bind(&not_oddball); + + __ PopReturnAddressTo(rcx); // Pop return address. + __ Push(rax); // Push argument. + __ PushReturnAddressFrom(rcx); // Push return address. __ InvokeBuiltin(Builtins::TO_NUMBER, JUMP_FUNCTION); }