diff --git a/src/ia32/stub-cache-ia32.cc b/src/ia32/stub-cache-ia32.cc
index b7828b81ab..df7ad4467f 100644
--- a/src/ia32/stub-cache-ia32.cc
+++ b/src/ia32/stub-cache-ia32.cc
@@ -2479,6 +2479,8 @@ Handle<Code> CallStubCompiler::CompileMathAbsCall(
   STATIC_ASSERT(kSmiTag == 0);
   __ JumpIfNotSmi(eax, &not_smi);
 
+  // Branchless abs implementation, refer to below:
+  // http://graphics.stanford.edu/~seander/bithacks.html#IntegerAbs
   // Set ebx to 1...1 (== -1) if the argument is negative, or to 0...0
   // otherwise.
   __ mov(ebx, eax);
diff --git a/src/x64/lithium-codegen-x64.cc b/src/x64/lithium-codegen-x64.cc
index cc635100b1..a814318f7c 100644
--- a/src/x64/lithium-codegen-x64.cc
+++ b/src/x64/lithium-codegen-x64.cc
@@ -3449,7 +3449,7 @@
 }
 
 
-void LCodeGen::EmitInteger64MathAbs(LMathAbs* instr) {
+void LCodeGen::EmitSmiMathAbs(LMathAbs* instr) {
   Register input_reg = ToRegister(instr->value());
   __ testq(input_reg, input_reg);
   Label is_positive;
@@ -3486,16 +3486,14 @@
   } else if (r.IsInteger32()) {
     EmitIntegerMathAbs(instr);
   } else if (r.IsSmi()) {
-    EmitInteger64MathAbs(instr);
+    EmitSmiMathAbs(instr);
   } else {  // Tagged case.
     DeferredMathAbsTaggedHeapNumber* deferred =
         new(zone()) DeferredMathAbsTaggedHeapNumber(this, instr);
     Register input_reg = ToRegister(instr->value());
     // Smi check.
     __ JumpIfNotSmi(input_reg, deferred->entry());
-    __ SmiToInteger32(input_reg, input_reg);
-    EmitIntegerMathAbs(instr);
-    __ Integer32ToSmi(input_reg, input_reg);
+    EmitSmiMathAbs(instr);
     __ bind(deferred->exit());
   }
 }
diff --git a/src/x64/lithium-codegen-x64.h b/src/x64/lithium-codegen-x64.h
index b9b3569393..11c9ec3792 100644
--- a/src/x64/lithium-codegen-x64.h
+++ b/src/x64/lithium-codegen-x64.h
@@ -269,7 +269,7 @@ class LCodeGen BASE_EMBEDDED {
                       uint32_t additional_index = 0);
 
   void EmitIntegerMathAbs(LMathAbs* instr);
-  void EmitInteger64MathAbs(LMathAbs* instr);
+  void EmitSmiMathAbs(LMathAbs* instr);
 
   // Support for recording safepoint and position information.
   void RecordSafepoint(LPointerMap* pointers,
diff --git a/src/x64/stub-cache-x64.cc b/src/x64/stub-cache-x64.cc
index 7ad250a4ad..34a557bd1a 100644
--- a/src/x64/stub-cache-x64.cc
+++ b/src/x64/stub-cache-x64.cc
@@ -2246,26 +2246,25 @@ Handle<Code> CallStubCompiler::CompileMathAbsCall(
   Label not_smi;
   STATIC_ASSERT(kSmiTag == 0);
   __ JumpIfNotSmi(rax, &not_smi);
-  __ SmiToInteger32(rax, rax);
 
+  // Branchless abs implementation, refer to below:
+  // http://graphics.stanford.edu/~seander/bithacks.html#IntegerAbs
   // Set ebx to 1...1 (== -1) if the argument is negative, or to 0...0
   // otherwise.
-  __ movl(rbx, rax);
-  __ sarl(rbx, Immediate(kBitsPerInt - 1));
+  __ movq(rbx, rax);
+  __ sar(rbx, Immediate(kBitsPerPointer - 1));
 
   // Do bitwise not or do nothing depending on ebx.
-  __ xorl(rax, rbx);
+  __ xor_(rax, rbx);
 
   // Add 1 or do nothing depending on ebx.
-  __ subl(rax, rbx);
+  __ subq(rax, rbx);
 
   // If the result is still negative, go to the slow case.
   // This only happens for the most negative smi.
   Label slow;
   __ j(negative, &slow);
 
-  // Smi case done.
-  __ Integer32ToSmi(rax, rax);
   __ ret(2 * kPointerSize);
 
   // Check if the argument is a heap number and load its value.