diff --git a/src/x64/assembler-x64.cc b/src/x64/assembler-x64.cc
index 449dbb5903..b051a43f4d 100644
--- a/src/x64/assembler-x64.cc
+++ b/src/x64/assembler-x64.cc
@@ -3196,6 +3196,7 @@ void Assembler::cvtsd2ss(XMMRegister dst, const Operand& src) {
 
 
 void Assembler::cvtsd2si(Register dst, XMMRegister src) {
+  DCHECK(!IsEnabled(AVX));
   EnsureSpace ensure_space(this);
   emit(0xF2);
   emit_optional_rex_32(dst, src);
@@ -3206,6 +3207,7 @@ void Assembler::cvtsd2si(Register dst, XMMRegister src) {
 
 
 void Assembler::cvtsd2siq(Register dst, XMMRegister src) {
+  DCHECK(!IsEnabled(AVX));
   EnsureSpace ensure_space(this);
   emit(0xF2);
   emit_rex_64(dst, src);
diff --git a/src/x64/assembler-x64.h b/src/x64/assembler-x64.h
index 3d929b17fa..4b433ec470 100644
--- a/src/x64/assembler-x64.h
+++ b/src/x64/assembler-x64.h
@@ -1379,6 +1379,10 @@ class Assembler : public AssemblerBase {
     XMMRegister idst = {dst.code()};
     vsd(0x2c, idst, xmm0, src, kF2, k0F, kW1);
   }
+  void vcvtsd2si(Register dst, XMMRegister src) {
+    XMMRegister idst = {dst.code()};
+    vsd(0x2d, idst, xmm0, src, kF2, k0F, kW0);
+  }
   void vucomisd(XMMRegister dst, XMMRegister src) {
     vsd(0x2e, dst, xmm0, src, k66, k0F, kWIG);
   }
diff --git a/src/x64/disasm-x64.cc b/src/x64/disasm-x64.cc
index 84be654c55..94a5ee713e 100644
--- a/src/x64/disasm-x64.cc
+++ b/src/x64/disasm-x64.cc
@@ -1051,6 +1051,11 @@ int DisassemblerX64::AVXInstruction(byte* data) {
                        NameOfCPURegister(regop));
         current += PrintRightXMMOperand(current);
         break;
+      case 0x2d:
+        AppendToBuffer("vcvtsd2si%s %s,", vex_w() ? "q" : "",
+                       NameOfCPURegister(regop));
+        current += PrintRightXMMOperand(current);
+        break;
       case 0x51:
         AppendToBuffer("vsqrtsd %s,%s,", NameOfXMMRegister(regop),
                        NameOfXMMRegister(vvvv));
diff --git a/src/x64/macro-assembler-x64.cc b/src/x64/macro-assembler-x64.cc
index a202d099d3..05e65e39b4 100644
--- a/src/x64/macro-assembler-x64.cc
+++ b/src/x64/macro-assembler-x64.cc
@@ -863,6 +863,16 @@ void MacroAssembler::Cvtqsi2sd(XMMRegister dst, Register src) {
 }
 
 
+void MacroAssembler::Cvtsd2si(Register dst, XMMRegister src) {
+  if (CpuFeatures::IsSupported(AVX)) {
+    CpuFeatureScope scope(this, AVX);
+    vcvtsd2si(dst, src);
+  } else {
+    cvtsd2si(dst, src);
+  }
+}
+
+
 void MacroAssembler::Cvttsd2si(Register dst, XMMRegister src) {
   if (CpuFeatures::IsSupported(AVX)) {
     CpuFeatureScope scope(this, AVX);
@@ -3444,7 +3454,7 @@ void MacroAssembler::ClampDoubleToUint8(XMMRegister input_reg,
   Label done;
   Label conv_failure;
   Xorpd(temp_xmm_reg, temp_xmm_reg);
-  cvtsd2si(result_reg, input_reg);
+  Cvtsd2si(result_reg, input_reg);
   testl(result_reg, Immediate(0xFFFFFF00));
   j(zero, &done, Label::kNear);
   cmpl(result_reg, Immediate(1));
diff --git a/src/x64/macro-assembler-x64.h b/src/x64/macro-assembler-x64.h
index f24e3a2467..9bc6ae342e 100644
--- a/src/x64/macro-assembler-x64.h
+++ b/src/x64/macro-assembler-x64.h
@@ -818,6 +818,8 @@ class MacroAssembler: public Assembler {
   void Cvtlsi2sd(XMMRegister dst, const Operand& src);
   void Cvtqsi2sd(XMMRegister dst, Register src);
 
+  void Cvtsd2si(Register dst, XMMRegister src);
+
   void Cvttsd2si(Register dst, XMMRegister src);
   void Cvttsd2si(Register dst, const Operand& src);
   void Cvttsd2siq(Register dst, XMMRegister src);
diff --git a/test/cctest/test-assembler-x64.cc b/test/cctest/test-assembler-x64.cc
index 347b95e32e..cc87c4ebf6 100644
--- a/test/cctest/test-assembler-x64.cc
+++ b/test/cctest/test-assembler-x64.cc
@@ -1516,6 +1516,14 @@ TEST(AssemblerX64AVX_sd) {
     __ vucomisd(xmm5, xmm6);
     __ j(not_equal, &exit);
 
+    // Test vcvtsd2si
+    __ movl(rax, Immediate(19));
+    __ movq(rdx, V8_UINT64_C(0x4018000000000000));  // 6.0
+    __ vmovq(xmm5, rdx);
+    __ vcvtsd2si(rcx, xmm5);
+    __ cmpl(rcx, Immediate(6));
+    __ j(not_equal, &exit);
+
     __ movq(rdx, V8_INT64_C(0x3ff0000000000000));  // 1.0
     __ vmovq(xmm7, rdx);
     __ vmulsd(xmm1, xmm1, xmm7);
diff --git a/test/cctest/test-disasm-x64.cc b/test/cctest/test-disasm-x64.cc
index ecca787d22..3722c1f800 100644
--- a/test/cctest/test-disasm-x64.cc
+++ b/test/cctest/test-disasm-x64.cc
@@ -557,6 +557,7 @@ TEST(DisasmX64) {
       __ vcvttsd2si(rax, Operand(rbx, r9, times_4, 10000));
       __ vcvttsd2siq(rdi, xmm9);
       __ vcvttsd2siq(r8, Operand(r9, rbx, times_4, 10000));
+      __ vcvtsd2si(rdi, xmm9);
       __ vmovaps(xmm10, xmm11);
      __ vmovapd(xmm7, xmm0);
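For context, a minimal usage sketch (not part of the patch): callers are expected to go through the new MacroAssembler::Cvtsd2si wrapper rather than the raw mnemonics, since the patch makes cvtsd2si/cvtsd2siq DCHECK(!IsEnabled(AVX)). The wrapper selects the VEX-encoded vcvtsd2si when AVX is available and falls back to the legacy SSE2 encoding otherwise, mirroring the existing Cvttsd2si wrapper. The helper name and registers below are hypothetical, chosen only to illustrate the call pattern.

// Hypothetical helper following the same shape as the ClampDoubleToUint8
// change in this patch.
void EmitRoundDoubleToInt32(MacroAssembler* masm, Register dst,
                            XMMRegister src) {
  // Rounds according to MXCSR (round-to-nearest by default), unlike
  // Cvttsd2si, which truncates toward zero. Dispatches to vcvtsd2si or
  // cvtsd2si depending on CpuFeatures::IsSupported(AVX).
  masm->Cvtsd2si(dst, src);
}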