// Copyright 2013 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include <stdlib.h>

#include "src/v8.h"

#include "src/arm64/macro-assembler-arm64-inl.h"
#include "src/base/platform/platform.h"
#include "src/code-stubs.h"
#include "src/factory.h"
#include "src/macro-assembler.h"
#include "src/simulator.h"
#include "test/cctest/cctest.h"
#include "test/cctest/test-code-stubs.h"

namespace v8 {
namespace internal {

#define __ masm.
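
// Builds a trampoline that saves the allocatable registers, pushes the double
// argument (passed in d0) onto the stack, calls the DoubleToIStub, and then
// verifies that no register other than |destination_reg| was clobbered.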
ConvertDToIFunc MakeConvertDToIFuncTrampoline(Isolate* isolate,
                                              Register destination_reg) {
  HandleScope handles(isolate);

  size_t allocated;
  byte* buffer =
      AllocateAssemblerBuffer(&allocated, 4 * Assembler::kMinimalBufferSize);
  MacroAssembler masm(isolate, buffer, static_cast<int>(allocated),
                      v8::internal::CodeObjectRequired::kYes);

  DoubleToIStub stub(isolate, destination_reg);

  byte* start = stub.GetCode()->instruction_start();
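
  // Save the callee-saved registers, then switch from the C stack pointer
  // (csp) to the JS stack pointer (jssp) for the generated code.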
  __ SetStackPointer(csp);
  __ PushCalleeSavedRegisters();
  __ Mov(jssp, csp);
  __ SetStackPointer(jssp);

  MacroAssembler::PushPopQueue queue(&masm);

  // Save the allocatable registers so we can check below that the call does
  // not clobber them.
  int source_reg_offset = kDoubleSize;
  int reg_num = 0;
  for (; reg_num < Register::kNumRegisters; ++reg_num) {
    if (RegisterConfiguration::Default()->IsAllocatableGeneralCode(reg_num)) {
      Register reg = Register::from_code(reg_num);
      queue.Queue(reg);
      source_reg_offset += kPointerSize;
    }
  }

  // Push the double argument. We push a second copy to maintain sp alignment.
  queue.Queue(d0);
  queue.Queue(d0);

  queue.PushQueued();

  // Call through to the actual stub.
  __ Call(start, RelocInfo::EXTERNAL_REFERENCE);
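
  // Drop the two copies of the double argument pushed above.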
  __ Drop(2, kDoubleSize);

  // Make sure no registers have been unexpectedly clobbered.
  {
    UseScratchRegisterScope temps(&masm);
    Register temp0 = temps.AcquireX();
    Register temp1 = temps.AcquireX();
    for (--reg_num; reg_num >= 0; reg_num -= 2) {
      if (RegisterConfiguration::Default()->IsAllocatableGeneralCode(reg_num)) {
        Register reg0 = Register::from_code(reg_num);
        Register reg1 = Register::from_code(reg_num - 1);
        __ Pop(temp0, temp1);
        if (!reg0.is(destination_reg)) {
          __ Cmp(reg0, temp0);
          __ Assert(eq, AbortReason::kRegisterWasClobbered);
        }
        if (!reg1.is(destination_reg)) {
          __ Cmp(reg1, temp1);
          __ Assert(eq, AbortReason::kRegisterWasClobbered);
        }
      }
    }
  }

  if (!destination_reg.is(x0))
    __ Mov(x0, destination_reg);

  // Restore the callee-saved registers.
  __ Mov(csp, jssp);
  __ SetStackPointer(csp);
  __ PopCalleeSavedRegisters();

  __ Ret();
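
  // Finalize the generated code and flush the instruction cache before
  // handing back a callable function pointer into the buffer.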
  CodeDesc desc;
  masm.GetCode(isolate, &desc);
  MakeAssemblerBufferExecutable(buffer, allocated);
  Assembler::FlushICache(isolate, buffer, allocated);
  return (reinterpret_cast<ConvertDToIFunc>(
      reinterpret_cast<intptr_t>(buffer)));
}

#undef __


static Isolate* GetIsolateFrom(LocalContext* context) {
  return reinterpret_cast<Isolate*>((*context)->GetIsolate());
}
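

// Invokes the generated trampoline with |from|, going through the ARM64
// simulator when the test is not running natively on ARM64 hardware.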
int32_t RunGeneratedCodeCallWrapper(ConvertDToIFunc func,
                                    double from) {
#ifdef USE_SIMULATOR
  Simulator::CallArgument args[] = {
      Simulator::CallArgument(from),
      Simulator::CallArgument::End()
  };
  return static_cast<int32_t>(Simulator::current(CcTest::i_isolate())
                                  ->CallInt64(FUNCTION_ADDR(func), args));
#else
  return (*func)(from);
#endif
}
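

// Runs the truncation test suite against the DoubleToI stub for a range of
// destination registers.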
TEST(ConvertDToI) {
  CcTest::InitializeVM();
  LocalContext context;
  Isolate* isolate = GetIsolateFrom(&context);
  HandleScope scope(isolate);

#if DEBUG
  // Verify that the tests actually work with the C version. In release builds
  // the compiler optimizes it away because it's all constant, but gets it
  // wrong, triggering an assert on gcc.
  RunAllTruncationTests(&ConvertDToICVersion);
#endif
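
  // Candidate destination registers to exercise. The macro assembler's
  // scratch registers x16/x17 (ip0/ip1) are deliberately left out.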
  Register dest_registers[] = {x0, x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11,
                               x12, x13, x14, x15, x18, x19, x20, x21, x22, x23,
                               x24};

  for (size_t d = 0; d < sizeof(dest_registers) / sizeof(Register); d++) {
    RunAllTruncationTests(
        RunGeneratedCodeCallWrapper,
        MakeConvertDToIFuncTrampoline(isolate, dest_registers[d]));
  }
}

}  // namespace internal
}  // namespace v8