// Copyright 2013 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include <stdlib.h>

#include <limits>

#include "src/v8.h"

#include "src/base/platform/platform.h"
#include "src/code-stubs.h"
#include "src/factory.h"
#include "src/macro-assembler.h"
#include "src/objects-inl.h"
#include "test/cctest/cctest.h"
#include "test/cctest/test-code-stubs.h"

namespace v8 {
namespace internal {

#define __ masm.

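// Builds a trampoline that calls the DoubleToI stub on the double argument
// passed on the stack, verifies that the stub does not clobber any
// allocatable register other than destination_reg, and returns the
// conversion result in eax.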
ConvertDToIFunc MakeConvertDToIFuncTrampoline(Isolate* isolate,
                                              Register destination_reg) {
  HandleScope handles(isolate);

  size_t allocated;
  byte* buffer = AllocateAssemblerBuffer(&allocated);
  MacroAssembler masm(isolate, buffer, static_cast<int>(allocated),
                      v8::internal::CodeObjectRequired::kYes);
  DoubleToIStub stub(isolate, destination_reg);
  byte* start = stub.GetCode()->instruction_start();

  __ push(ebx);
  __ push(ecx);
  __ push(edx);
  __ push(esi);
  __ push(edi);

  int param_offset = 7 * kPointerSize;
  // Save registers to make sure they don't get clobbered.
  int reg_num = 0;
  for (; reg_num < Register::kNumRegisters; ++reg_num) {
    if (RegisterConfiguration::Default()->IsAllocatableGeneralCode(reg_num)) {
      Register reg = Register::from_code(reg_num);
      if (reg != esp && reg != ebp && reg != destination_reg) {
        __ push(reg);
        param_offset += kPointerSize;
      }
    }
  }

  // Re-push the double argument
  __ push(MemOperand(esp, param_offset));
  __ push(MemOperand(esp, param_offset));

  // Call through to the actual stub
  __ call(start, RelocInfo::EXTERNAL_REFERENCE);

  __ add(esp, Immediate(kDoubleSize));

  // Make sure no registers have been unexpectedly clobbered
  for (--reg_num; reg_num >= 0; --reg_num) {
    if (RegisterConfiguration::Default()->IsAllocatableGeneralCode(reg_num)) {
      Register reg = Register::from_code(reg_num);
      if (reg != esp && reg != ebp && reg != destination_reg) {
        __ cmp(reg, MemOperand(esp, 0));
        __ Assert(equal, AbortReason::kRegisterWasClobbered);
        __ add(esp, Immediate(kPointerSize));
      }
    }
  }

  __ mov(eax, destination_reg);

  __ pop(edi);
  __ pop(esi);
  __ pop(edx);
  __ pop(ecx);
  __ pop(ebx);

  __ ret(kDoubleSize);

  CodeDesc desc;
  masm.GetCode(isolate, &desc);
  MakeAssemblerBufferExecutable(buffer, allocated);
  return reinterpret_cast<ConvertDToIFunc>(
      reinterpret_cast<intptr_t>(buffer));
}

#undef __

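// Extracts the internal Isolate from the v8::Isolate owned by the context.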
static Isolate* GetIsolateFrom(LocalContext* context) {
  return reinterpret_cast<Isolate*>((*context)->GetIsolate());
}

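// Exercises DoubleToIStub via the trampoline above, once for each possible
// destination register.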
TEST(ConvertDToI) {
  CcTest::InitializeVM();
  LocalContext context;
  Isolate* isolate = GetIsolateFrom(&context);
  HandleScope scope(isolate);

#if DEBUG
  // Verify that the tests actually work with the C version. In the release
  // code, the compiler optimizes it away because it's all constant, but does
  // it wrong, triggering an assert on gcc.
  RunAllTruncationTests(&ConvertDToICVersion);
#endif

  Register dest_registers[] = {eax, ebx, ecx, edx, edi, esi};

  for (size_t d = 0; d < sizeof(dest_registers) / sizeof(Register); d++) {
    RunAllTruncationTests(
        MakeConvertDToIFuncTrampoline(isolate, dest_registers[d]));
  }
}

}  // namespace internal
}  // namespace v8