[Memory] Use OS::Allocate for all OS memory allocations.
- Eliminates OS::ReserveRegion and OS::ReserveAlignedRegion.
- Changes OS::Allocate to take an alignment parameter, and reorders parameters to match page_allocator.
- Since the size of a memory allocation can be deduced, don't return the amount of memory allocated.
- Changes reservation of aligned address space. Before, we would reserve (size + alignment) rounded up to page size. This is too much, because the maximum misalignment is (alignment - page_size).
- On Windows and Cygwin, we release an oversize allocation and immediately retry at the aligned address in the allocation. If we lose the address due to a race, we just retry.
- Cleans up all the calls to OS::Allocate in codegen and tests by adding the helper AllocateSystemPage function (allocation.h) and AllocateAssemblerBuffer (cctest.h).
- Changes 'assm' to 'masm' in some targets for consistency when using a macro-assembler.

Bug: chromium:756050
Cq-Include-Trybots: master.tryserver.chromium.linux:linux_chromium_rel_ng
Change-Id: I306dbe042cc867670fdc935abca29db074b0da71
Reviewed-on: https://chromium-review.googlesource.com/749848
Commit-Queue: Bill Budge <bbudge@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Hannes Payer <hpayer@chromium.org>
Cr-Commit-Position: refs/heads/master@{#49235}
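For readers skimming the change, the sizing argument in the fourth bullet can be sketched outside of V8. The snippet below is a minimal illustration, not code from this CL: the function name is invented, it calls mmap/munmap directly instead of going through OS::Allocate/OS::Free, and it assumes a POSIX system where alignment is a power of two and both size and alignment are multiples of the page size.

#include <sys/mman.h>
#include <unistd.h>

#include <cstddef>
#include <cstdint>

// Hypothetical helper: reserve 'size' bytes of address space aligned to
// 'alignment'. A page-aligned reservation can be misaligned by at most
// (alignment - page_size), so over-reserving by exactly that amount always
// leaves room for an aligned block of the requested size.
void* ReserveAlignedSketch(size_t size, size_t alignment) {
  const size_t page_size = static_cast<size_t>(sysconf(_SC_PAGESIZE));
  const size_t request_size = size + (alignment - page_size);
  void* base = mmap(nullptr, request_size, PROT_NONE,
                    MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
  if (base == MAP_FAILED) return nullptr;

  const uintptr_t raw = reinterpret_cast<uintptr_t>(base);
  const uintptr_t aligned =
      (raw + alignment - 1) & ~(static_cast<uintptr_t>(alignment) - 1);

  // Unmap the unaligned prefix, then the unused suffix, keeping only the
  // aligned block of 'size' bytes.
  if (aligned != raw) munmap(base, aligned - raw);
  const size_t suffix = request_size - (aligned - raw) - size;
  if (suffix != 0) munmap(reinterpret_cast<void*>(aligned + size), suffix);
  return reinterpret_cast<void*>(aligned);
}

On Windows this trimming is not possible, since VirtualFree cannot release only part of a reservation, which is why the fifth bullet instead releases the whole oversize block and retries at the aligned address.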
Parent: 494aa2e015
Commit: 7e78506fc2
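Before the per-file hunks, a caller-side sketch of the new contract may help: the address hint now comes first, the caller rounds the size itself and passes an explicit alignment, and nothing is returned through an out-parameter. The include path and the portable round-up below are assumptions made for illustration; in-tree callers use V8's own RoundUp helper.

#include <cstddef>

#include "src/base/platform/platform.h"

// Illustrative only: allocate 'request' bytes of read/write memory near 'hint'.
void* AllocateReadWriteSketch(void* hint, size_t request) {
  const size_t page_size = v8::base::OS::AllocatePageSize();
  // The new OS::Allocate requires size and alignment to be multiples of the
  // page size, so round up here instead of receiving the rounded size back
  // through an out-parameter as the old signature did.
  const size_t size = ((request + page_size - 1) / page_size) * page_size;
  return v8::base::OS::Allocate(hint, size, page_size,
                                v8::base::OS::MemoryPermission::kReadWrite);
}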
@@ -103,21 +103,28 @@ void AlignedFree(void *ptr) {
 #endif
 }
 
-VirtualMemory::VirtualMemory() : address_(nullptr), size_(0) {}
+byte* AllocateSystemPage(void* address, size_t* allocated) {
+  size_t page_size = base::OS::AllocatePageSize();
+  void* result = base::OS::Allocate(address, page_size, page_size,
+                                    base::OS::MemoryPermission::kReadWrite);
+  if (result != nullptr) *allocated = page_size;
+  return static_cast<byte*>(result);
+}
 
-VirtualMemory::VirtualMemory(size_t size, void* hint)
-    : address_(base::OS::ReserveRegion(size, hint)), size_(size) {
-#if defined(LEAK_SANITIZER)
-  __lsan_register_root_region(address_, size_);
-#endif
-}
+VirtualMemory::VirtualMemory() : address_(nullptr), size_(0) {}
 
-VirtualMemory::VirtualMemory(size_t size, size_t alignment, void* hint)
+VirtualMemory::VirtualMemory(size_t size, void* hint, size_t alignment)
     : address_(nullptr), size_(0) {
-  address_ = base::OS::ReserveAlignedRegion(size, alignment, hint, &size_);
+  size_t page_size = base::OS::AllocatePageSize();
+  size_t alloc_size = RoundUp(size, page_size);
+  address_ = base::OS::Allocate(hint, alloc_size, alignment,
+                                base::OS::MemoryPermission::kNoAccess);
+  if (address_ != nullptr) {
+    size_ = alloc_size;
 #if defined(LEAK_SANITIZER)
   __lsan_register_root_region(address_, size_);
 #endif
+  }
 }
 
 VirtualMemory::~VirtualMemory() {
@@ -205,14 +212,14 @@ bool AllocVirtualMemory(size_t size, void* hint, VirtualMemory* result) {
 
 bool AlignedAllocVirtualMemory(size_t size, size_t alignment, void* hint,
                                VirtualMemory* result) {
-  VirtualMemory first_try(size, alignment, hint);
+  VirtualMemory first_try(size, hint, alignment);
   if (first_try.IsReserved()) {
     result->TakeControl(&first_try);
     return true;
   }
 
   V8::GetCurrentPlatform()->OnCriticalMemoryPressure();
-  VirtualMemory second_try(size, alignment, hint);
+  VirtualMemory second_try(size, hint, alignment);
   result->TakeControl(&second_try);
   return result->IsReserved();
 }
@@ -76,19 +76,22 @@ class FreeStoreAllocationPolicy {
 void* AlignedAlloc(size_t size, size_t alignment);
 void AlignedFree(void *ptr);
 
+// Allocates a single system memory page with read/write permissions. The
+// address parameter is a hint. Returns the base address of the memory, or null
+// on failure. Permissions can be changed on the base address.
+byte* AllocateSystemPage(void* address, size_t* allocated);
+
 // Represents and controls an area of reserved memory.
 class V8_EXPORT_PRIVATE VirtualMemory {
  public:
   // Empty VirtualMemory object, controlling no reserved memory.
   VirtualMemory();
 
-  // Reserves virtual memory with size.
-  explicit VirtualMemory(size_t size, void* hint);
-  // Reserves virtual memory containing an area of the given size that
-  // is aligned per alignment. This may not be at the position returned
-  // by address().
-  VirtualMemory(size_t size, size_t alignment, void* hint);
+  // Reserves virtual memory containing an area of the given size that is
+  // aligned per alignment. This may not be at the position returned by
+  // address().
+  VirtualMemory(size_t size, void* hint,
+                size_t alignment = base::OS::AllocatePageSize());
 
   // Construct a virtual memory by assigning it some already mapped address
   // and size.
@@ -489,10 +489,15 @@ class ArrayBufferAllocator : public v8::ArrayBuffer::Allocator {
   virtual void Free(void* data, size_t) { free(data); }
 
   virtual void* Reserve(size_t length) {
+    size_t page_size = base::OS::AllocatePageSize();
+    size_t allocated = RoundUp(length, page_size);
     void* address =
-        base::OS::ReserveRegion(length, base::OS::GetRandomMmapAddr());
+        base::OS::Allocate(base::OS::GetRandomMmapAddr(), allocated, page_size,
+                           base::OS::MemoryPermission::kNoAccess);
 #if defined(LEAK_SANITIZER)
-    __lsan_register_root_region(address, length);
+    if (address != nullptr) {
+      __lsan_register_root_region(address, allocated);
+    }
 #endif
     return address;
   }
@@ -14,21 +14,21 @@
 namespace v8 {
 namespace internal {
 
 #define __ masm.
 
 #if defined(V8_HOST_ARCH_ARM)
 
 MemCopyUint8Function CreateMemCopyUint8Function(Isolate* isolate,
                                                 MemCopyUint8Function stub) {
 #if defined(USE_SIMULATOR)
   return stub;
 #else
-  size_t actual_size;
-  byte* buffer = static_cast<byte*>(base::OS::Allocate(
-      1 * KB, &actual_size, base::OS::MemoryPermission::kReadWriteExecute));
+  size_t allocated = 0;
+  byte* buffer =
+      AllocateSystemPage(isolate->heap()->GetRandomMmapAddr(), &allocated);
   if (buffer == nullptr) return stub;
 
-  MacroAssembler masm(isolate, buffer, static_cast<int>(actual_size),
+  MacroAssembler masm(isolate, buffer, static_cast<int>(allocated),
                       CodeObjectRequired::kNo);
 
   Register dest = r0;
@@ -169,8 +169,8 @@ MemCopyUint8Function CreateMemCopyUint8Function(Isolate* isolate,
   masm.GetCode(isolate, &desc);
   DCHECK(!RelocInfo::RequiresRelocation(isolate, desc));
 
-  Assembler::FlushICache(isolate, buffer, actual_size);
-  base::OS::SetReadAndExecutable(buffer, actual_size);
+  Assembler::FlushICache(isolate, buffer, allocated);
+  base::OS::SetReadAndExecutable(buffer, allocated);
   return FUNCTION_CAST<MemCopyUint8Function>(buffer);
 #endif
 }
@ -182,12 +182,12 @@ MemCopyUint16Uint8Function CreateMemCopyUint16Uint8Function(
|
|||||||
#if defined(USE_SIMULATOR)
|
#if defined(USE_SIMULATOR)
|
||||||
return stub;
|
return stub;
|
||||||
#else
|
#else
|
||||||
size_t actual_size;
|
size_t allocated = 0;
|
||||||
byte* buffer = static_cast<byte*>(base::OS::Allocate(
|
byte* buffer =
|
||||||
1 * KB, &actual_size, base::OS::MemoryPermission::kReadWriteExecute));
|
AllocateSystemPage(isolate->heap()->GetRandomMmapAddr(), &allocated);
|
||||||
if (buffer == nullptr) return stub;
|
if (buffer == nullptr) return stub;
|
||||||
|
|
||||||
MacroAssembler masm(isolate, buffer, static_cast<int>(actual_size),
|
MacroAssembler masm(isolate, buffer, static_cast<int>(allocated),
|
||||||
CodeObjectRequired::kNo);
|
CodeObjectRequired::kNo);
|
||||||
|
|
||||||
Register dest = r0;
|
Register dest = r0;
|
||||||
@ -259,8 +259,8 @@ MemCopyUint16Uint8Function CreateMemCopyUint16Uint8Function(
|
|||||||
CodeDesc desc;
|
CodeDesc desc;
|
||||||
masm.GetCode(isolate, &desc);
|
masm.GetCode(isolate, &desc);
|
||||||
|
|
||||||
Assembler::FlushICache(isolate, buffer, actual_size);
|
Assembler::FlushICache(isolate, buffer, allocated);
|
||||||
base::OS::SetReadAndExecutable(buffer, actual_size);
|
base::OS::SetReadAndExecutable(buffer, allocated);
|
||||||
|
|
||||||
return FUNCTION_CAST<MemCopyUint16Uint8Function>(buffer);
|
return FUNCTION_CAST<MemCopyUint16Uint8Function>(buffer);
|
||||||
#endif
|
#endif
|
||||||
@ -271,12 +271,12 @@ UnaryMathFunctionWithIsolate CreateSqrtFunction(Isolate* isolate) {
|
|||||||
#if defined(USE_SIMULATOR)
|
#if defined(USE_SIMULATOR)
|
||||||
return nullptr;
|
return nullptr;
|
||||||
#else
|
#else
|
||||||
size_t actual_size;
|
size_t allocated = 0;
|
||||||
byte* buffer = static_cast<byte*>(base::OS::Allocate(
|
byte* buffer =
|
||||||
1 * KB, &actual_size, base::OS::MemoryPermission::kReadWriteExecute));
|
AllocateSystemPage(isolate->heap()->GetRandomMmapAddr(), &allocated);
|
||||||
if (buffer == nullptr) return nullptr;
|
if (buffer == nullptr) return nullptr;
|
||||||
|
|
||||||
MacroAssembler masm(isolate, buffer, static_cast<int>(actual_size),
|
MacroAssembler masm(isolate, buffer, static_cast<int>(allocated),
|
||||||
CodeObjectRequired::kNo);
|
CodeObjectRequired::kNo);
|
||||||
|
|
||||||
__ MovFromFloatParameter(d0);
|
__ MovFromFloatParameter(d0);
|
||||||
@ -288,8 +288,8 @@ UnaryMathFunctionWithIsolate CreateSqrtFunction(Isolate* isolate) {
|
|||||||
masm.GetCode(isolate, &desc);
|
masm.GetCode(isolate, &desc);
|
||||||
DCHECK(!RelocInfo::RequiresRelocation(isolate, desc));
|
DCHECK(!RelocInfo::RequiresRelocation(isolate, desc));
|
||||||
|
|
||||||
Assembler::FlushICache(isolate, buffer, actual_size);
|
Assembler::FlushICache(isolate, buffer, allocated);
|
||||||
base::OS::SetReadAndExecutable(buffer, actual_size);
|
base::OS::SetReadAndExecutable(buffer, allocated);
|
||||||
return FUNCTION_CAST<UnaryMathFunctionWithIsolate>(buffer);
|
return FUNCTION_CAST<UnaryMathFunctionWithIsolate>(buffer);
|
||||||
#endif
|
#endif
|
||||||
}
|
}
|
||||||
|
@ -34,6 +34,18 @@ namespace {
|
|||||||
// This causes VirtualMemory::Commit to not always commit the memory region
|
// This causes VirtualMemory::Commit to not always commit the memory region
|
||||||
// specified.
|
// specified.
|
||||||
|
|
||||||
|
int GetProtectionFromMemoryPermission(OS::MemoryPermission access) {
|
||||||
|
switch (access) {
|
||||||
|
case OS::MemoryPermission::kNoAccess:
|
||||||
|
return PAGE_NOACCESS;
|
||||||
|
case OS::MemoryPermission::kReadWrite:
|
||||||
|
return PAGE_READWRITE;
|
||||||
|
case OS::MemoryPermission::kReadWriteExecute:
|
||||||
|
return PAGE_EXECUTE_READWRITE;
|
||||||
|
}
|
||||||
|
UNREACHABLE();
|
||||||
|
}
|
||||||
|
|
||||||
static void* RandomizedVirtualAlloc(size_t size, int action, int protection,
|
static void* RandomizedVirtualAlloc(size_t size, int action, int protection,
|
||||||
void* hint) {
|
void* hint) {
|
||||||
LPVOID base = nullptr;
|
LPVOID base = nullptr;
|
||||||
@ -80,43 +92,44 @@ double CygwinTimezoneCache::LocalTimeOffset() {
|
|||||||
(loc->tm_isdst > 0 ? 3600 * msPerSecond : 0));
|
(loc->tm_isdst > 0 ? 3600 * msPerSecond : 0));
|
||||||
}
|
}
|
||||||
|
|
||||||
// static
|
void* OS::Allocate(void* address, size_t size, size_t alignment,
|
||||||
void* OS::ReserveRegion(size_t size, void* hint) {
|
MemoryPermission access) {
|
||||||
return RandomizedVirtualAlloc(size, MEM_RESERVE, PAGE_NOACCESS, hint);
|
size_t page_size = AllocatePageSize();
|
||||||
|
DCHECK_EQ(0, size % page_size);
|
||||||
|
DCHECK_EQ(0, alignment % page_size);
|
||||||
|
address = AlignedAddress(address, alignment);
|
||||||
|
// Add the maximum misalignment so we are guaranteed an aligned base address.
|
||||||
|
size_t request_size = size + (alignment - page_size);
|
||||||
|
|
||||||
|
int flags = (access == OS::MemoryPermission::kNoAccess)
|
||||||
|
? MEM_RESERVE
|
||||||
|
: MEM_RESERVE | MEM_COMMIT;
|
||||||
|
int prot = GetProtectionFromMemoryPermission(access);
|
||||||
|
|
||||||
|
void* base = RandomizedVirtualAlloc(request_size, flags, prot, address);
|
||||||
|
if (base == nullptr) return nullptr;
|
||||||
|
|
||||||
|
uint8_t* aligned_base = RoundUp(static_cast<uint8_t*>(base), alignment);
|
||||||
|
int resize_attempts = 0;
|
||||||
|
const int kMaxResizeAttempts = 3;
|
||||||
|
while (aligned_base != base) {
|
||||||
|
// Try reducing the size by freeing and then re-allocating at the aligned
|
||||||
|
// base. Retry logic is needed since we may lose the memory due to a race.
|
||||||
|
Free(base, request_size);
|
||||||
|
if (resize_attempts == kMaxResizeAttempts) return nullptr;
|
||||||
|
base = RandomizedVirtualAlloc(size, flags, prot, aligned_base);
|
||||||
|
if (base == nullptr) return nullptr;
|
||||||
|
aligned_base = RoundUp(static_cast<uint8_t*>(base), alignment);
|
||||||
|
resize_attempts++;
|
||||||
|
}
|
||||||
|
|
||||||
|
return static_cast<void*>(aligned_base);
|
||||||
}
|
}
|
||||||
|
|
||||||
// static
|
void OS::Free(void* address, const size_t size) {
|
||||||
void* OS::ReserveAlignedRegion(size_t size, size_t alignment, void* hint,
|
// TODO(1240712): VirtualFree has a return value which is ignored here.
|
||||||
size_t* allocated) {
|
VirtualFree(address, 0, MEM_RELEASE);
|
||||||
hint = AlignedAddress(hint, alignment);
|
USE(size);
|
||||||
DCHECK_EQ(alignment % OS::AllocatePageSize(), 0);
|
|
||||||
size_t request_size =
|
|
||||||
RoundUp(size + alignment, static_cast<intptr_t>(OS::AllocatePageSize()));
|
|
||||||
void* address = ReserveRegion(request_size, hint);
|
|
||||||
if (address == nullptr) {
|
|
||||||
*allocated = 0;
|
|
||||||
return nullptr;
|
|
||||||
}
|
|
||||||
uint8_t* base = RoundUp(static_cast<uint8_t*>(address), alignment);
|
|
||||||
// Try reducing the size by freeing and then reallocating a specific area.
|
|
||||||
bool result = ReleaseRegion(address, request_size);
|
|
||||||
USE(result);
|
|
||||||
DCHECK(result);
|
|
||||||
address = VirtualAlloc(base, size, MEM_RESERVE, PAGE_NOACCESS);
|
|
||||||
if (address != nullptr) {
|
|
||||||
request_size = size;
|
|
||||||
DCHECK(base == static_cast<uint8_t*>(address));
|
|
||||||
} else {
|
|
||||||
// Resizing failed, just go with a bigger area.
|
|
||||||
address = ReserveRegion(request_size, hint);
|
|
||||||
if (address == nullptr) {
|
|
||||||
*allocated = 0;
|
|
||||||
return nullptr;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
*allocated = request_size;
|
|
||||||
return static_cast<void*>(address);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// static
|
// static
|
||||||
|
@ -18,42 +18,19 @@ TimezoneCache* OS::CreateTimezoneCache() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// static
|
// static
|
||||||
void* OS::Allocate(const size_t requested, size_t* allocated,
|
void* OS::Allocate(void* address, size_t size, size_t alignment,
|
||||||
OS::MemoryPermission access, void* hint) {
|
OS::MemoryPermission access) {
|
||||||
CHECK(false); // TODO(scottmg): Port, https://crbug.com/731217.
|
// Currently we only support reserving memory.
|
||||||
return nullptr;
|
DCHECK_EQ(MemoryPermission::kNoAccess, access);
|
||||||
}
|
size_t page_size = OS::AllocatePageSize();
|
||||||
|
DCHECK_EQ(0, size % page_size);
|
||||||
// static
|
DCHECK_EQ(0, alignment % page_size);
|
||||||
void OS::Guard(void* address, size_t size) {
|
address = AlignedAddress(address, alignment);
|
||||||
CHECK_EQ(ZX_OK, zx_vmar_protect(zx_vmar_root_self(),
|
// Add the maximum misalignment so we are guaranteed an aligned base address.
|
||||||
reinterpret_cast<uintptr_t>(address), size,
|
size_t request_size = size + (alignment - page_size);
|
||||||
0 /*no permissions*/));
|
|
||||||
}
|
|
||||||
|
|
||||||
// static
|
|
||||||
void* OS::ReserveRegion(size_t size, void* hint) {
|
|
||||||
zx_handle_t vmo;
|
|
||||||
if (zx_vmo_create(size, 0, &vmo) != ZX_OK) return nullptr;
|
|
||||||
uintptr_t result;
|
|
||||||
zx_status_t status = zx_vmar_map(zx_vmar_root_self(), 0, vmo, 0, size,
|
|
||||||
0 /*no permissions*/, &result);
|
|
||||||
zx_handle_close(vmo);
|
|
||||||
if (status != ZX_OK) return nullptr;
|
|
||||||
return reinterpret_cast<void*>(result);
|
|
||||||
}
|
|
||||||
|
|
||||||
// static
|
|
||||||
void* OS::ReserveAlignedRegion(size_t size, size_t alignment, void* hint,
|
|
||||||
size_t* allocated) {
|
|
||||||
DCHECK_EQ(alignment % OS::AllocatePageSize(), 0);
|
|
||||||
hint = AlignedAddress(hint, alignment);
|
|
||||||
size_t request_size =
|
|
||||||
RoundUp(size + alignment, static_cast<intptr_t>(OS::AllocatePageSize()));
|
|
||||||
|
|
||||||
zx_handle_t vmo;
|
zx_handle_t vmo;
|
||||||
if (zx_vmo_create(request_size, 0, &vmo) != ZX_OK) {
|
if (zx_vmo_create(request_size, 0, &vmo) != ZX_OK) {
|
||||||
*allocated = 0;
|
|
||||||
return nullptr;
|
return nullptr;
|
||||||
}
|
}
|
||||||
static const char kVirtualMemoryName[] = "v8-virtualmem";
|
static const char kVirtualMemoryName[] = "v8-virtualmem";
|
||||||
@ -66,26 +43,25 @@ void* OS::ReserveAlignedRegion(size_t size, size_t alignment, void* hint,
|
|||||||
// so close the vmo either way.
|
// so close the vmo either way.
|
||||||
zx_handle_close(vmo);
|
zx_handle_close(vmo);
|
||||||
if (status != ZX_OK) {
|
if (status != ZX_OK) {
|
||||||
*allocated = 0;
|
|
||||||
return nullptr;
|
return nullptr;
|
||||||
}
|
}
|
||||||
|
|
||||||
uint8_t* base = reinterpret_cast<uint8_t*>(reservation);
|
uint8_t* base = reinterpret_cast<uint8_t*>(reservation);
|
||||||
uint8_t* aligned_base = RoundUp(base, alignment);
|
uint8_t* aligned_base = RoundUp(base, alignment);
|
||||||
DCHECK_LE(base, aligned_base);
|
|
||||||
|
|
||||||
// Unmap extra memory reserved before and after the desired block.
|
// Unmap extra memory reserved before and after the desired block.
|
||||||
if (aligned_base != base) {
|
if (aligned_base != base) {
|
||||||
|
DCHECK_LT(base, aligned_base);
|
||||||
size_t prefix_size = static_cast<size_t>(aligned_base - base);
|
size_t prefix_size = static_cast<size_t>(aligned_base - base);
|
||||||
zx_vmar_unmap(zx_vmar_root_self(), reinterpret_cast<uintptr_t>(base),
|
zx_vmar_unmap(zx_vmar_root_self(), reinterpret_cast<uintptr_t>(base),
|
||||||
prefix_size);
|
prefix_size);
|
||||||
request_size -= prefix_size;
|
request_size -= prefix_size;
|
||||||
}
|
}
|
||||||
|
|
||||||
size_t aligned_size = RoundUp(size, OS::AllocatePageSize());
|
size_t aligned_size = RoundUp(size, page_size);
|
||||||
DCHECK_LE(aligned_size, request_size);
|
|
||||||
|
|
||||||
if (aligned_size != request_size) {
|
if (aligned_size != request_size) {
|
||||||
|
DCHECK_LT(aligned_size, request_size);
|
||||||
size_t suffix_size = request_size - aligned_size;
|
size_t suffix_size = request_size - aligned_size;
|
||||||
zx_vmar_unmap(zx_vmar_root_self(),
|
zx_vmar_unmap(zx_vmar_root_self(),
|
||||||
reinterpret_cast<uintptr_t>(aligned_base + aligned_size),
|
reinterpret_cast<uintptr_t>(aligned_base + aligned_size),
|
||||||
@ -94,11 +70,16 @@ void* OS::ReserveAlignedRegion(size_t size, size_t alignment, void* hint,
|
|||||||
}
|
}
|
||||||
|
|
||||||
DCHECK(aligned_size == request_size);
|
DCHECK(aligned_size == request_size);
|
||||||
|
|
||||||
*allocated = aligned_size;
|
|
||||||
return static_cast<void*>(aligned_base);
|
return static_cast<void*>(aligned_base);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// static
|
||||||
|
void OS::Guard(void* address, size_t size) {
|
||||||
|
CHECK_EQ(ZX_OK, zx_vmar_protect(zx_vmar_root_self(),
|
||||||
|
reinterpret_cast<uintptr_t>(address), size,
|
||||||
|
0 /*no permissions*/));
|
||||||
|
}
|
||||||
|
|
||||||
// static
|
// static
|
||||||
bool OS::CommitRegion(void* address, size_t size, bool is_executable) {
|
bool OS::CommitRegion(void* address, size_t size, bool is_executable) {
|
||||||
uint32_t prot = ZX_VM_FLAG_PERM_READ | ZX_VM_FLAG_PERM_WRITE |
|
uint32_t prot = ZX_VM_FLAG_PERM_READ | ZX_VM_FLAG_PERM_WRITE |
|
||||||
|
@ -97,6 +97,27 @@ int GetProtectionFromMemoryPermission(OS::MemoryPermission access) {
|
|||||||
}
|
}
|
||||||
UNREACHABLE();
|
UNREACHABLE();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
void* Allocate(void* address, size_t size, OS::MemoryPermission access) {
|
||||||
|
const size_t actual_size = RoundUp(size, OS::AllocatePageSize());
|
||||||
|
int prot = GetProtectionFromMemoryPermission(access);
|
||||||
|
int flags = MAP_PRIVATE | MAP_ANONYMOUS;
|
||||||
|
if (access == OS::MemoryPermission::kNoAccess) {
|
||||||
|
// TODO(bbudge) Improve readability by moving platform specific code into
|
||||||
|
// helper functions.
|
||||||
|
#if !V8_OS_AIX && !V8_OS_FREEBSD && !V8_OS_QNX
|
||||||
|
flags |= MAP_NORESERVE;
|
||||||
|
#endif
|
||||||
|
#if V8_OS_QNX
|
||||||
|
flags |= MAP_LAZY;
|
||||||
|
#endif // V8_OS_QNX
|
||||||
|
}
|
||||||
|
void* result =
|
||||||
|
mmap(address, actual_size, prot, flags, kMmapFd, kMmapFdOffset);
|
||||||
|
if (result == MAP_FAILED) return nullptr;
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
#endif // !V8_OS_FUCHSIA
|
#endif // !V8_OS_FUCHSIA
|
||||||
|
|
||||||
} // namespace
|
} // namespace
|
||||||
@ -208,25 +229,48 @@ void* OS::GetRandomMmapAddr() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// TODO(bbudge) Move Cygwin and Fuschia stuff into platform-specific files.
|
// TODO(bbudge) Move Cygwin and Fuschia stuff into platform-specific files.
|
||||||
#if !V8_OS_FUCHSIA
|
#if !V8_OS_CYGWIN && !V8_OS_FUCHSIA
|
||||||
void* OS::Allocate(const size_t requested, size_t* allocated,
|
void* OS::Allocate(void* address, size_t size, size_t alignment,
|
||||||
OS::MemoryPermission access, void* hint) {
|
MemoryPermission access) {
|
||||||
const size_t msize = RoundUp(requested, AllocatePageSize());
|
size_t page_size = AllocatePageSize();
|
||||||
int prot = GetProtectionFromMemoryPermission(access);
|
DCHECK_EQ(0, size % page_size);
|
||||||
void* mbase = mmap(hint, msize, prot, MAP_PRIVATE | MAP_ANONYMOUS, kMmapFd,
|
DCHECK_EQ(0, alignment % page_size);
|
||||||
kMmapFdOffset);
|
address = AlignedAddress(address, alignment);
|
||||||
if (mbase == MAP_FAILED) return nullptr;
|
// Add the maximum misalignment so we are guaranteed an aligned base address.
|
||||||
*allocated = msize;
|
size_t request_size = size + (alignment - page_size);
|
||||||
return mbase;
|
void* result = base::Allocate(address, request_size, access);
|
||||||
}
|
if (result == nullptr) return nullptr;
|
||||||
#endif // !V8_OS_FUCHSIA
|
|
||||||
|
|
||||||
|
// Unmap memory allocated before the aligned base address.
|
||||||
|
uint8_t* base = static_cast<uint8_t*>(result);
|
||||||
|
uint8_t* aligned_base = RoundUp(base, alignment);
|
||||||
|
if (aligned_base != base) {
|
||||||
|
DCHECK_LT(base, aligned_base);
|
||||||
|
size_t prefix_size = static_cast<size_t>(aligned_base - base);
|
||||||
|
OS::Free(base, prefix_size);
|
||||||
|
request_size -= prefix_size;
|
||||||
|
}
|
||||||
|
// Unmap memory allocated after the potentially unaligned end.
|
||||||
|
if (size != request_size) {
|
||||||
|
DCHECK_LT(size, request_size);
|
||||||
|
size_t suffix_size = request_size - size;
|
||||||
|
OS::Free(aligned_base + size, suffix_size);
|
||||||
|
request_size -= suffix_size;
|
||||||
|
}
|
||||||
|
|
||||||
|
DCHECK_EQ(size, request_size);
|
||||||
|
return static_cast<void*>(aligned_base);
|
||||||
|
}
|
||||||
|
#endif // !V8_OS_CYGWIN && !V8_OS_FUCHSIA
|
||||||
|
|
||||||
|
#if !V8_OS_CYGWIN
|
||||||
void OS::Free(void* address, const size_t size) {
|
void OS::Free(void* address, const size_t size) {
|
||||||
// TODO(1240712): munmap has a return value which is ignored here.
|
// TODO(1240712): munmap has a return value which is ignored here.
|
||||||
int result = munmap(address, size);
|
int result = munmap(address, size);
|
||||||
USE(result);
|
USE(result);
|
||||||
DCHECK_EQ(0, result);
|
DCHECK_EQ(0, result);
|
||||||
}
|
}
|
||||||
|
#endif // !V8_OS_CYGWIN
|
||||||
|
|
||||||
void OS::SetReadAndExecutable(void* address, const size_t size) {
|
void OS::SetReadAndExecutable(void* address, const size_t size) {
|
||||||
#if V8_OS_CYGWIN
|
#if V8_OS_CYGWIN
|
||||||
@ -272,60 +316,6 @@ void OS::SetReadWriteAndExecutable(void* address, const size_t size) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
#if !V8_OS_CYGWIN && !V8_OS_FUCHSIA
|
#if !V8_OS_CYGWIN && !V8_OS_FUCHSIA
|
||||||
// static
|
|
||||||
void* OS::ReserveRegion(size_t size, void* hint) {
|
|
||||||
int map_flags = MAP_PRIVATE | MAP_ANONYMOUS;
|
|
||||||
#if !V8_OS_AIX && !V8_OS_FREEBSD && !V8_OS_QNX
|
|
||||||
map_flags |= MAP_NORESERVE;
|
|
||||||
#endif
|
|
||||||
#if V8_OS_QNX
|
|
||||||
map_flags |= MAP_LAZY;
|
|
||||||
#endif // V8_OS_QNX
|
|
||||||
void* result = mmap(hint, size, PROT_NONE, map_flags, kMmapFd, kMmapFdOffset);
|
|
||||||
if (result == MAP_FAILED) return nullptr;
|
|
||||||
|
|
||||||
return result;
|
|
||||||
}
|
|
||||||
|
|
||||||
// static
|
|
||||||
void* OS::ReserveAlignedRegion(size_t size, size_t alignment, void* hint,
|
|
||||||
size_t* allocated) {
|
|
||||||
DCHECK_EQ(0, alignment % OS::AllocatePageSize());
|
|
||||||
hint = AlignedAddress(hint, alignment);
|
|
||||||
size_t request_size =
|
|
||||||
RoundUp(size + alignment, static_cast<intptr_t>(OS::AllocatePageSize()));
|
|
||||||
void* result = ReserveRegion(request_size, hint);
|
|
||||||
if (result == nullptr) {
|
|
||||||
*allocated = 0;
|
|
||||||
return nullptr;
|
|
||||||
}
|
|
||||||
|
|
||||||
uint8_t* base = static_cast<uint8_t*>(result);
|
|
||||||
uint8_t* aligned_base = RoundUp(base, alignment);
|
|
||||||
DCHECK_LE(base, aligned_base);
|
|
||||||
|
|
||||||
// Unmap extra memory reserved before and after the desired block.
|
|
||||||
if (aligned_base != base) {
|
|
||||||
size_t prefix_size = static_cast<size_t>(aligned_base - base);
|
|
||||||
OS::Free(base, prefix_size);
|
|
||||||
request_size -= prefix_size;
|
|
||||||
}
|
|
||||||
|
|
||||||
size_t aligned_size = RoundUp(size, OS::AllocatePageSize());
|
|
||||||
DCHECK_LE(aligned_size, request_size);
|
|
||||||
|
|
||||||
if (aligned_size != request_size) {
|
|
||||||
size_t suffix_size = request_size - aligned_size;
|
|
||||||
OS::Free(aligned_base + aligned_size, suffix_size);
|
|
||||||
request_size -= suffix_size;
|
|
||||||
}
|
|
||||||
|
|
||||||
DCHECK(aligned_size == request_size);
|
|
||||||
|
|
||||||
*allocated = aligned_size;
|
|
||||||
return static_cast<void*>(aligned_base);
|
|
||||||
}
|
|
||||||
|
|
||||||
// static
|
// static
|
||||||
bool OS::CommitRegion(void* address, size_t size, bool is_executable) {
|
bool OS::CommitRegion(void* address, size_t size, bool is_executable) {
|
||||||
int prot = PROT_READ | PROT_WRITE | (is_executable ? PROT_EXEC : 0);
|
int prot = PROT_READ | PROT_WRITE | (is_executable ? PROT_EXEC : 0);
|
||||||
|
@@ -732,6 +732,18 @@ void* OS::GetRandomMmapAddr() {
 
 namespace {
 
+int GetProtectionFromMemoryPermission(OS::MemoryPermission access) {
+  switch (access) {
+    case OS::MemoryPermission::kNoAccess:
+      return PAGE_NOACCESS;
+    case OS::MemoryPermission::kReadWrite:
+      return PAGE_READWRITE;
+    case OS::MemoryPermission::kReadWriteExecute:
+      return PAGE_EXECUTE_READWRITE;
+  }
+  UNREACHABLE();
+}
+
 static void* RandomizedVirtualAlloc(size_t size, int action, int protection,
                                     void* hint) {
   LPVOID base = NULL;
@@ -747,49 +759,50 @@ static void* RandomizedVirtualAlloc(size_t size, int action, int protection,
 
   if (use_aslr &&
       (protection == PAGE_EXECUTE_READWRITE || protection == PAGE_NOACCESS)) {
-    // For executable pages try and randomize the allocation address
+    // For executable or reserved pages try to randomize the allocation address.
     base = VirtualAlloc(hint, size, action, protection);
   }
 
-  // After three attempts give up and let the OS find an address to use.
-  if (base == NULL) base = VirtualAlloc(NULL, size, action, protection);
+  // On failure, let the OS find an address to use.
+  if (base == NULL) base = VirtualAlloc(nullptr, size, action, protection);
 
   return base;
 }
 
 }  // namespace
 
-void* OS::Allocate(const size_t requested, size_t* allocated,
-                   OS::MemoryPermission access, void* hint) {
-  // VirtualAlloc rounds allocated size to page size automatically.
-  size_t msize = RoundUp(requested, static_cast<int>(AllocatePageSize()));
-
-  // Windows XP SP2 allows Data Excution Prevention (DEP).
-  int prot = PAGE_NOACCESS;
-  switch (access) {
-    case OS::MemoryPermission::kNoAccess: {
-      prot = PAGE_NOACCESS;
-      break;
-    }
-    case OS::MemoryPermission::kReadWrite: {
-      prot = PAGE_READWRITE;
-      break;
-    }
-    case OS::MemoryPermission::kReadWriteExecute: {
-      prot = PAGE_EXECUTE_READWRITE;
-      break;
-    }
+void* OS::Allocate(void* address, size_t size, size_t alignment,
+                   MemoryPermission access) {
+  size_t page_size = AllocatePageSize();
+  DCHECK_EQ(0, size % page_size);
+  DCHECK_EQ(0, alignment % page_size);
+  address = AlignedAddress(address, alignment);
+  // Add the maximum misalignment so we are guaranteed an aligned base address.
+  size_t request_size = size + (alignment - page_size);
+
+  int flags = (access == OS::MemoryPermission::kNoAccess)
+                  ? MEM_RESERVE
+                  : MEM_RESERVE | MEM_COMMIT;
+  int prot = GetProtectionFromMemoryPermission(access);
+
+  void* base = RandomizedVirtualAlloc(request_size, flags, prot, address);
+  if (base == nullptr) return nullptr;
+
+  uint8_t* aligned_base = RoundUp(static_cast<uint8_t*>(base), alignment);
+  int resize_attempts = 0;
+  const int kMaxResizeAttempts = 3;
+  while (aligned_base != base) {
+    // Try reducing the size by freeing and then re-allocating at the aligned
+    // base. Retry logic is needed since we may lose the memory due to a race.
+    Free(base, request_size);
+    if (resize_attempts == kMaxResizeAttempts) return nullptr;
+    base = RandomizedVirtualAlloc(size, flags, prot, aligned_base);
+    if (base == nullptr) return nullptr;
+    aligned_base = RoundUp(static_cast<uint8_t*>(base), alignment);
+    resize_attempts++;
   }
 
-  LPVOID mbase =
-      RandomizedVirtualAlloc(msize, MEM_COMMIT | MEM_RESERVE, prot, hint);
-
-  if (mbase == NULL) return NULL;
-
-  DCHECK_EQ(reinterpret_cast<uintptr_t>(mbase) % OS::AllocatePageSize(), 0);
-
-  *allocated = msize;
-  return mbase;
+  return static_cast<void*>(aligned_base);
 }
 
 void OS::Free(void* address, const size_t size) {
@ -825,44 +838,6 @@ void OS::SetReadWriteAndExecutable(void* address, const size_t size) {
|
|||||||
VirtualProtect(address, size, PAGE_EXECUTE_READWRITE, &oldprotect));
|
VirtualProtect(address, size, PAGE_EXECUTE_READWRITE, &oldprotect));
|
||||||
}
|
}
|
||||||
|
|
||||||
// static
|
|
||||||
void* OS::ReserveRegion(size_t size, void* hint) {
|
|
||||||
return RandomizedVirtualAlloc(size, MEM_RESERVE, PAGE_NOACCESS, hint);
|
|
||||||
}
|
|
||||||
|
|
||||||
void* OS::ReserveAlignedRegion(size_t size, size_t alignment, void* hint,
|
|
||||||
size_t* allocated) {
|
|
||||||
DCHECK_EQ(alignment % OS::AllocatePageSize(), 0);
|
|
||||||
hint = AlignedAddress(hint, alignment);
|
|
||||||
size_t request_size =
|
|
||||||
RoundUp(size + alignment, static_cast<intptr_t>(OS::AllocatePageSize()));
|
|
||||||
void* address = ReserveRegion(request_size, hint);
|
|
||||||
if (address == nullptr) {
|
|
||||||
*allocated = 0;
|
|
||||||
return nullptr;
|
|
||||||
}
|
|
||||||
uint8_t* base = RoundUp(static_cast<uint8_t*>(address), alignment);
|
|
||||||
// Try reducing the size by freeing and then reallocating a specific area.
|
|
||||||
bool result = ReleaseRegion(address, request_size);
|
|
||||||
USE(result);
|
|
||||||
DCHECK(result);
|
|
||||||
address = VirtualAlloc(base, size, MEM_RESERVE, PAGE_NOACCESS);
|
|
||||||
if (address != nullptr) {
|
|
||||||
request_size = size;
|
|
||||||
DCHECK(base == static_cast<uint8_t*>(address));
|
|
||||||
} else {
|
|
||||||
// Resizing failed, just go with a bigger area.
|
|
||||||
address = ReserveRegion(request_size, hint);
|
|
||||||
if (address == nullptr) {
|
|
||||||
*allocated = 0;
|
|
||||||
return nullptr;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
*allocated = request_size;
|
|
||||||
return static_cast<void*>(address);
|
|
||||||
}
|
|
||||||
|
|
||||||
// static
|
// static
|
||||||
bool OS::CommitRegion(void* address, size_t size, bool is_executable) {
|
bool OS::CommitRegion(void* address, size_t size, bool is_executable) {
|
||||||
int prot = is_executable ? PAGE_EXECUTE_READWRITE : PAGE_READWRITE;
|
int prot = is_executable ? PAGE_EXECUTE_READWRITE : PAGE_READWRITE;
|
||||||
|
@@ -172,9 +172,11 @@ class V8_BASE_EXPORT OS {
   static void* GetRandomMmapAddr();
 
   // Allocates memory. Permissions are set according to the access argument.
-  // Returns the address of the allocated memory, or nullptr on failure.
-  static void* Allocate(const size_t requested, size_t* allocated,
-                        MemoryPermission access, void* hint = nullptr);
+  // The address parameter is a hint. The size and alignment parameters must be
+  // multiples of AllocatePageSize(). Returns the address of the allocated
+  // memory, with the specified size and alignment, or nullptr on failure.
+  static void* Allocate(void* address, size_t size, size_t alignment,
+                        MemoryPermission access);
 
   // Frees memory allocated by a call to Allocate.
   static void Free(void* address, const size_t size);
@@ -192,11 +194,6 @@ class V8_BASE_EXPORT OS {
   // function. This is only a temporary function and will go away soon.
   static void SetReadWriteAndExecutable(void* address, const size_t size);
 
-  static void* ReserveRegion(size_t size, void* hint);
-
-  static void* ReserveAlignedRegion(size_t size, size_t alignment, void* hint,
-                                    size_t* allocated);
-
   static bool CommitRegion(void* address, size_t size, bool is_executable);
 
   static bool UncommitRegion(void* address, size_t size);
src/d8.cc (17 lines changed)
@@ -160,16 +160,17 @@ class ShellArrayBufferAllocator : public ArrayBufferAllocatorBase {
   }
 #if USE_VM
   void* VirtualMemoryAllocate(size_t length) {
-    void* data = base::OS::ReserveRegion(length, nullptr);
-    if (data && !base::OS::CommitRegion(data, length, false)) {
-      base::OS::ReleaseRegion(data, length);
-      return nullptr;
-    }
+    size_t page_size = base::OS::AllocatePageSize();
+    size_t alloc_size = RoundUp(length, page_size);
+    void* address = base::OS::Allocate(nullptr, alloc_size, page_size,
+                                       base::OS::MemoryPermission::kReadWrite);
+    if (address != nullptr) {
 #if defined(LEAK_SANITIZER)
-    __lsan_register_root_region(data, length);
+      __lsan_register_root_region(address, alloc_size);
 #endif
-    MSAN_MEMORY_IS_INITIALIZED(data, length);
-    return data;
+      MSAN_MEMORY_IS_INITIALIZED(address, alloc_size);
+    }
+    return address;
   }
 #endif
 };
@@ -125,7 +125,7 @@ bool CodeRange::SetUp(size_t requested) {
   }
 
   // We are sure that we have mapped a block of requested addresses.
-  DCHECK(reservation.size() == requested);
+  DCHECK_GE(reservation.size(), requested);
   Address base = reinterpret_cast<Address>(reservation.address());
 
   // On some platforms, specifically Win64, we need to reserve some pages at
@@ -456,15 +456,10 @@ Address MemoryAllocator::ReserveAlignedMemory(size_t size, size_t alignment,
   if (!AlignedAllocVirtualMemory(size, alignment, hint, &reservation))
     return nullptr;
 
-  const Address base =
-      ::RoundUp(static_cast<Address>(reservation.address()), alignment);
-  if (base + size != reservation.end()) {
-    const Address unused_start = ::RoundUp(base + size, GetCommitPageSize());
-    reservation.ReleasePartial(unused_start);
-  }
+  Address result = static_cast<Address>(reservation.address());
   size_.Increment(reservation.size());
   controller->TakeControl(&reservation);
-  return base;
+  return result;
 }
 
 Address MemoryAllocator::AllocateAlignedMemory(
@ -14,14 +14,13 @@ namespace internal {
|
|||||||
|
|
||||||
#define __ masm.
|
#define __ masm.
|
||||||
|
|
||||||
|
|
||||||
UnaryMathFunctionWithIsolate CreateSqrtFunction(Isolate* isolate) {
|
UnaryMathFunctionWithIsolate CreateSqrtFunction(Isolate* isolate) {
|
||||||
size_t actual_size;
|
size_t allocated = 0;
|
||||||
// Allocate buffer in executable space.
|
byte* buffer =
|
||||||
byte* buffer = static_cast<byte*>(base::OS::Allocate(
|
AllocateSystemPage(isolate->heap()->GetRandomMmapAddr(), &allocated);
|
||||||
1 * KB, &actual_size, base::OS::MemoryPermission::kReadWriteExecute));
|
|
||||||
if (buffer == nullptr) return nullptr;
|
if (buffer == nullptr) return nullptr;
|
||||||
MacroAssembler masm(isolate, buffer, static_cast<int>(actual_size),
|
|
||||||
|
MacroAssembler masm(isolate, buffer, static_cast<int>(allocated),
|
||||||
CodeObjectRequired::kNo);
|
CodeObjectRequired::kNo);
|
||||||
// esp[1 * kPointerSize]: raw double input
|
// esp[1 * kPointerSize]: raw double input
|
||||||
// esp[0 * kPointerSize]: return address
|
// esp[0 * kPointerSize]: return address
|
||||||
@ -39,8 +38,8 @@ UnaryMathFunctionWithIsolate CreateSqrtFunction(Isolate* isolate) {
|
|||||||
masm.GetCode(isolate, &desc);
|
masm.GetCode(isolate, &desc);
|
||||||
DCHECK(!RelocInfo::RequiresRelocation(isolate, desc));
|
DCHECK(!RelocInfo::RequiresRelocation(isolate, desc));
|
||||||
|
|
||||||
Assembler::FlushICache(isolate, buffer, actual_size);
|
Assembler::FlushICache(isolate, buffer, allocated);
|
||||||
base::OS::SetReadAndExecutable(buffer, actual_size);
|
base::OS::SetReadAndExecutable(buffer, allocated);
|
||||||
return FUNCTION_CAST<UnaryMathFunctionWithIsolate>(buffer);
|
return FUNCTION_CAST<UnaryMathFunctionWithIsolate>(buffer);
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -132,12 +131,12 @@ class LabelConverter {
|
|||||||
|
|
||||||
|
|
||||||
MemMoveFunction CreateMemMoveFunction(Isolate* isolate) {
|
MemMoveFunction CreateMemMoveFunction(Isolate* isolate) {
|
||||||
size_t actual_size;
|
size_t allocated = 0;
|
||||||
// Allocate buffer in executable space.
|
byte* buffer =
|
||||||
byte* buffer = static_cast<byte*>(base::OS::Allocate(
|
AllocateSystemPage(isolate->heap()->GetRandomMmapAddr(), &allocated);
|
||||||
1 * KB, &actual_size, base::OS::MemoryPermission::kReadWriteExecute));
|
|
||||||
if (buffer == nullptr) return nullptr;
|
if (buffer == nullptr) return nullptr;
|
||||||
MacroAssembler masm(isolate, buffer, static_cast<int>(actual_size),
|
|
||||||
|
MacroAssembler masm(isolate, buffer, static_cast<int>(allocated),
|
||||||
CodeObjectRequired::kNo);
|
CodeObjectRequired::kNo);
|
||||||
LabelConverter conv(buffer);
|
LabelConverter conv(buffer);
|
||||||
|
|
||||||
@ -451,8 +450,8 @@ MemMoveFunction CreateMemMoveFunction(Isolate* isolate) {
|
|||||||
CodeDesc desc;
|
CodeDesc desc;
|
||||||
masm.GetCode(isolate, &desc);
|
masm.GetCode(isolate, &desc);
|
||||||
DCHECK(!RelocInfo::RequiresRelocation(isolate, desc));
|
DCHECK(!RelocInfo::RequiresRelocation(isolate, desc));
|
||||||
Assembler::FlushICache(isolate, buffer, actual_size);
|
Assembler::FlushICache(isolate, buffer, allocated);
|
||||||
base::OS::SetReadAndExecutable(buffer, actual_size);
|
base::OS::SetReadAndExecutable(buffer, allocated);
|
||||||
// TODO(jkummerow): It would be nice to register this code creation event
|
// TODO(jkummerow): It would be nice to register this code creation event
|
||||||
// with the PROFILE / GDBJIT system.
|
// with the PROFILE / GDBJIT system.
|
||||||
return FUNCTION_CAST<MemMoveFunction>(buffer);
|
return FUNCTION_CAST<MemMoveFunction>(buffer);
|
||||||
|
@ -13,26 +13,26 @@
|
|||||||
namespace v8 {
|
namespace v8 {
|
||||||
namespace internal {
|
namespace internal {
|
||||||
|
|
||||||
|
|
||||||
#define __ masm.
|
#define __ masm.
|
||||||
|
|
||||||
#if defined(V8_HOST_ARCH_MIPS)
|
#if defined(V8_HOST_ARCH_MIPS)
|
||||||
|
|
||||||
MemCopyUint8Function CreateMemCopyUint8Function(Isolate* isolate,
|
MemCopyUint8Function CreateMemCopyUint8Function(Isolate* isolate,
|
||||||
MemCopyUint8Function stub) {
|
MemCopyUint8Function stub) {
|
||||||
#if defined(USE_SIMULATOR) || defined(_MIPS_ARCH_MIPS32R6) || \
|
#if defined(USE_SIMULATOR) || defined(_MIPS_ARCH_MIPS32R6) || \
|
||||||
defined(_MIPS_ARCH_MIPS32RX)
|
defined(_MIPS_ARCH_MIPS32RX)
|
||||||
return stub;
|
return stub;
|
||||||
#else
|
#else
|
||||||
size_t actual_size;
|
size_t allocated = 0;
|
||||||
byte* buffer = static_cast<byte*>(base::OS::Allocate(
|
byte* buffer =
|
||||||
3 * KB, &actual_size, base::OS::MemoryPermission::kReadWriteExecute));
|
AllocateSystemPage(isolate->heap()->GetRandomMmapAddr(), &allocated);
|
||||||
if (buffer == nullptr) return stub;
|
if (buffer == nullptr) return nullptr;
|
||||||
|
|
||||||
|
MacroAssembler masm(isolate, buffer, static_cast<int>(allocated),
|
||||||
|
CodeObjectRequired::kNo);
|
||||||
|
|
||||||
// This code assumes that cache lines are 32 bytes and if the cache line is
|
// This code assumes that cache lines are 32 bytes and if the cache line is
|
||||||
// larger it will not work correctly.
|
// larger it will not work correctly.
|
||||||
MacroAssembler masm(isolate, buffer, static_cast<int>(actual_size),
|
|
||||||
CodeObjectRequired::kNo);
|
|
||||||
|
|
||||||
{
|
{
|
||||||
Label lastb, unaligned, aligned, chkw,
|
Label lastb, unaligned, aligned, chkw,
|
||||||
loop16w, chk1w, wordCopy_loop, skip_pref, lastbloop,
|
loop16w, chk1w, wordCopy_loop, skip_pref, lastbloop,
|
||||||
@ -544,8 +544,8 @@ MemCopyUint8Function CreateMemCopyUint8Function(Isolate* isolate,
|
|||||||
masm.GetCode(isolate, &desc);
|
masm.GetCode(isolate, &desc);
|
||||||
DCHECK(!RelocInfo::RequiresRelocation(isolate, desc));
|
DCHECK(!RelocInfo::RequiresRelocation(isolate, desc));
|
||||||
|
|
||||||
Assembler::FlushICache(isolate, buffer, actual_size);
|
Assembler::FlushICache(isolate, buffer, allocated);
|
||||||
base::OS::SetReadAndExecutable(buffer, actual_size);
|
base::OS::SetReadAndExecutable(buffer, allocated);
|
||||||
return FUNCTION_CAST<MemCopyUint8Function>(buffer);
|
return FUNCTION_CAST<MemCopyUint8Function>(buffer);
|
||||||
#endif
|
#endif
|
||||||
}
|
}
|
||||||
@ -555,12 +555,12 @@ UnaryMathFunctionWithIsolate CreateSqrtFunction(Isolate* isolate) {
|
|||||||
#if defined(USE_SIMULATOR)
|
#if defined(USE_SIMULATOR)
|
||||||
return nullptr;
|
return nullptr;
|
||||||
#else
|
#else
|
||||||
size_t actual_size;
|
size_t allocated = 0;
|
||||||
byte* buffer = static_cast<byte*>(base::OS::Allocate(
|
byte* buffer =
|
||||||
1 * KB, &actual_size, base::OS::MemoryPermission::kReadWriteExecute));
|
AllocateSystemPage(isolate->heap()->GetRandomMmapAddr(), &allocated);
|
||||||
if (buffer == nullptr) return nullptr;
|
if (buffer == nullptr) return nullptr;
|
||||||
|
|
||||||
MacroAssembler masm(isolate, buffer, static_cast<int>(actual_size),
|
MacroAssembler masm(isolate, buffer, static_cast<int>(allocated),
|
||||||
CodeObjectRequired::kNo);
|
CodeObjectRequired::kNo);
|
||||||
|
|
||||||
__ MovFromFloatParameter(f12);
|
__ MovFromFloatParameter(f12);
|
||||||
@ -572,8 +572,8 @@ UnaryMathFunctionWithIsolate CreateSqrtFunction(Isolate* isolate) {
|
|||||||
masm.GetCode(isolate, &desc);
|
masm.GetCode(isolate, &desc);
|
||||||
DCHECK(!RelocInfo::RequiresRelocation(isolate, desc));
|
DCHECK(!RelocInfo::RequiresRelocation(isolate, desc));
|
||||||
|
|
||||||
Assembler::FlushICache(isolate, buffer, actual_size);
|
Assembler::FlushICache(isolate, buffer, allocated);
|
||||||
base::OS::SetReadAndExecutable(buffer, actual_size);
|
base::OS::SetReadAndExecutable(buffer, allocated);
|
||||||
return FUNCTION_CAST<UnaryMathFunctionWithIsolate>(buffer);
|
return FUNCTION_CAST<UnaryMathFunctionWithIsolate>(buffer);
|
||||||
#endif
|
#endif
|
||||||
}
|
}
|
||||||
|
@ -13,27 +13,26 @@
|
|||||||
namespace v8 {
|
namespace v8 {
|
||||||
namespace internal {
|
namespace internal {
|
||||||
|
|
||||||
|
|
||||||
#define __ masm.
|
#define __ masm.
|
||||||
|
|
||||||
|
|
||||||
#if defined(V8_HOST_ARCH_MIPS)
|
#if defined(V8_HOST_ARCH_MIPS)
|
||||||
|
|
||||||
MemCopyUint8Function CreateMemCopyUint8Function(Isolate* isolate,
|
MemCopyUint8Function CreateMemCopyUint8Function(Isolate* isolate,
|
||||||
MemCopyUint8Function stub) {
|
MemCopyUint8Function stub) {
|
||||||
#if defined(USE_SIMULATOR)
|
#if defined(USE_SIMULATOR)
|
||||||
return stub;
|
return stub;
|
||||||
#else
|
#else
|
||||||
|
|
||||||
size_t actual_size;
|
size_t allocated = 0;
|
||||||
byte* buffer = static_cast<byte*>(base::OS::Allocate(
|
byte* buffer =
|
||||||
3 * KB, &actual_size, base::OS::MemoryPermission::kReadWriteExecute));
|
AllocateSystemPage(isolate->heap()->GetRandomMmapAddr(), &allocated);
|
||||||
if (buffer == nullptr) return stub;
|
if (buffer == nullptr) return stub;
|
||||||
|
|
||||||
|
MacroAssembler masm(isolate, buffer, static_cast<int>(allocated),
|
||||||
|
CodeObjectRequired::kNo);
|
||||||
|
|
||||||
// This code assumes that cache lines are 32 bytes and if the cache line is
|
// This code assumes that cache lines are 32 bytes and if the cache line is
|
||||||
// larger it will not work correctly.
|
// larger it will not work correctly.
|
||||||
MacroAssembler masm(isolate, buffer, static_cast<int>(actual_size),
|
|
||||||
CodeObjectRequired::kNo);
|
|
||||||
|
|
||||||
{
|
{
|
||||||
Label lastb, unaligned, aligned, chkw,
|
Label lastb, unaligned, aligned, chkw,
|
||||||
loop16w, chk1w, wordCopy_loop, skip_pref, lastbloop,
|
loop16w, chk1w, wordCopy_loop, skip_pref, lastbloop,
|
||||||
@ -546,8 +545,8 @@ MemCopyUint8Function CreateMemCopyUint8Function(Isolate* isolate,
|
|||||||
masm.GetCode(isolte, &desc);
|
masm.GetCode(isolte, &desc);
|
||||||
DCHECK(!RelocInfo::RequiresRelocation(isolate, desc));
|
DCHECK(!RelocInfo::RequiresRelocation(isolate, desc));
|
||||||
|
|
||||||
Assembler::FlushICache(isolate, buffer, actual_size);
|
Assembler::FlushICache(isolate, buffer, allocated);
|
||||||
base::OS::SetReadAndExecutable(buffer, actual_size);
|
base::OS::SetReadAndExecutable(buffer, allocated);
|
||||||
return FUNCTION_CAST<MemCopyUint8Function>(buffer);
|
return FUNCTION_CAST<MemCopyUint8Function>(buffer);
|
||||||
#endif
|
#endif
|
||||||
}
|
}
|
||||||
@ -557,12 +556,12 @@ UnaryMathFunctionWithIsolate CreateSqrtFunction(Isolate* isolate) {
|
|||||||
#if defined(USE_SIMULATOR)
|
#if defined(USE_SIMULATOR)
|
||||||
return nullptr;
|
return nullptr;
|
||||||
#else
|
#else
|
||||||
size_t actual_size;
|
size_t allocated = 0;
|
||||||
byte* buffer = static_cast<byte*>(base::OS::Allocate(
|
byte* buffer =
|
||||||
1 * KB, &actual_size, base::OS::MemoryPermission::kReadWriteExecute));
|
AllocateSystemPage(isolate->heap()->GetRandomMmapAddr(), &allocated);
|
||||||
if (buffer == nullptr) return nullptr;
|
if (buffer == nullptr) return nullptr;
|
||||||
|
|
||||||
MacroAssembler masm(isolate, buffer, static_cast<int>(actual_size),
|
MacroAssembler masm(isolate, buffer, static_cast<int>(allocated),
|
||||||
CodeObjectRequired::kNo);
|
CodeObjectRequired::kNo);
|
||||||
|
|
||||||
__ MovFromFloatParameter(f12);
|
__ MovFromFloatParameter(f12);
|
||||||
@ -574,8 +573,8 @@ UnaryMathFunctionWithIsolate CreateSqrtFunction(Isolate* isolate) {
|
|||||||
masm.GetCode(isolate, &desc);
|
masm.GetCode(isolate, &desc);
|
||||||
DCHECK(!RelocInfo::RequiresRelocation(isolate, desc));
|
DCHECK(!RelocInfo::RequiresRelocation(isolate, desc));
|
||||||
|
|
||||||
Assembler::FlushICache(isolate, buffer, actual_size);
|
Assembler::FlushICache(isolate, buffer, allocated);
|
||||||
base::OS::SetReadAndExecutable(buffer, actual_size);
|
base::OS::SetReadAndExecutable(buffer, allocated);
|
||||||
return FUNCTION_CAST<UnaryMathFunctionWithIsolate>(buffer);
|
return FUNCTION_CAST<UnaryMathFunctionWithIsolate>(buffer);
|
||||||
#endif
|
#endif
|
||||||
}
|
}
|
||||||
|
@ -13,22 +13,21 @@
|
|||||||
namespace v8 {
|
namespace v8 {
|
||||||
namespace internal {
|
namespace internal {
|
||||||
|
|
||||||
|
|
||||||
#define __ masm.
|
#define __ masm.
|
||||||
|
|
||||||
UnaryMathFunctionWithIsolate CreateSqrtFunction(Isolate* isolate) {
|
UnaryMathFunctionWithIsolate CreateSqrtFunction(Isolate* isolate) {
|
||||||
#if defined(USE_SIMULATOR)
|
#if defined(USE_SIMULATOR)
|
||||||
return nullptr;
|
return nullptr;
|
||||||
#else
|
#else
|
||||||
size_t actual_size;
|
size_t allocated = 0;
|
||||||
byte* buffer = static_cast<byte*>(base::OS::Allocate(
|
byte* buffer =
|
||||||
1 * KB, &actual_size, base::OS::MemoryPermission::kReadWriteExecute));
|
AllocateSystemPage(isolate->heap()->GetRandomMmapAddr(), &allocated);
|
||||||
if (buffer == nullptr) return nullptr;
|
if (buffer == nullptr) return nullptr;
|
||||||
|
|
||||||
MacroAssembler masm(isolate, buffer, static_cast<int>(actual_size),
|
MacroAssembler masm(isolate, buffer, static_cast<int>(allocated),
|
||||||
CodeObjectRequired::kNo);
|
CodeObjectRequired::kNo);
|
||||||
|
|
||||||
// Called from C
|
// Called from C
|
||||||
__ function_descriptor();
|
__ function_descriptor();
|
||||||
|
|
||||||
__ MovFromFloatParameter(d1);
|
__ MovFromFloatParameter(d1);
|
||||||
@ -41,8 +40,8 @@ UnaryMathFunctionWithIsolate CreateSqrtFunction(Isolate* isolate) {
|
|||||||
DCHECK(ABI_USES_FUNCTION_DESCRIPTORS ||
|
DCHECK(ABI_USES_FUNCTION_DESCRIPTORS ||
|
||||||
!RelocInfo::RequiresRelocation(isolate, desc));
|
!RelocInfo::RequiresRelocation(isolate, desc));
|
||||||
|
|
||||||
Assembler::FlushICache(isolate, buffer, actual_size);
|
Assembler::FlushICache(isolate, buffer, allocated);
|
||||||
base::OS::SetReadAndExecutable(buffer, actual_size);
|
base::OS::SetReadAndExecutable(buffer, allocated);
|
||||||
return FUNCTION_CAST<UnaryMathFunctionWithIsolate>(buffer);
|
return FUNCTION_CAST<UnaryMathFunctionWithIsolate>(buffer);
|
||||||
#endif
|
#endif
|
||||||
}
|
}
|
||||||
|
@ -19,12 +19,12 @@ UnaryMathFunctionWithIsolate CreateSqrtFunction(Isolate* isolate) {
|
|||||||
#if defined(USE_SIMULATOR)
|
#if defined(USE_SIMULATOR)
|
||||||
return nullptr;
|
return nullptr;
|
||||||
#else
|
#else
|
||||||
size_t actual_size;
|
size_t allocated = 0;
|
||||||
byte* buffer = static_cast<byte*>(base::OS::Allocate(
|
byte* buffer =
|
||||||
1 * KB, &actual_size, base::OS::MemoryPermission::kReadWriteExecute));
|
AllocateSystemPage(isolate->heap()->GetRandomMmapAddr(), &allocated);
|
||||||
if (buffer == nullptr) return nullptr;
|
if (buffer == nullptr) return nullptr;
|
||||||
|
|
||||||
MacroAssembler masm(isolate, buffer, static_cast<int>(actual_size),
|
MacroAssembler masm(isolate, buffer, static_cast<int>(allocated),
|
||||||
CodeObjectRequired::kNo);
|
CodeObjectRequired::kNo);
|
||||||
|
|
||||||
__ MovFromFloatParameter(d0);
|
__ MovFromFloatParameter(d0);
|
||||||
@ -37,8 +37,8 @@ UnaryMathFunctionWithIsolate CreateSqrtFunction(Isolate* isolate) {
|
|||||||
DCHECK(ABI_USES_FUNCTION_DESCRIPTORS ||
|
DCHECK(ABI_USES_FUNCTION_DESCRIPTORS ||
|
||||||
!RelocInfo::RequiresRelocation(isolate, desc));
|
!RelocInfo::RequiresRelocation(isolate, desc));
|
||||||
|
|
||||||
Assembler::FlushICache(isolate, buffer, actual_size);
|
Assembler::FlushICache(isolate, buffer, allocated);
|
||||||
base::OS::SetReadAndExecutable(buffer, actual_size);
|
base::OS::SetReadAndExecutable(buffer, allocated);
|
||||||
return FUNCTION_CAST<UnaryMathFunctionWithIsolate>(buffer);
|
return FUNCTION_CAST<UnaryMathFunctionWithIsolate>(buffer);
|
||||||
#endif
|
#endif
|
||||||
}
|
}
|
||||||
|
@@ -13,17 +13,15 @@ namespace internal {

 #define __ masm.

 UnaryMathFunctionWithIsolate CreateSqrtFunction(Isolate* isolate) {
-  size_t actual_size;
-  // Allocate buffer in executable space.
-  byte* buffer = static_cast<byte*>(base::OS::Allocate(
-      1 * KB, &actual_size, base::OS::MemoryPermission::kReadWriteExecute,
-      isolate->heap()->GetRandomMmapAddr()));
+  size_t allocated = 0;
+  byte* buffer =
+      AllocateSystemPage(isolate->heap()->GetRandomMmapAddr(), &allocated);
  if (buffer == nullptr) return nullptr;

-  MacroAssembler masm(isolate, buffer, static_cast<int>(actual_size),
+  MacroAssembler masm(isolate, buffer, static_cast<int>(allocated),
                      CodeObjectRequired::kNo);

  // xmm0: raw double input.
  // Move double input into registers.
  __ Sqrtsd(xmm0, xmm0);
@@ -33,8 +31,8 @@ UnaryMathFunctionWithIsolate CreateSqrtFunction(Isolate* isolate) {
  masm.GetCode(isolate, &desc);
  DCHECK(!RelocInfo::RequiresRelocation(isolate, desc));

-  Assembler::FlushICache(isolate, buffer, actual_size);
-  base::OS::SetReadAndExecutable(buffer, actual_size);
+  Assembler::FlushICache(isolate, buffer, allocated);
+  base::OS::SetReadAndExecutable(buffer, allocated);
  return FUNCTION_CAST<UnaryMathFunctionWithIsolate>(buffer);
 }

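Editor's note: the generator stubs above all follow the same lifecycle after this change. The following is a condensed sketch assembled from the hunks in this patch, not code from the change itself; the function name CreateStubSketch is hypothetical, and the V8-internal declarations it leans on (AllocateSystemPage, MacroAssembler, Assembler::FlushICache, base::OS::SetReadAndExecutable) are assumed to be in scope as they are in the files being edited.

// Sketch only: allocate one read-write OS page at a randomized hint,
// emit code into it, then flush the icache and flip the page to R+X.
UnaryMathFunctionWithIsolate CreateStubSketch(Isolate* isolate) {
  size_t allocated = 0;
  byte* buffer =
      AllocateSystemPage(isolate->heap()->GetRandomMmapAddr(), &allocated);
  if (buffer == nullptr) return nullptr;

  MacroAssembler masm(isolate, buffer, static_cast<int>(allocated),
                      CodeObjectRequired::kNo);
  // ... emit the stub body through the masm here ...

  CodeDesc desc;
  masm.GetCode(isolate, &desc);

  Assembler::FlushICache(isolate, buffer, allocated);
  base::OS::SetReadAndExecutable(buffer, allocated);
  return FUNCTION_CAST<UnaryMathFunctionWithIsolate>(buffer);
}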
@@ -32,6 +32,7 @@

 #include "include/libplatform/libplatform.h"
 #include "include/v8-platform.h"
+#include "src/assembler.h"
 #include "src/debug/debug-interface.h"
 #include "src/factory.h"
 #include "src/flags.h"
@@ -567,6 +568,19 @@ static inline void CheckDoubleEquals(double expected, double actual) {
  CHECK_GE(expected, actual - kEpsilon);
 }

+static inline uint8_t* AllocateAssemblerBuffer(
+    size_t* allocated,
+    size_t requested = v8::internal::AssemblerBase::kMinimalBufferSize) {
+  size_t page_size = v8::base::OS::AllocatePageSize();
+  size_t alloc_size = RoundUp(requested, page_size);
+  void* result =
+      v8::base::OS::Allocate(nullptr, alloc_size, page_size,
+                             v8::base::OS::MemoryPermission::kReadWriteExecute);
+  CHECK(result);
+  *allocated = alloc_size;
+  return static_cast<uint8_t*>(result);
+}
+
 static v8::debug::DebugDelegate dummy_delegate;

 static inline void EnableDebugger(v8::Isolate* isolate) {
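Editor's note: a minimal sketch of how a cctest calls the new helper, mirroring the test hunks later in this patch. The variable names are illustrative only; both call shapes (default size and an explicit request, which is rounded up to whole pages) appear in the diff below.

// Sketch only: default request gives at least one read-write-execute page.
size_t allocated;
byte* buffer = AllocateAssemblerBuffer(&allocated);

// A larger buffer can be requested; the helper rounds it up to page size.
size_t big_allocated;
byte* big_buffer =
    AllocateAssemblerBuffer(&big_allocated, 4 * Assembler::kMinimalBufferSize);

Assembler masm(CcTest::i_isolate(), buffer, static_cast<int>(allocated));
// ... emit code, call masm.GetCode(...), then invoke via FUNCTION_CAST ...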
@@ -174,15 +174,14 @@ static void InitializeVM() {

 #else  // ifdef USE_SIMULATOR.
 // Run the test on real hardware or models.
 #define SETUP_SIZE(buf_size) \
  Isolate* isolate = CcTest::i_isolate(); \
  HandleScope scope(isolate); \
  CHECK_NOT_NULL(isolate); \
-  size_t actual_size; \
-  byte* buf = static_cast<byte*>(v8::base::OS::Allocate( \
-      buf_size, &actual_size, base::OS::MemoryPermission::kReadWriteExecute)); \
-  MacroAssembler masm(isolate, buf, static_cast<unsigned>(actual_size), \
-                      v8::internal::CodeObjectRequired::kYes); \
+  size_t allocated; \
+  byte* buffer = AllocateAssemblerBuffer(&allocated); \
+  MacroAssembler masm(isolate, buf, static_cast<int>(allocated), \
+                      v8::internal::CodeObjectRequired::kYes); \
  RegisterDump core;

 #define RESET() \
@ -67,27 +67,13 @@ static const Register arg1 = rdi;
|
|||||||
static const Register arg2 = rsi;
|
static const Register arg2 = rsi;
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
#define __ assm.
|
#define __ masm.
|
||||||
|
|
||||||
namespace {
|
|
||||||
|
|
||||||
byte* AllocateExecutablePage(int* actual_size) {
|
|
||||||
size_t allocated = 0;
|
|
||||||
void* result =
|
|
||||||
v8::base::OS::Allocate(Assembler::kMinimalBufferSize, &allocated,
|
|
||||||
v8::base::OS::MemoryPermission::kReadWriteExecute);
|
|
||||||
CHECK(result);
|
|
||||||
*actual_size = static_cast<int>(allocated);
|
|
||||||
return static_cast<byte*>(result);
|
|
||||||
}
|
|
||||||
|
|
||||||
} // namespace
|
|
||||||
|
|
||||||
TEST(AssemblerX64ReturnOperation) {
|
TEST(AssemblerX64ReturnOperation) {
|
||||||
CcTest::InitializeVM();
|
CcTest::InitializeVM();
|
||||||
int actual_size;
|
size_t allocated;
|
||||||
byte* buffer = AllocateExecutablePage(&actual_size);
|
byte* buffer = AllocateAssemblerBuffer(&allocated);
|
||||||
Assembler assm(CcTest::i_isolate(), buffer, actual_size);
|
Assembler masm(CcTest::i_isolate(), buffer, static_cast<int>(allocated));
|
||||||
|
|
||||||
// Assemble a simple function that copies argument 2 and returns it.
|
// Assemble a simple function that copies argument 2 and returns it.
|
||||||
__ movq(rax, arg2);
|
__ movq(rax, arg2);
|
||||||
@ -95,7 +81,7 @@ TEST(AssemblerX64ReturnOperation) {
|
|||||||
__ ret(0);
|
__ ret(0);
|
||||||
|
|
||||||
CodeDesc desc;
|
CodeDesc desc;
|
||||||
assm.GetCode(CcTest::i_isolate(), &desc);
|
masm.GetCode(CcTest::i_isolate(), &desc);
|
||||||
// Call the function from C++.
|
// Call the function from C++.
|
||||||
int result = FUNCTION_CAST<F2>(buffer)(3, 2);
|
int result = FUNCTION_CAST<F2>(buffer)(3, 2);
|
||||||
CHECK_EQ(2, result);
|
CHECK_EQ(2, result);
|
||||||
@ -104,9 +90,9 @@ TEST(AssemblerX64ReturnOperation) {
|
|||||||
|
|
||||||
TEST(AssemblerX64StackOperations) {
|
TEST(AssemblerX64StackOperations) {
|
||||||
CcTest::InitializeVM();
|
CcTest::InitializeVM();
|
||||||
int actual_size;
|
size_t allocated;
|
||||||
byte* buffer = AllocateExecutablePage(&actual_size);
|
byte* buffer = AllocateAssemblerBuffer(&allocated);
|
||||||
Assembler assm(CcTest::i_isolate(), buffer, actual_size);
|
Assembler masm(CcTest::i_isolate(), buffer, static_cast<int>(allocated));
|
||||||
|
|
||||||
// Assemble a simple function that copies argument 2 and returns it.
|
// Assemble a simple function that copies argument 2 and returns it.
|
||||||
// We compile without stack frame pointers, so the gdb debugger shows
|
// We compile without stack frame pointers, so the gdb debugger shows
|
||||||
@ -124,7 +110,7 @@ TEST(AssemblerX64StackOperations) {
|
|||||||
__ ret(0);
|
__ ret(0);
|
||||||
|
|
||||||
CodeDesc desc;
|
CodeDesc desc;
|
||||||
assm.GetCode(CcTest::i_isolate(), &desc);
|
masm.GetCode(CcTest::i_isolate(), &desc);
|
||||||
// Call the function from C++.
|
// Call the function from C++.
|
||||||
int result = FUNCTION_CAST<F2>(buffer)(3, 2);
|
int result = FUNCTION_CAST<F2>(buffer)(3, 2);
|
||||||
CHECK_EQ(2, result);
|
CHECK_EQ(2, result);
|
||||||
@ -133,9 +119,9 @@ TEST(AssemblerX64StackOperations) {
|
|||||||
|
|
||||||
TEST(AssemblerX64ArithmeticOperations) {
|
TEST(AssemblerX64ArithmeticOperations) {
|
||||||
CcTest::InitializeVM();
|
CcTest::InitializeVM();
|
||||||
int actual_size;
|
size_t allocated;
|
||||||
byte* buffer = AllocateExecutablePage(&actual_size);
|
byte* buffer = AllocateAssemblerBuffer(&allocated);
|
||||||
Assembler assm(CcTest::i_isolate(), buffer, actual_size);
|
Assembler masm(CcTest::i_isolate(), buffer, static_cast<int>(allocated));
|
||||||
|
|
||||||
// Assemble a simple function that adds arguments returning the sum.
|
// Assemble a simple function that adds arguments returning the sum.
|
||||||
__ movq(rax, arg2);
|
__ movq(rax, arg2);
|
||||||
@ -143,7 +129,7 @@ TEST(AssemblerX64ArithmeticOperations) {
|
|||||||
__ ret(0);
|
__ ret(0);
|
||||||
|
|
||||||
CodeDesc desc;
|
CodeDesc desc;
|
||||||
assm.GetCode(CcTest::i_isolate(), &desc);
|
masm.GetCode(CcTest::i_isolate(), &desc);
|
||||||
// Call the function from C++.
|
// Call the function from C++.
|
||||||
int result = FUNCTION_CAST<F2>(buffer)(3, 2);
|
int result = FUNCTION_CAST<F2>(buffer)(3, 2);
|
||||||
CHECK_EQ(5, result);
|
CHECK_EQ(5, result);
|
||||||
@ -152,9 +138,9 @@ TEST(AssemblerX64ArithmeticOperations) {
|
|||||||
|
|
||||||
TEST(AssemblerX64CmpbOperation) {
|
TEST(AssemblerX64CmpbOperation) {
|
||||||
CcTest::InitializeVM();
|
CcTest::InitializeVM();
|
||||||
int actual_size;
|
size_t allocated;
|
||||||
byte* buffer = AllocateExecutablePage(&actual_size);
|
byte* buffer = AllocateAssemblerBuffer(&allocated);
|
||||||
Assembler assm(CcTest::i_isolate(), buffer, actual_size);
|
Assembler masm(CcTest::i_isolate(), buffer, static_cast<int>(allocated));
|
||||||
|
|
||||||
// Assemble a function that compare argument byte returing 1 if equal else 0.
|
// Assemble a function that compare argument byte returing 1 if equal else 0.
|
||||||
// On Windows, it compares rcx with rdx which does not require REX prefix;
|
// On Windows, it compares rcx with rdx which does not require REX prefix;
|
||||||
@ -169,7 +155,7 @@ TEST(AssemblerX64CmpbOperation) {
|
|||||||
__ ret(0);
|
__ ret(0);
|
||||||
|
|
||||||
CodeDesc desc;
|
CodeDesc desc;
|
||||||
assm.GetCode(CcTest::i_isolate(), &desc);
|
masm.GetCode(CcTest::i_isolate(), &desc);
|
||||||
// Call the function from C++.
|
// Call the function from C++.
|
||||||
int result = FUNCTION_CAST<F2>(buffer)(0x1002, 0x2002);
|
int result = FUNCTION_CAST<F2>(buffer)(0x1002, 0x2002);
|
||||||
CHECK_EQ(1, result);
|
CHECK_EQ(1, result);
|
||||||
@ -179,14 +165,14 @@ TEST(AssemblerX64CmpbOperation) {
|
|||||||
|
|
||||||
TEST(Regression684407) {
|
TEST(Regression684407) {
|
||||||
CcTest::InitializeVM();
|
CcTest::InitializeVM();
|
||||||
int actual_size;
|
size_t allocated;
|
||||||
byte* buffer = AllocateExecutablePage(&actual_size);
|
byte* buffer = AllocateAssemblerBuffer(&allocated);
|
||||||
Assembler assm(CcTest::i_isolate(), buffer, actual_size);
|
Assembler masm(CcTest::i_isolate(), buffer, static_cast<int>(allocated));
|
||||||
|
|
||||||
Address before = assm.pc();
|
Address before = masm.pc();
|
||||||
__ cmpl(Operand(arg1, 0),
|
__ cmpl(Operand(arg1, 0),
|
||||||
Immediate(0, RelocInfo::WASM_FUNCTION_TABLE_SIZE_REFERENCE));
|
Immediate(0, RelocInfo::WASM_FUNCTION_TABLE_SIZE_REFERENCE));
|
||||||
Address after = assm.pc();
|
Address after = masm.pc();
|
||||||
size_t instruction_size = static_cast<size_t>(after - before);
|
size_t instruction_size = static_cast<size_t>(after - before);
|
||||||
// Check that the immediate is not encoded as uint8.
|
// Check that the immediate is not encoded as uint8.
|
||||||
CHECK_LT(sizeof(uint32_t), instruction_size);
|
CHECK_LT(sizeof(uint32_t), instruction_size);
|
||||||
@ -194,9 +180,9 @@ TEST(Regression684407) {
|
|||||||
|
|
||||||
TEST(AssemblerX64ImulOperation) {
|
TEST(AssemblerX64ImulOperation) {
|
||||||
CcTest::InitializeVM();
|
CcTest::InitializeVM();
|
||||||
int actual_size;
|
size_t allocated;
|
||||||
byte* buffer = AllocateExecutablePage(&actual_size);
|
byte* buffer = AllocateAssemblerBuffer(&allocated);
|
||||||
Assembler assm(CcTest::i_isolate(), buffer, actual_size);
|
Assembler masm(CcTest::i_isolate(), buffer, static_cast<int>(allocated));
|
||||||
|
|
||||||
// Assemble a simple function that multiplies arguments returning the high
|
// Assemble a simple function that multiplies arguments returning the high
|
||||||
// word.
|
// word.
|
||||||
@ -206,7 +192,7 @@ TEST(AssemblerX64ImulOperation) {
|
|||||||
__ ret(0);
|
__ ret(0);
|
||||||
|
|
||||||
CodeDesc desc;
|
CodeDesc desc;
|
||||||
assm.GetCode(CcTest::i_isolate(), &desc);
|
masm.GetCode(CcTest::i_isolate(), &desc);
|
||||||
// Call the function from C++.
|
// Call the function from C++.
|
||||||
int result = FUNCTION_CAST<F2>(buffer)(3, 2);
|
int result = FUNCTION_CAST<F2>(buffer)(3, 2);
|
||||||
CHECK_EQ(0, result);
|
CHECK_EQ(0, result);
|
||||||
@ -219,9 +205,9 @@ TEST(AssemblerX64ImulOperation) {
|
|||||||
TEST(AssemblerX64testbwqOperation) {
|
TEST(AssemblerX64testbwqOperation) {
|
||||||
CcTest::InitializeVM();
|
CcTest::InitializeVM();
|
||||||
v8::HandleScope scope(CcTest::isolate());
|
v8::HandleScope scope(CcTest::isolate());
|
||||||
int actual_size;
|
size_t allocated;
|
||||||
byte* buffer = AllocateExecutablePage(&actual_size);
|
byte* buffer = AllocateAssemblerBuffer(&allocated);
|
||||||
Assembler assm(CcTest::i_isolate(), buffer, actual_size);
|
Assembler masm(CcTest::i_isolate(), buffer, static_cast<int>(allocated));
|
||||||
|
|
||||||
__ pushq(rbx);
|
__ pushq(rbx);
|
||||||
__ pushq(rdi);
|
__ pushq(rdi);
|
||||||
@ -375,7 +361,7 @@ TEST(AssemblerX64testbwqOperation) {
|
|||||||
__ ret(0);
|
__ ret(0);
|
||||||
|
|
||||||
CodeDesc desc;
|
CodeDesc desc;
|
||||||
assm.GetCode(CcTest::i_isolate(), &desc);
|
masm.GetCode(CcTest::i_isolate(), &desc);
|
||||||
// Call the function from C++.
|
// Call the function from C++.
|
||||||
int result = FUNCTION_CAST<F2>(buffer)(0, 0);
|
int result = FUNCTION_CAST<F2>(buffer)(0, 0);
|
||||||
CHECK_EQ(1, result);
|
CHECK_EQ(1, result);
|
||||||
@ -383,9 +369,9 @@ TEST(AssemblerX64testbwqOperation) {
|
|||||||
|
|
||||||
TEST(AssemblerX64XchglOperations) {
|
TEST(AssemblerX64XchglOperations) {
|
||||||
CcTest::InitializeVM();
|
CcTest::InitializeVM();
|
||||||
int actual_size;
|
size_t allocated;
|
||||||
byte* buffer = AllocateExecutablePage(&actual_size);
|
byte* buffer = AllocateAssemblerBuffer(&allocated);
|
||||||
Assembler assm(CcTest::i_isolate(), buffer, actual_size);
|
Assembler masm(CcTest::i_isolate(), buffer, static_cast<int>(allocated));
|
||||||
|
|
||||||
__ movq(rax, Operand(arg1, 0));
|
__ movq(rax, Operand(arg1, 0));
|
||||||
__ movq(r11, Operand(arg2, 0));
|
__ movq(r11, Operand(arg2, 0));
|
||||||
@ -395,7 +381,7 @@ TEST(AssemblerX64XchglOperations) {
|
|||||||
__ ret(0);
|
__ ret(0);
|
||||||
|
|
||||||
CodeDesc desc;
|
CodeDesc desc;
|
||||||
assm.GetCode(CcTest::i_isolate(), &desc);
|
masm.GetCode(CcTest::i_isolate(), &desc);
|
||||||
// Call the function from C++.
|
// Call the function from C++.
|
||||||
uint64_t left = V8_2PART_UINT64_C(0x10000000, 20000000);
|
uint64_t left = V8_2PART_UINT64_C(0x10000000, 20000000);
|
||||||
uint64_t right = V8_2PART_UINT64_C(0x30000000, 40000000);
|
uint64_t right = V8_2PART_UINT64_C(0x30000000, 40000000);
|
||||||
@ -408,16 +394,16 @@ TEST(AssemblerX64XchglOperations) {
|
|||||||
|
|
||||||
TEST(AssemblerX64OrlOperations) {
|
TEST(AssemblerX64OrlOperations) {
|
||||||
CcTest::InitializeVM();
|
CcTest::InitializeVM();
|
||||||
int actual_size;
|
size_t allocated;
|
||||||
byte* buffer = AllocateExecutablePage(&actual_size);
|
byte* buffer = AllocateAssemblerBuffer(&allocated);
|
||||||
Assembler assm(CcTest::i_isolate(), buffer, actual_size);
|
Assembler masm(CcTest::i_isolate(), buffer, static_cast<int>(allocated));
|
||||||
|
|
||||||
__ movq(rax, Operand(arg2, 0));
|
__ movq(rax, Operand(arg2, 0));
|
||||||
__ orl(Operand(arg1, 0), rax);
|
__ orl(Operand(arg1, 0), rax);
|
||||||
__ ret(0);
|
__ ret(0);
|
||||||
|
|
||||||
CodeDesc desc;
|
CodeDesc desc;
|
||||||
assm.GetCode(CcTest::i_isolate(), &desc);
|
masm.GetCode(CcTest::i_isolate(), &desc);
|
||||||
// Call the function from C++.
|
// Call the function from C++.
|
||||||
uint64_t left = V8_2PART_UINT64_C(0x10000000, 20000000);
|
uint64_t left = V8_2PART_UINT64_C(0x10000000, 20000000);
|
||||||
uint64_t right = V8_2PART_UINT64_C(0x30000000, 40000000);
|
uint64_t right = V8_2PART_UINT64_C(0x30000000, 40000000);
|
||||||
@ -429,16 +415,16 @@ TEST(AssemblerX64OrlOperations) {
|
|||||||
|
|
||||||
TEST(AssemblerX64RollOperations) {
|
TEST(AssemblerX64RollOperations) {
|
||||||
CcTest::InitializeVM();
|
CcTest::InitializeVM();
|
||||||
int actual_size;
|
size_t allocated;
|
||||||
byte* buffer = AllocateExecutablePage(&actual_size);
|
byte* buffer = AllocateAssemblerBuffer(&allocated);
|
||||||
Assembler assm(CcTest::i_isolate(), buffer, actual_size);
|
Assembler masm(CcTest::i_isolate(), buffer, static_cast<int>(allocated));
|
||||||
|
|
||||||
__ movq(rax, arg1);
|
__ movq(rax, arg1);
|
||||||
__ roll(rax, Immediate(1));
|
__ roll(rax, Immediate(1));
|
||||||
__ ret(0);
|
__ ret(0);
|
||||||
|
|
||||||
CodeDesc desc;
|
CodeDesc desc;
|
||||||
assm.GetCode(CcTest::i_isolate(), &desc);
|
masm.GetCode(CcTest::i_isolate(), &desc);
|
||||||
// Call the function from C++.
|
// Call the function from C++.
|
||||||
uint64_t src = V8_2PART_UINT64_C(0x10000000, C0000000);
|
uint64_t src = V8_2PART_UINT64_C(0x10000000, C0000000);
|
||||||
uint64_t result = FUNCTION_CAST<F5>(buffer)(src);
|
uint64_t result = FUNCTION_CAST<F5>(buffer)(src);
|
||||||
@ -448,16 +434,16 @@ TEST(AssemblerX64RollOperations) {
|
|||||||
|
|
||||||
TEST(AssemblerX64SublOperations) {
|
TEST(AssemblerX64SublOperations) {
|
||||||
CcTest::InitializeVM();
|
CcTest::InitializeVM();
|
||||||
int actual_size;
|
size_t allocated;
|
||||||
byte* buffer = AllocateExecutablePage(&actual_size);
|
byte* buffer = AllocateAssemblerBuffer(&allocated);
|
||||||
Assembler assm(CcTest::i_isolate(), buffer, actual_size);
|
Assembler masm(CcTest::i_isolate(), buffer, static_cast<int>(allocated));
|
||||||
|
|
||||||
__ movq(rax, Operand(arg2, 0));
|
__ movq(rax, Operand(arg2, 0));
|
||||||
__ subl(Operand(arg1, 0), rax);
|
__ subl(Operand(arg1, 0), rax);
|
||||||
__ ret(0);
|
__ ret(0);
|
||||||
|
|
||||||
CodeDesc desc;
|
CodeDesc desc;
|
||||||
assm.GetCode(CcTest::i_isolate(), &desc);
|
masm.GetCode(CcTest::i_isolate(), &desc);
|
||||||
// Call the function from C++.
|
// Call the function from C++.
|
||||||
uint64_t left = V8_2PART_UINT64_C(0x10000000, 20000000);
|
uint64_t left = V8_2PART_UINT64_C(0x10000000, 20000000);
|
||||||
uint64_t right = V8_2PART_UINT64_C(0x30000000, 40000000);
|
uint64_t right = V8_2PART_UINT64_C(0x30000000, 40000000);
|
||||||
@ -469,9 +455,9 @@ TEST(AssemblerX64SublOperations) {
|
|||||||
|
|
||||||
TEST(AssemblerX64TestlOperations) {
|
TEST(AssemblerX64TestlOperations) {
|
||||||
CcTest::InitializeVM();
|
CcTest::InitializeVM();
|
||||||
int actual_size;
|
size_t allocated;
|
||||||
byte* buffer = AllocateExecutablePage(&actual_size);
|
byte* buffer = AllocateAssemblerBuffer(&allocated);
|
||||||
Assembler assm(CcTest::i_isolate(), buffer, actual_size);
|
Assembler masm(CcTest::i_isolate(), buffer, static_cast<int>(allocated));
|
||||||
|
|
||||||
// Set rax with the ZF flag of the testl instruction.
|
// Set rax with the ZF flag of the testl instruction.
|
||||||
Label done;
|
Label done;
|
||||||
@ -484,7 +470,7 @@ TEST(AssemblerX64TestlOperations) {
|
|||||||
__ ret(0);
|
__ ret(0);
|
||||||
|
|
||||||
CodeDesc desc;
|
CodeDesc desc;
|
||||||
assm.GetCode(CcTest::i_isolate(), &desc);
|
masm.GetCode(CcTest::i_isolate(), &desc);
|
||||||
// Call the function from C++.
|
// Call the function from C++.
|
||||||
uint64_t left = V8_2PART_UINT64_C(0x10000000, 20000000);
|
uint64_t left = V8_2PART_UINT64_C(0x10000000, 20000000);
|
||||||
uint64_t right = V8_2PART_UINT64_C(0x30000000, 00000000);
|
uint64_t right = V8_2PART_UINT64_C(0x30000000, 00000000);
|
||||||
@ -495,9 +481,9 @@ TEST(AssemblerX64TestlOperations) {
|
|||||||
TEST(AssemblerX64TestwOperations) {
|
TEST(AssemblerX64TestwOperations) {
|
||||||
typedef uint16_t (*F)(uint16_t * x);
|
typedef uint16_t (*F)(uint16_t * x);
|
||||||
CcTest::InitializeVM();
|
CcTest::InitializeVM();
|
||||||
int actual_size;
|
size_t allocated;
|
||||||
byte* buffer = AllocateExecutablePage(&actual_size);
|
byte* buffer = AllocateAssemblerBuffer(&allocated);
|
||||||
Assembler assm(CcTest::i_isolate(), buffer, actual_size);
|
Assembler masm(CcTest::i_isolate(), buffer, static_cast<int>(allocated));
|
||||||
|
|
||||||
// Set rax with the ZF flag of the testl instruction.
|
// Set rax with the ZF flag of the testl instruction.
|
||||||
Label done;
|
Label done;
|
||||||
@ -509,7 +495,7 @@ TEST(AssemblerX64TestwOperations) {
|
|||||||
__ ret(0);
|
__ ret(0);
|
||||||
|
|
||||||
CodeDesc desc;
|
CodeDesc desc;
|
||||||
assm.GetCode(CcTest::i_isolate(), &desc);
|
masm.GetCode(CcTest::i_isolate(), &desc);
|
||||||
// Call the function from C++.
|
// Call the function from C++.
|
||||||
uint16_t operand = 0x8000;
|
uint16_t operand = 0x8000;
|
||||||
uint16_t result = FUNCTION_CAST<F>(buffer)(&operand);
|
uint16_t result = FUNCTION_CAST<F>(buffer)(&operand);
|
||||||
@ -518,16 +504,16 @@ TEST(AssemblerX64TestwOperations) {
|
|||||||
|
|
||||||
TEST(AssemblerX64XorlOperations) {
|
TEST(AssemblerX64XorlOperations) {
|
||||||
CcTest::InitializeVM();
|
CcTest::InitializeVM();
|
||||||
int actual_size;
|
size_t allocated;
|
||||||
byte* buffer = AllocateExecutablePage(&actual_size);
|
byte* buffer = AllocateAssemblerBuffer(&allocated);
|
||||||
Assembler assm(CcTest::i_isolate(), buffer, actual_size);
|
Assembler masm(CcTest::i_isolate(), buffer, static_cast<int>(allocated));
|
||||||
|
|
||||||
__ movq(rax, Operand(arg2, 0));
|
__ movq(rax, Operand(arg2, 0));
|
||||||
__ xorl(Operand(arg1, 0), rax);
|
__ xorl(Operand(arg1, 0), rax);
|
||||||
__ ret(0);
|
__ ret(0);
|
||||||
|
|
||||||
CodeDesc desc;
|
CodeDesc desc;
|
||||||
assm.GetCode(CcTest::i_isolate(), &desc);
|
masm.GetCode(CcTest::i_isolate(), &desc);
|
||||||
// Call the function from C++.
|
// Call the function from C++.
|
||||||
uint64_t left = V8_2PART_UINT64_C(0x10000000, 20000000);
|
uint64_t left = V8_2PART_UINT64_C(0x10000000, 20000000);
|
||||||
uint64_t right = V8_2PART_UINT64_C(0x30000000, 60000000);
|
uint64_t right = V8_2PART_UINT64_C(0x30000000, 60000000);
|
||||||
@ -539,9 +525,9 @@ TEST(AssemblerX64XorlOperations) {
|
|||||||
|
|
||||||
TEST(AssemblerX64MemoryOperands) {
|
TEST(AssemblerX64MemoryOperands) {
|
||||||
CcTest::InitializeVM();
|
CcTest::InitializeVM();
|
||||||
int actual_size;
|
size_t allocated;
|
||||||
byte* buffer = AllocateExecutablePage(&actual_size);
|
byte* buffer = AllocateAssemblerBuffer(&allocated);
|
||||||
Assembler assm(CcTest::i_isolate(), buffer, actual_size);
|
Assembler masm(CcTest::i_isolate(), buffer, static_cast<int>(allocated));
|
||||||
|
|
||||||
// Assemble a simple function that copies argument 2 and returns it.
|
// Assemble a simple function that copies argument 2 and returns it.
|
||||||
__ pushq(rbp);
|
__ pushq(rbp);
|
||||||
@ -561,7 +547,7 @@ TEST(AssemblerX64MemoryOperands) {
|
|||||||
__ ret(0);
|
__ ret(0);
|
||||||
|
|
||||||
CodeDesc desc;
|
CodeDesc desc;
|
||||||
assm.GetCode(CcTest::i_isolate(), &desc);
|
masm.GetCode(CcTest::i_isolate(), &desc);
|
||||||
// Call the function from C++.
|
// Call the function from C++.
|
||||||
int result = FUNCTION_CAST<F2>(buffer)(3, 2);
|
int result = FUNCTION_CAST<F2>(buffer)(3, 2);
|
||||||
CHECK_EQ(3, result);
|
CHECK_EQ(3, result);
|
||||||
@ -570,9 +556,9 @@ TEST(AssemblerX64MemoryOperands) {
|
|||||||
|
|
||||||
TEST(AssemblerX64ControlFlow) {
|
TEST(AssemblerX64ControlFlow) {
|
||||||
CcTest::InitializeVM();
|
CcTest::InitializeVM();
|
||||||
int actual_size;
|
size_t allocated;
|
||||||
byte* buffer = AllocateExecutablePage(&actual_size);
|
byte* buffer = AllocateAssemblerBuffer(&allocated);
|
||||||
Assembler assm(CcTest::i_isolate(), buffer, actual_size);
|
Assembler masm(CcTest::i_isolate(), buffer, static_cast<int>(allocated));
|
||||||
|
|
||||||
// Assemble a simple function that copies argument 1 and returns it.
|
// Assemble a simple function that copies argument 1 and returns it.
|
||||||
__ pushq(rbp);
|
__ pushq(rbp);
|
||||||
@ -587,7 +573,7 @@ TEST(AssemblerX64ControlFlow) {
|
|||||||
__ ret(0);
|
__ ret(0);
|
||||||
|
|
||||||
CodeDesc desc;
|
CodeDesc desc;
|
||||||
assm.GetCode(CcTest::i_isolate(), &desc);
|
masm.GetCode(CcTest::i_isolate(), &desc);
|
||||||
// Call the function from C++.
|
// Call the function from C++.
|
||||||
int result = FUNCTION_CAST<F2>(buffer)(3, 2);
|
int result = FUNCTION_CAST<F2>(buffer)(3, 2);
|
||||||
CHECK_EQ(3, result);
|
CHECK_EQ(3, result);
|
||||||
@ -596,9 +582,9 @@ TEST(AssemblerX64ControlFlow) {
|
|||||||
|
|
||||||
TEST(AssemblerX64LoopImmediates) {
|
TEST(AssemblerX64LoopImmediates) {
|
||||||
CcTest::InitializeVM();
|
CcTest::InitializeVM();
|
||||||
int actual_size;
|
size_t allocated;
|
||||||
byte* buffer = AllocateExecutablePage(&actual_size);
|
byte* buffer = AllocateAssemblerBuffer(&allocated);
|
||||||
Assembler assm(CcTest::i_isolate(), buffer, actual_size);
|
Assembler masm(CcTest::i_isolate(), buffer, static_cast<int>(allocated));
|
||||||
|
|
||||||
// Assemble two loops using rax as counter, and verify the ending counts.
|
// Assemble two loops using rax as counter, and verify the ending counts.
|
||||||
Label Fail;
|
Label Fail;
|
||||||
@ -635,7 +621,7 @@ TEST(AssemblerX64LoopImmediates) {
|
|||||||
__ ret(0);
|
__ ret(0);
|
||||||
|
|
||||||
CodeDesc desc;
|
CodeDesc desc;
|
||||||
assm.GetCode(CcTest::i_isolate(), &desc);
|
masm.GetCode(CcTest::i_isolate(), &desc);
|
||||||
// Call the function from C++.
|
// Call the function from C++.
|
||||||
int result = FUNCTION_CAST<F0>(buffer)();
|
int result = FUNCTION_CAST<F0>(buffer)();
|
||||||
CHECK_EQ(1, result);
|
CHECK_EQ(1, result);
|
||||||
@ -689,7 +675,7 @@ TEST(AssemblerX64LabelChaining) {
|
|||||||
// Test chaining of label usages within instructions (issue 1644).
|
// Test chaining of label usages within instructions (issue 1644).
|
||||||
CcTest::InitializeVM();
|
CcTest::InitializeVM();
|
||||||
v8::HandleScope scope(CcTest::isolate());
|
v8::HandleScope scope(CcTest::isolate());
|
||||||
Assembler assm(CcTest::i_isolate(), nullptr, 0);
|
Assembler masm(CcTest::i_isolate(), nullptr, 0);
|
||||||
|
|
||||||
Label target;
|
Label target;
|
||||||
__ j(equal, &target);
|
__ j(equal, &target);
|
||||||
@ -704,7 +690,7 @@ TEST(AssemblerMultiByteNop) {
|
|||||||
v8::HandleScope scope(CcTest::isolate());
|
v8::HandleScope scope(CcTest::isolate());
|
||||||
byte buffer[1024];
|
byte buffer[1024];
|
||||||
Isolate* isolate = CcTest::i_isolate();
|
Isolate* isolate = CcTest::i_isolate();
|
||||||
Assembler assm(isolate, buffer, sizeof(buffer));
|
Assembler masm(isolate, buffer, sizeof(buffer));
|
||||||
__ pushq(rbx);
|
__ pushq(rbx);
|
||||||
__ pushq(rcx);
|
__ pushq(rcx);
|
||||||
__ pushq(rdx);
|
__ pushq(rdx);
|
||||||
@ -717,9 +703,9 @@ TEST(AssemblerMultiByteNop) {
|
|||||||
__ movq(rdi, Immediate(5));
|
__ movq(rdi, Immediate(5));
|
||||||
__ movq(rsi, Immediate(6));
|
__ movq(rsi, Immediate(6));
|
||||||
for (int i = 0; i < 16; i++) {
|
for (int i = 0; i < 16; i++) {
|
||||||
int before = assm.pc_offset();
|
int before = masm.pc_offset();
|
||||||
__ Nop(i);
|
__ Nop(i);
|
||||||
CHECK_EQ(assm.pc_offset() - before, i);
|
CHECK_EQ(masm.pc_offset() - before, i);
|
||||||
}
|
}
|
||||||
|
|
||||||
Label fail;
|
Label fail;
|
||||||
@ -752,7 +738,7 @@ TEST(AssemblerMultiByteNop) {
|
|||||||
__ ret(0);
|
__ ret(0);
|
||||||
|
|
||||||
CodeDesc desc;
|
CodeDesc desc;
|
||||||
assm.GetCode(isolate, &desc);
|
masm.GetCode(isolate, &desc);
|
||||||
Handle<Code> code =
|
Handle<Code> code =
|
||||||
isolate->factory()->NewCode(desc, Code::STUB, Handle<Code>());
|
isolate->factory()->NewCode(desc, Code::STUB, Handle<Code>());
|
||||||
|
|
||||||
@ -775,7 +761,7 @@ void DoSSE2(const v8::FunctionCallbackInfo<v8::Value>& args) {
|
|||||||
CHECK_EQ(ELEMENT_COUNT, vec->Length());
|
CHECK_EQ(ELEMENT_COUNT, vec->Length());
|
||||||
|
|
||||||
Isolate* isolate = CcTest::i_isolate();
|
Isolate* isolate = CcTest::i_isolate();
|
||||||
Assembler assm(isolate, buffer, sizeof(buffer));
|
Assembler masm(isolate, buffer, sizeof(buffer));
|
||||||
|
|
||||||
// Remove return address from the stack for fix stack frame alignment.
|
// Remove return address from the stack for fix stack frame alignment.
|
||||||
__ popq(rcx);
|
__ popq(rcx);
|
||||||
@ -808,7 +794,7 @@ void DoSSE2(const v8::FunctionCallbackInfo<v8::Value>& args) {
|
|||||||
__ ret(0);
|
__ ret(0);
|
||||||
|
|
||||||
CodeDesc desc;
|
CodeDesc desc;
|
||||||
assm.GetCode(isolate, &desc);
|
masm.GetCode(isolate, &desc);
|
||||||
Handle<Code> code =
|
Handle<Code> code =
|
||||||
isolate->factory()->NewCode(desc, Code::STUB, Handle<Code>());
|
isolate->factory()->NewCode(desc, Code::STUB, Handle<Code>());
|
||||||
|
|
||||||
@ -864,14 +850,15 @@ TEST(AssemblerX64Extractps) {
|
|||||||
v8::HandleScope scope(CcTest::isolate());
|
v8::HandleScope scope(CcTest::isolate());
|
||||||
byte buffer[256];
|
byte buffer[256];
|
||||||
Isolate* isolate = CcTest::i_isolate();
|
Isolate* isolate = CcTest::i_isolate();
|
||||||
Assembler assm(isolate, buffer, sizeof(buffer));
|
Assembler masm(isolate, buffer, sizeof(buffer));
|
||||||
{ CpuFeatureScope fscope2(&assm, SSE4_1);
|
{
|
||||||
|
CpuFeatureScope fscope2(&masm, SSE4_1);
|
||||||
__ extractps(rax, xmm0, 0x1);
|
__ extractps(rax, xmm0, 0x1);
|
||||||
__ ret(0);
|
__ ret(0);
|
||||||
}
|
}
|
||||||
|
|
||||||
CodeDesc desc;
|
CodeDesc desc;
|
||||||
assm.GetCode(isolate, &desc);
|
masm.GetCode(isolate, &desc);
|
||||||
Handle<Code> code =
|
Handle<Code> code =
|
||||||
isolate->factory()->NewCode(desc, Code::STUB, Handle<Code>());
|
isolate->factory()->NewCode(desc, Code::STUB, Handle<Code>());
|
||||||
#ifdef OBJECT_PRINT
|
#ifdef OBJECT_PRINT
|
||||||
@ -894,7 +881,7 @@ TEST(AssemblerX64SSE) {
|
|||||||
Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());
|
Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());
|
||||||
HandleScope scope(isolate);
|
HandleScope scope(isolate);
|
||||||
v8::internal::byte buffer[256];
|
v8::internal::byte buffer[256];
|
||||||
MacroAssembler assm(isolate, buffer, sizeof(buffer),
|
MacroAssembler masm(isolate, buffer, sizeof(buffer),
|
||||||
v8::internal::CodeObjectRequired::kYes);
|
v8::internal::CodeObjectRequired::kYes);
|
||||||
{
|
{
|
||||||
__ shufps(xmm0, xmm0, 0x0); // brocast first argument
|
__ shufps(xmm0, xmm0, 0x0); // brocast first argument
|
||||||
@ -909,7 +896,7 @@ TEST(AssemblerX64SSE) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
CodeDesc desc;
|
CodeDesc desc;
|
||||||
assm.GetCode(isolate, &desc);
|
masm.GetCode(isolate, &desc);
|
||||||
Handle<Code> code =
|
Handle<Code> code =
|
||||||
isolate->factory()->NewCode(desc, Code::STUB, Handle<Code>());
|
isolate->factory()->NewCode(desc, Code::STUB, Handle<Code>());
|
||||||
#ifdef OBJECT_PRINT
|
#ifdef OBJECT_PRINT
|
||||||
@ -930,10 +917,10 @@ TEST(AssemblerX64FMA_sd) {
|
|||||||
Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());
|
Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());
|
||||||
HandleScope scope(isolate);
|
HandleScope scope(isolate);
|
||||||
v8::internal::byte buffer[1024];
|
v8::internal::byte buffer[1024];
|
||||||
MacroAssembler assm(isolate, buffer, sizeof(buffer),
|
MacroAssembler masm(isolate, buffer, sizeof(buffer),
|
||||||
v8::internal::CodeObjectRequired::kYes);
|
v8::internal::CodeObjectRequired::kYes);
|
||||||
{
|
{
|
||||||
CpuFeatureScope fscope(&assm, FMA3);
|
CpuFeatureScope fscope(&masm, FMA3);
|
||||||
Label exit;
|
Label exit;
|
||||||
// argument in xmm0, xmm1 and xmm2
|
// argument in xmm0, xmm1 and xmm2
|
||||||
// xmm0 * xmm1 + xmm2
|
// xmm0 * xmm1 + xmm2
|
||||||
@ -1135,7 +1122,7 @@ TEST(AssemblerX64FMA_sd) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
CodeDesc desc;
|
CodeDesc desc;
|
||||||
assm.GetCode(isolate, &desc);
|
masm.GetCode(isolate, &desc);
|
||||||
Handle<Code> code =
|
Handle<Code> code =
|
||||||
isolate->factory()->NewCode(desc, Code::STUB, Handle<Code>());
|
isolate->factory()->NewCode(desc, Code::STUB, Handle<Code>());
|
||||||
#ifdef OBJECT_PRINT
|
#ifdef OBJECT_PRINT
|
||||||
@ -1156,10 +1143,10 @@ TEST(AssemblerX64FMA_ss) {
|
|||||||
Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());
|
Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());
|
||||||
HandleScope scope(isolate);
|
HandleScope scope(isolate);
|
||||||
v8::internal::byte buffer[1024];
|
v8::internal::byte buffer[1024];
|
||||||
MacroAssembler assm(isolate, buffer, sizeof(buffer),
|
MacroAssembler masm(isolate, buffer, sizeof(buffer),
|
||||||
v8::internal::CodeObjectRequired::kYes);
|
v8::internal::CodeObjectRequired::kYes);
|
||||||
{
|
{
|
||||||
CpuFeatureScope fscope(&assm, FMA3);
|
CpuFeatureScope fscope(&masm, FMA3);
|
||||||
Label exit;
|
Label exit;
|
||||||
// arguments in xmm0, xmm1 and xmm2
|
// arguments in xmm0, xmm1 and xmm2
|
||||||
// xmm0 * xmm1 + xmm2
|
// xmm0 * xmm1 + xmm2
|
||||||
@ -1361,7 +1348,7 @@ TEST(AssemblerX64FMA_ss) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
CodeDesc desc;
|
CodeDesc desc;
|
||||||
assm.GetCode(isolate, &desc);
|
masm.GetCode(isolate, &desc);
|
||||||
Handle<Code> code =
|
Handle<Code> code =
|
||||||
isolate->factory()->NewCode(desc, Code::STUB, Handle<Code>());
|
isolate->factory()->NewCode(desc, Code::STUB, Handle<Code>());
|
||||||
#ifdef OBJECT_PRINT
|
#ifdef OBJECT_PRINT
|
||||||
@ -1380,7 +1367,7 @@ TEST(AssemblerX64SSE_ss) {
|
|||||||
Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());
|
Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());
|
||||||
HandleScope scope(isolate);
|
HandleScope scope(isolate);
|
||||||
v8::internal::byte buffer[1024];
|
v8::internal::byte buffer[1024];
|
||||||
Assembler assm(isolate, buffer, sizeof(buffer));
|
Assembler masm(isolate, buffer, sizeof(buffer));
|
||||||
{
|
{
|
||||||
Label exit;
|
Label exit;
|
||||||
// arguments in xmm0, xmm1 and xmm2
|
// arguments in xmm0, xmm1 and xmm2
|
||||||
@ -1436,7 +1423,7 @@ TEST(AssemblerX64SSE_ss) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
CodeDesc desc;
|
CodeDesc desc;
|
||||||
assm.GetCode(isolate, &desc);
|
masm.GetCode(isolate, &desc);
|
||||||
Handle<Code> code =
|
Handle<Code> code =
|
||||||
isolate->factory()->NewCode(desc, Code::STUB, Handle<Code>());
|
isolate->factory()->NewCode(desc, Code::STUB, Handle<Code>());
|
||||||
#ifdef OBJECT_PRINT
|
#ifdef OBJECT_PRINT
|
||||||
@ -1458,9 +1445,9 @@ TEST(AssemblerX64AVX_ss) {
|
|||||||
Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());
|
Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());
|
||||||
HandleScope scope(isolate);
|
HandleScope scope(isolate);
|
||||||
v8::internal::byte buffer[1024];
|
v8::internal::byte buffer[1024];
|
||||||
Assembler assm(isolate, buffer, sizeof(buffer));
|
Assembler masm(isolate, buffer, sizeof(buffer));
|
||||||
{
|
{
|
||||||
CpuFeatureScope avx_scope(&assm, AVX);
|
CpuFeatureScope avx_scope(&masm, AVX);
|
||||||
Label exit;
|
Label exit;
|
||||||
// arguments in xmm0, xmm1 and xmm2
|
// arguments in xmm0, xmm1 and xmm2
|
||||||
__ subq(rsp, Immediate(kDoubleSize * 2)); // For memory operand
|
__ subq(rsp, Immediate(kDoubleSize * 2)); // For memory operand
|
||||||
@ -1521,7 +1508,7 @@ TEST(AssemblerX64AVX_ss) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
CodeDesc desc;
|
CodeDesc desc;
|
||||||
assm.GetCode(isolate, &desc);
|
masm.GetCode(isolate, &desc);
|
||||||
Handle<Code> code =
|
Handle<Code> code =
|
||||||
isolate->factory()->NewCode(desc, Code::STUB, Handle<Code>());
|
isolate->factory()->NewCode(desc, Code::STUB, Handle<Code>());
|
||||||
#ifdef OBJECT_PRINT
|
#ifdef OBJECT_PRINT
|
||||||
@ -1543,9 +1530,9 @@ TEST(AssemblerX64AVX_sd) {
|
|||||||
Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());
|
Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());
|
||||||
HandleScope scope(isolate);
|
HandleScope scope(isolate);
|
||||||
v8::internal::byte buffer[1024];
|
v8::internal::byte buffer[1024];
|
||||||
Assembler assm(isolate, buffer, sizeof(buffer));
|
Assembler masm(isolate, buffer, sizeof(buffer));
|
||||||
{
|
{
|
||||||
CpuFeatureScope avx_scope(&assm, AVX);
|
CpuFeatureScope avx_scope(&masm, AVX);
|
||||||
Label exit;
|
Label exit;
|
||||||
// arguments in xmm0, xmm1 and xmm2
|
// arguments in xmm0, xmm1 and xmm2
|
||||||
__ subq(rsp, Immediate(kDoubleSize * 2)); // For memory operand
|
__ subq(rsp, Immediate(kDoubleSize * 2)); // For memory operand
|
||||||
@ -1760,7 +1747,7 @@ TEST(AssemblerX64AVX_sd) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
CodeDesc desc;
|
CodeDesc desc;
|
||||||
assm.GetCode(isolate, &desc);
|
masm.GetCode(isolate, &desc);
|
||||||
Handle<Code> code =
|
Handle<Code> code =
|
||||||
isolate->factory()->NewCode(desc, Code::STUB, Handle<Code>());
|
isolate->factory()->NewCode(desc, Code::STUB, Handle<Code>());
|
||||||
#ifdef OBJECT_PRINT
|
#ifdef OBJECT_PRINT
|
||||||
@ -1782,10 +1769,10 @@ TEST(AssemblerX64BMI1) {
|
|||||||
Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());
|
Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());
|
||||||
HandleScope scope(isolate);
|
HandleScope scope(isolate);
|
||||||
v8::internal::byte buffer[1024];
|
v8::internal::byte buffer[1024];
|
||||||
MacroAssembler assm(isolate, buffer, sizeof(buffer),
|
MacroAssembler masm(isolate, buffer, sizeof(buffer),
|
||||||
v8::internal::CodeObjectRequired::kYes);
|
v8::internal::CodeObjectRequired::kYes);
|
||||||
{
|
{
|
||||||
CpuFeatureScope fscope(&assm, BMI1);
|
CpuFeatureScope fscope(&masm, BMI1);
|
||||||
Label exit;
|
Label exit;
|
||||||
|
|
||||||
__ movq(rcx, V8_UINT64_C(0x1122334455667788)); // source operand
|
__ movq(rcx, V8_UINT64_C(0x1122334455667788)); // source operand
|
||||||
@ -1952,7 +1939,7 @@ TEST(AssemblerX64BMI1) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
CodeDesc desc;
|
CodeDesc desc;
|
||||||
assm.GetCode(isolate, &desc);
|
masm.GetCode(isolate, &desc);
|
||||||
Handle<Code> code =
|
Handle<Code> code =
|
||||||
isolate->factory()->NewCode(desc, Code::STUB, Handle<Code>());
|
isolate->factory()->NewCode(desc, Code::STUB, Handle<Code>());
|
||||||
#ifdef OBJECT_PRINT
|
#ifdef OBJECT_PRINT
|
||||||
@ -1972,10 +1959,10 @@ TEST(AssemblerX64LZCNT) {
|
|||||||
Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());
|
Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());
|
||||||
HandleScope scope(isolate);
|
HandleScope scope(isolate);
|
||||||
v8::internal::byte buffer[256];
|
v8::internal::byte buffer[256];
|
||||||
MacroAssembler assm(isolate, buffer, sizeof(buffer),
|
MacroAssembler masm(isolate, buffer, sizeof(buffer),
|
||||||
v8::internal::CodeObjectRequired::kYes);
|
v8::internal::CodeObjectRequired::kYes);
|
||||||
{
|
{
|
||||||
CpuFeatureScope fscope(&assm, LZCNT);
|
CpuFeatureScope fscope(&masm, LZCNT);
|
||||||
Label exit;
|
Label exit;
|
||||||
|
|
||||||
__ movq(rcx, V8_UINT64_C(0x1122334455667788)); // source operand
|
__ movq(rcx, V8_UINT64_C(0x1122334455667788)); // source operand
|
||||||
@ -2012,7 +1999,7 @@ TEST(AssemblerX64LZCNT) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
CodeDesc desc;
|
CodeDesc desc;
|
||||||
assm.GetCode(isolate, &desc);
|
masm.GetCode(isolate, &desc);
|
||||||
Handle<Code> code =
|
Handle<Code> code =
|
||||||
isolate->factory()->NewCode(desc, Code::STUB, Handle<Code>());
|
isolate->factory()->NewCode(desc, Code::STUB, Handle<Code>());
|
||||||
#ifdef OBJECT_PRINT
|
#ifdef OBJECT_PRINT
|
||||||
@ -2032,10 +2019,10 @@ TEST(AssemblerX64POPCNT) {
|
|||||||
Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());
|
Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());
|
||||||
HandleScope scope(isolate);
|
HandleScope scope(isolate);
|
||||||
v8::internal::byte buffer[256];
|
v8::internal::byte buffer[256];
|
||||||
MacroAssembler assm(isolate, buffer, sizeof(buffer),
|
MacroAssembler masm(isolate, buffer, sizeof(buffer),
|
||||||
v8::internal::CodeObjectRequired::kYes);
|
v8::internal::CodeObjectRequired::kYes);
|
||||||
{
|
{
|
||||||
CpuFeatureScope fscope(&assm, POPCNT);
|
CpuFeatureScope fscope(&masm, POPCNT);
|
||||||
Label exit;
|
Label exit;
|
||||||
|
|
||||||
__ movq(rcx, V8_UINT64_C(0x1111111111111100)); // source operand
|
__ movq(rcx, V8_UINT64_C(0x1111111111111100)); // source operand
|
||||||
@ -2072,7 +2059,7 @@ TEST(AssemblerX64POPCNT) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
CodeDesc desc;
|
CodeDesc desc;
|
||||||
assm.GetCode(isolate, &desc);
|
masm.GetCode(isolate, &desc);
|
||||||
Handle<Code> code =
|
Handle<Code> code =
|
||||||
isolate->factory()->NewCode(desc, Code::STUB, Handle<Code>());
|
isolate->factory()->NewCode(desc, Code::STUB, Handle<Code>());
|
||||||
#ifdef OBJECT_PRINT
|
#ifdef OBJECT_PRINT
|
||||||
@ -2092,10 +2079,10 @@ TEST(AssemblerX64BMI2) {
|
|||||||
Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());
|
Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());
|
||||||
HandleScope scope(isolate);
|
HandleScope scope(isolate);
|
||||||
v8::internal::byte buffer[2048];
|
v8::internal::byte buffer[2048];
|
||||||
MacroAssembler assm(isolate, buffer, sizeof(buffer),
|
MacroAssembler masm(isolate, buffer, sizeof(buffer),
|
||||||
v8::internal::CodeObjectRequired::kYes);
|
v8::internal::CodeObjectRequired::kYes);
|
||||||
{
|
{
|
||||||
CpuFeatureScope fscope(&assm, BMI2);
|
CpuFeatureScope fscope(&masm, BMI2);
|
||||||
Label exit;
|
Label exit;
|
||||||
__ pushq(rbx); // save rbx
|
__ pushq(rbx); // save rbx
|
||||||
__ movq(rcx, V8_UINT64_C(0x1122334455667788)); // source operand
|
__ movq(rcx, V8_UINT64_C(0x1122334455667788)); // source operand
|
||||||
@ -2335,7 +2322,7 @@ TEST(AssemblerX64BMI2) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
CodeDesc desc;
|
CodeDesc desc;
|
||||||
assm.GetCode(isolate, &desc);
|
masm.GetCode(isolate, &desc);
|
||||||
Handle<Code> code =
|
Handle<Code> code =
|
||||||
isolate->factory()->NewCode(desc, Code::STUB, Handle<Code>());
|
isolate->factory()->NewCode(desc, Code::STUB, Handle<Code>());
|
||||||
#ifdef OBJECT_PRINT
|
#ifdef OBJECT_PRINT
|
||||||
@ -2353,7 +2340,7 @@ TEST(AssemblerX64JumpTables1) {
|
|||||||
CcTest::InitializeVM();
|
CcTest::InitializeVM();
|
||||||
Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());
|
Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());
|
||||||
HandleScope scope(isolate);
|
HandleScope scope(isolate);
|
||||||
MacroAssembler assm(isolate, nullptr, 0,
|
MacroAssembler masm(isolate, nullptr, 0,
|
||||||
v8::internal::CodeObjectRequired::kYes);
|
v8::internal::CodeObjectRequired::kYes);
|
||||||
|
|
||||||
const int kNumCases = 512;
|
const int kNumCases = 512;
|
||||||
@ -2380,7 +2367,7 @@ TEST(AssemblerX64JumpTables1) {
|
|||||||
__ ret(0);
|
__ ret(0);
|
||||||
|
|
||||||
CodeDesc desc;
|
CodeDesc desc;
|
||||||
assm.GetCode(isolate, &desc);
|
masm.GetCode(isolate, &desc);
|
||||||
Handle<Code> code =
|
Handle<Code> code =
|
||||||
isolate->factory()->NewCode(desc, Code::STUB, Handle<Code>());
|
isolate->factory()->NewCode(desc, Code::STUB, Handle<Code>());
|
||||||
#ifdef OBJECT_PRINT
|
#ifdef OBJECT_PRINT
|
||||||
@ -2401,7 +2388,7 @@ TEST(AssemblerX64JumpTables2) {
|
|||||||
CcTest::InitializeVM();
|
CcTest::InitializeVM();
|
||||||
Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());
|
Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());
|
||||||
HandleScope scope(isolate);
|
HandleScope scope(isolate);
|
||||||
MacroAssembler assm(isolate, nullptr, 0,
|
MacroAssembler masm(isolate, nullptr, 0,
|
||||||
v8::internal::CodeObjectRequired::kYes);
|
v8::internal::CodeObjectRequired::kYes);
|
||||||
|
|
||||||
const int kNumCases = 512;
|
const int kNumCases = 512;
|
||||||
@ -2429,7 +2416,7 @@ TEST(AssemblerX64JumpTables2) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
CodeDesc desc;
|
CodeDesc desc;
|
||||||
assm.GetCode(isolate, &desc);
|
masm.GetCode(isolate, &desc);
|
||||||
Handle<Code> code =
|
Handle<Code> code =
|
||||||
isolate->factory()->NewCode(desc, Code::STUB, Handle<Code>());
|
isolate->factory()->NewCode(desc, Code::STUB, Handle<Code>());
|
||||||
#ifdef OBJECT_PRINT
|
#ifdef OBJECT_PRINT
|
||||||
@ -2446,9 +2433,9 @@ TEST(AssemblerX64JumpTables2) {
|
|||||||
|
|
||||||
TEST(AssemblerX64PslldWithXmm15) {
|
TEST(AssemblerX64PslldWithXmm15) {
|
||||||
CcTest::InitializeVM();
|
CcTest::InitializeVM();
|
||||||
int actual_size;
|
size_t allocated;
|
||||||
byte* buffer = AllocateExecutablePage(&actual_size);
|
byte* buffer = AllocateAssemblerBuffer(&allocated);
|
||||||
Assembler assm(CcTest::i_isolate(), buffer, actual_size);
|
Assembler masm(CcTest::i_isolate(), buffer, static_cast<int>(allocated));
|
||||||
|
|
||||||
__ movq(xmm15, arg1);
|
__ movq(xmm15, arg1);
|
||||||
__ pslld(xmm15, 1);
|
__ pslld(xmm15, 1);
|
||||||
@ -2456,7 +2443,7 @@ TEST(AssemblerX64PslldWithXmm15) {
|
|||||||
__ ret(0);
|
__ ret(0);
|
||||||
|
|
||||||
CodeDesc desc;
|
CodeDesc desc;
|
||||||
assm.GetCode(CcTest::i_isolate(), &desc);
|
masm.GetCode(CcTest::i_isolate(), &desc);
|
||||||
uint64_t result = FUNCTION_CAST<F5>(buffer)(V8_UINT64_C(0x1122334455667788));
|
uint64_t result = FUNCTION_CAST<F5>(buffer)(V8_UINT64_C(0x1122334455667788));
|
||||||
CHECK_EQ(V8_UINT64_C(0x22446688aaccef10), result);
|
CHECK_EQ(V8_UINT64_C(0x22446688aaccef10), result);
|
||||||
}
|
}
|
||||||
@ -2469,10 +2456,10 @@ TEST(AssemblerX64vmovups) {
|
|||||||
Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());
|
Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());
|
||||||
HandleScope scope(isolate);
|
HandleScope scope(isolate);
|
||||||
v8::internal::byte buffer[256];
|
v8::internal::byte buffer[256];
|
||||||
MacroAssembler assm(isolate, buffer, sizeof(buffer),
|
MacroAssembler masm(isolate, buffer, sizeof(buffer),
|
||||||
v8::internal::CodeObjectRequired::kYes);
|
v8::internal::CodeObjectRequired::kYes);
|
||||||
{
|
{
|
||||||
CpuFeatureScope avx_scope(&assm, AVX);
|
CpuFeatureScope avx_scope(&masm, AVX);
|
||||||
__ shufps(xmm0, xmm0, 0x0); // brocast first argument
|
__ shufps(xmm0, xmm0, 0x0); // brocast first argument
|
||||||
__ shufps(xmm1, xmm1, 0x0); // brocast second argument
|
__ shufps(xmm1, xmm1, 0x0); // brocast second argument
|
||||||
// copy xmm1 to xmm0 through the stack to test the "vmovups reg, mem".
|
// copy xmm1 to xmm0 through the stack to test the "vmovups reg, mem".
|
||||||
@ -2485,7 +2472,7 @@ TEST(AssemblerX64vmovups) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
CodeDesc desc;
|
CodeDesc desc;
|
||||||
assm.GetCode(isolate, &desc);
|
masm.GetCode(isolate, &desc);
|
||||||
Handle<Code> code =
|
Handle<Code> code =
|
||||||
isolate->factory()->NewCode(desc, Code::STUB, Handle<Code>());
|
isolate->factory()->NewCode(desc, Code::STUB, Handle<Code>());
|
||||||
#ifdef OBJECT_PRINT
|
#ifdef OBJECT_PRINT
|
||||||
|
@ -45,15 +45,13 @@ namespace internal {
|
|||||||
|
|
||||||
ConvertDToIFunc MakeConvertDToIFuncTrampoline(Isolate* isolate,
|
ConvertDToIFunc MakeConvertDToIFuncTrampoline(Isolate* isolate,
|
||||||
Register destination_reg) {
|
Register destination_reg) {
|
||||||
// Allocate an executable page of memory.
|
|
||||||
size_t actual_size;
|
|
||||||
byte* buffer = static_cast<byte*>(v8::base::OS::Allocate(
|
|
||||||
Assembler::kMinimalBufferSize, &actual_size,
|
|
||||||
v8::base::OS::MemoryPermission::kReadWriteExecute));
|
|
||||||
CHECK(buffer);
|
|
||||||
HandleScope handles(isolate);
|
HandleScope handles(isolate);
|
||||||
MacroAssembler masm(isolate, buffer, static_cast<int>(actual_size),
|
|
||||||
|
size_t allocated;
|
||||||
|
byte* buffer = AllocateAssemblerBuffer(&allocated);
|
||||||
|
MacroAssembler masm(isolate, buffer, static_cast<int>(allocated),
|
||||||
v8::internal::CodeObjectRequired::kYes);
|
v8::internal::CodeObjectRequired::kYes);
|
||||||
|
|
||||||
DoubleToIStub stub(isolate, destination_reg);
|
DoubleToIStub stub(isolate, destination_reg);
|
||||||
|
|
||||||
byte* start = stub.GetCode()->instruction_start();
|
byte* start = stub.GetCode()->instruction_start();
|
||||||
@ -117,7 +115,7 @@ ConvertDToIFunc MakeConvertDToIFuncTrampoline(Isolate* isolate,
|
|||||||
|
|
||||||
CodeDesc desc;
|
CodeDesc desc;
|
||||||
masm.GetCode(isolate, &desc);
|
masm.GetCode(isolate, &desc);
|
||||||
Assembler::FlushICache(isolate, buffer, actual_size);
|
Assembler::FlushICache(isolate, buffer, allocated);
|
||||||
return (reinterpret_cast<ConvertDToIFunc>(
|
return (reinterpret_cast<ConvertDToIFunc>(
|
||||||
reinterpret_cast<intptr_t>(buffer)));
|
reinterpret_cast<intptr_t>(buffer)));
|
||||||
}
|
}
|
||||||
|
@ -45,15 +45,14 @@ namespace internal {
|
|||||||
|
|
||||||
ConvertDToIFunc MakeConvertDToIFuncTrampoline(Isolate* isolate,
|
ConvertDToIFunc MakeConvertDToIFuncTrampoline(Isolate* isolate,
|
||||||
Register destination_reg) {
|
Register destination_reg) {
|
||||||
// Allocate an executable page of memory.
|
|
||||||
size_t actual_size = 4 * Assembler::kMinimalBufferSize;
|
|
||||||
byte* buffer = static_cast<byte*>(v8::base::OS::Allocate(
|
|
||||||
actual_size, &actual_size,
|
|
||||||
v8::base::OS::MemoryPermission::kReadWriteExecute));
|
|
||||||
CHECK(buffer);
|
|
||||||
HandleScope handles(isolate);
|
HandleScope handles(isolate);
|
||||||
MacroAssembler masm(isolate, buffer, static_cast<int>(actual_size),
|
|
||||||
|
size_t allocated;
|
||||||
|
byte* buffer =
|
||||||
|
AllocateAssemblerBuffer(&allocated, 4 * Assembler::kMinimalBufferSize);
|
||||||
|
MacroAssembler masm(isolate, buffer, static_cast<int>(allocated),
|
||||||
v8::internal::CodeObjectRequired::kYes);
|
v8::internal::CodeObjectRequired::kYes);
|
||||||
|
|
||||||
DoubleToIStub stub(isolate, destination_reg);
|
DoubleToIStub stub(isolate, destination_reg);
|
||||||
|
|
||||||
byte* start = stub.GetCode()->instruction_start();
|
byte* start = stub.GetCode()->instruction_start();
|
||||||
@ -116,7 +115,7 @@ ConvertDToIFunc MakeConvertDToIFuncTrampoline(Isolate* isolate,
|
|||||||
|
|
||||||
CodeDesc desc;
|
CodeDesc desc;
|
||||||
masm.GetCode(isolate, &desc);
|
masm.GetCode(isolate, &desc);
|
||||||
Assembler::FlushICache(isolate, buffer, actual_size);
|
Assembler::FlushICache(isolate, buffer, allocated);
|
||||||
return (reinterpret_cast<ConvertDToIFunc>(
|
return (reinterpret_cast<ConvertDToIFunc>(
|
||||||
reinterpret_cast<intptr_t>(buffer)));
|
reinterpret_cast<intptr_t>(buffer)));
|
||||||
}
|
}
|
||||||
|
@ -42,19 +42,17 @@
|
|||||||
namespace v8 {
|
namespace v8 {
|
||||||
namespace internal {
|
namespace internal {
|
||||||
|
|
||||||
#define __ assm.
|
#define __ masm.
|
||||||
|
|
||||||
ConvertDToIFunc MakeConvertDToIFuncTrampoline(Isolate* isolate,
|
ConvertDToIFunc MakeConvertDToIFuncTrampoline(Isolate* isolate,
|
||||||
Register destination_reg) {
|
Register destination_reg) {
|
||||||
// Allocate an executable page of memory.
|
|
||||||
size_t actual_size;
|
|
||||||
byte* buffer = static_cast<byte*>(v8::base::OS::Allocate(
|
|
||||||
Assembler::kMinimalBufferSize, &actual_size,
|
|
||||||
v8::base::OS::MemoryPermission::kReadWriteExecute));
|
|
||||||
CHECK(buffer);
|
|
||||||
HandleScope handles(isolate);
|
HandleScope handles(isolate);
|
||||||
MacroAssembler assm(isolate, buffer, static_cast<int>(actual_size),
|
|
||||||
|
size_t allocated;
|
||||||
|
byte* buffer = AllocateAssemblerBuffer(&allocated);
|
||||||
|
MacroAssembler masm(isolate, buffer, static_cast<int>(allocated),
|
||||||
v8::internal::CodeObjectRequired::kYes);
|
v8::internal::CodeObjectRequired::kYes);
|
||||||
|
|
||||||
DoubleToIStub stub(isolate, destination_reg);
|
DoubleToIStub stub(isolate, destination_reg);
|
||||||
byte* start = stub.GetCode()->instruction_start();
|
byte* start = stub.GetCode()->instruction_start();
|
||||||
|
|
||||||
@ -109,7 +107,7 @@ ConvertDToIFunc MakeConvertDToIFuncTrampoline(Isolate* isolate,
|
|||||||
__ ret(kDoubleSize);
|
__ ret(kDoubleSize);
|
||||||
|
|
||||||
CodeDesc desc;
|
CodeDesc desc;
|
||||||
assm.GetCode(isolate, &desc);
|
masm.GetCode(isolate, &desc);
|
||||||
return reinterpret_cast<ConvertDToIFunc>(
|
return reinterpret_cast<ConvertDToIFunc>(
|
||||||
reinterpret_cast<intptr_t>(buffer));
|
reinterpret_cast<intptr_t>(buffer));
|
||||||
}
|
}
|
||||||
|
@ -47,15 +47,13 @@ namespace internal {
|
|||||||
|
|
||||||
ConvertDToIFunc MakeConvertDToIFuncTrampoline(Isolate* isolate,
|
ConvertDToIFunc MakeConvertDToIFuncTrampoline(Isolate* isolate,
|
||||||
Register destination_reg) {
|
Register destination_reg) {
|
||||||
// Allocate an executable page of memory.
|
|
||||||
size_t actual_size;
|
|
||||||
byte* buffer = static_cast<byte*>(v8::base::OS::Allocate(
|
|
||||||
Assembler::kMinimalBufferSize, &actual_size,
|
|
||||||
v8::base::OS::MemoryPermission::kReadWriteExecute));
|
|
||||||
CHECK(buffer);
|
|
||||||
HandleScope handles(isolate);
|
HandleScope handles(isolate);
|
||||||
MacroAssembler masm(isolate, buffer, static_cast<int>(actual_size),
|
|
||||||
|
size_t allocated;
|
||||||
|
byte* buffer = AllocateAssemblerBuffer(&allocated);
|
||||||
|
MacroAssembler masm(isolate, buffer, static_cast<int>(allocated),
|
||||||
v8::internal::CodeObjectRequired::kYes);
|
v8::internal::CodeObjectRequired::kYes);
|
||||||
|
|
||||||
DoubleToIStub stub(isolate, destination_reg);
|
DoubleToIStub stub(isolate, destination_reg);
|
||||||
|
|
||||||
byte* start = stub.GetCode()->instruction_start();
|
byte* start = stub.GetCode()->instruction_start();
|
||||||
@ -130,7 +128,7 @@ ConvertDToIFunc MakeConvertDToIFuncTrampoline(Isolate* isolate,
|
|||||||
|
|
||||||
CodeDesc desc;
|
CodeDesc desc;
|
||||||
masm.GetCode(isolate, &desc);
|
masm.GetCode(isolate, &desc);
|
||||||
Assembler::FlushICache(isolate, buffer, actual_size);
|
Assembler::FlushICache(isolate, buffer, allocated);
|
||||||
return (reinterpret_cast<ConvertDToIFunc>(
|
return (reinterpret_cast<ConvertDToIFunc>(
|
||||||
reinterpret_cast<intptr_t>(buffer)));
|
reinterpret_cast<intptr_t>(buffer)));
|
||||||
}
|
}
|
||||||
|
@ -47,15 +47,13 @@ namespace internal {
|
|||||||
|
|
||||||
ConvertDToIFunc MakeConvertDToIFuncTrampoline(Isolate* isolate,
|
ConvertDToIFunc MakeConvertDToIFuncTrampoline(Isolate* isolate,
|
||||||
Register destination_reg) {
|
Register destination_reg) {
|
||||||
// Allocate an executable page of memory.
|
|
||||||
size_t actual_size;
|
|
||||||
byte* buffer = static_cast<byte*>(v8::base::OS::Allocate(
|
|
||||||
Assembler::kMinimalBufferSize, &actual_size,
|
|
||||||
v8::base::OS::MemoryPermission::kReadWriteExecute));
|
|
||||||
CHECK(buffer);
|
|
||||||
HandleScope handles(isolate);
|
HandleScope handles(isolate);
|
||||||
MacroAssembler masm(isolate, buffer, static_cast<int>(actual_size),
|
|
||||||
|
size_t allocated;
|
||||||
|
byte* buffer = AllocateAssemblerBuffer(&allocated);
|
||||||
|
MacroAssembler masm(isolate, buffer, static_cast<int>(allocated),
|
||||||
v8::internal::CodeObjectRequired::kYes);
|
v8::internal::CodeObjectRequired::kYes);
|
||||||
|
|
||||||
DoubleToIStub stub(isolate, destination_reg);
|
DoubleToIStub stub(isolate, destination_reg);
|
||||||
|
|
||||||
byte* start = stub.GetCode()->instruction_start();
|
byte* start = stub.GetCode()->instruction_start();
|
||||||
@ -127,7 +125,7 @@ ConvertDToIFunc MakeConvertDToIFuncTrampoline(Isolate* isolate,
|
|||||||
|
|
||||||
CodeDesc desc;
|
CodeDesc desc;
|
||||||
masm.GetCode(isolate, &desc);
|
masm.GetCode(isolate, &desc);
|
||||||
Assembler::FlushICache(isolate, buffer, actual_size);
|
Assembler::FlushICache(isolate, buffer, allocated);
|
||||||
return (reinterpret_cast<ConvertDToIFunc>(
|
return (reinterpret_cast<ConvertDToIFunc>(
|
||||||
reinterpret_cast<intptr_t>(buffer)));
|
reinterpret_cast<intptr_t>(buffer)));
|
||||||
}
|
}
|
||||||
|
@ -42,19 +42,17 @@ namespace v8 {
|
|||||||
namespace internal {
|
namespace internal {
|
||||||
namespace test_code_stubs_x64 {
|
namespace test_code_stubs_x64 {
|
||||||
|
|
||||||
#define __ assm.
|
#define __ masm.
|
||||||
|
|
||||||
ConvertDToIFunc MakeConvertDToIFuncTrampoline(Isolate* isolate,
|
ConvertDToIFunc MakeConvertDToIFuncTrampoline(Isolate* isolate,
|
||||||
Register destination_reg) {
|
Register destination_reg) {
|
||||||
// Allocate an executable page of memory.
|
|
||||||
size_t actual_size;
|
|
||||||
byte* buffer = static_cast<byte*>(v8::base::OS::Allocate(
|
|
||||||
Assembler::kMinimalBufferSize, &actual_size,
|
|
||||||
v8::base::OS::MemoryPermission::kReadWriteExecute));
|
|
||||||
CHECK(buffer);
|
|
||||||
HandleScope handles(isolate);
|
HandleScope handles(isolate);
|
||||||
MacroAssembler assm(isolate, buffer, static_cast<int>(actual_size),
|
|
||||||
|
size_t allocated;
|
||||||
|
byte* buffer = AllocateAssemblerBuffer(&allocated);
|
||||||
|
MacroAssembler masm(isolate, buffer, static_cast<int>(allocated),
|
||||||
v8::internal::CodeObjectRequired::kYes);
|
v8::internal::CodeObjectRequired::kYes);
|
||||||
|
|
||||||
DoubleToIStub stub(isolate, destination_reg);
|
DoubleToIStub stub(isolate, destination_reg);
|
||||||
byte* start = stub.GetCode()->instruction_start();
|
byte* start = stub.GetCode()->instruction_start();
|
||||||
|
|
||||||
@ -107,7 +105,7 @@ ConvertDToIFunc MakeConvertDToIFuncTrampoline(Isolate* isolate,
|
|||||||
__ ret(0);
|
__ ret(0);
|
||||||
|
|
||||||
CodeDesc desc;
|
CodeDesc desc;
|
||||||
assm.GetCode(isolate, &desc);
|
masm.GetCode(isolate, &desc);
|
||||||
return reinterpret_cast<ConvertDToIFunc>(
|
return reinterpret_cast<ConvertDToIFunc>(
|
||||||
reinterpret_cast<intptr_t>(buffer));
|
reinterpret_cast<intptr_t>(buffer));
|
||||||
}
|
}
|
||||||
|
@@ -45,24 +45,16 @@ typedef void* (*F)(int x, int y, int p2, int p3, int p4);
 typedef Object* (*F3)(void* p0, int p1, int p2, int p3, int p4);
 typedef int (*F5)(void*, void*, void*, void*, void*);
 
-byte* AllocateExecutablePage(int* actual_size) {
-  size_t allocated = 0;
-  void* result =
-      v8::base::OS::Allocate(Assembler::kMinimalBufferSize, &allocated,
-                             v8::base::OS::MemoryPermission::kReadWriteExecute);
-  CHECK(result);
-  *actual_size = static_cast<int>(allocated);
-  return static_cast<byte*>(result);
-}
-
 TEST(LoadAndStoreWithRepresentation) {
   Isolate* isolate = CcTest::i_isolate();
   HandleScope handles(isolate);
-  int actual_size;
-  byte* buffer = AllocateExecutablePage(&actual_size);
-  MacroAssembler assembler(isolate, buffer, actual_size,
+  size_t allocated;
+  byte* buffer = AllocateAssemblerBuffer(&allocated);
+  MacroAssembler assembler(isolate, buffer, static_cast<int>(allocated),
                            v8::internal::CodeObjectRequired::kYes);
   MacroAssembler* masm = &assembler;  // Create a pointer for the __ macro.
 
   __ sub(sp, sp, Operand(1 * kPointerSize));
   Label exit;
 
@@ -146,9 +138,10 @@ TEST(ExtractLane) {
 
   Isolate* isolate = CcTest::i_isolate();
   HandleScope handles(isolate);
-  int actual_size;
-  byte* buffer = AllocateExecutablePage(&actual_size);
-  MacroAssembler assembler(isolate, buffer, actual_size,
+  size_t allocated;
+  byte* buffer = AllocateAssemblerBuffer(&allocated);
+  MacroAssembler assembler(isolate, buffer, static_cast<int>(allocated),
                            v8::internal::CodeObjectRequired::kYes);
   MacroAssembler* masm = &assembler;  // Create a pointer for the __ macro.
 
@@ -286,9 +279,10 @@ TEST(ReplaceLane) {
 
   Isolate* isolate = CcTest::i_isolate();
   HandleScope handles(isolate);
-  int actual_size;
-  byte* buffer = AllocateExecutablePage(&actual_size);
-  MacroAssembler assembler(isolate, buffer, actual_size,
+  size_t allocated;
+  byte* buffer = AllocateAssemblerBuffer(&allocated);
+  MacroAssembler assembler(isolate, buffer, static_cast<int>(allocated),
                            v8::internal::CodeObjectRequired::kYes);
   MacroAssembler* masm = &assembler;  // Create a pointer for the __ macro.
 
@@ -52,16 +52,6 @@ typedef int (*F0)();
 
 #define __ masm->
 
-byte* AllocateExecutablePage(int* actual_size) {
-  size_t allocated = 0;
-  void* result =
-      v8::base::OS::Allocate(Assembler::kMinimalBufferSize, &allocated,
-                             v8::base::OS::MemoryPermission::kReadWriteExecute);
-  CHECK(result);
-  *actual_size = static_cast<int>(allocated);
-  return static_cast<byte*>(result);
-}
-
 static void EntryCode(MacroAssembler* masm) {
   // Smi constant register is callee save.
   __ pushq(kRootRegister);
@@ -109,9 +99,9 @@ static void TestMoveSmi(MacroAssembler* masm, Label* exit, int id, Smi* value) {
 TEST(SmiMove) {
   Isolate* isolate = CcTest::i_isolate();
   HandleScope handles(isolate);
-  int actual_size;
-  byte* buffer = AllocateExecutablePage(&actual_size);
-  MacroAssembler assembler(isolate, buffer, actual_size,
+  size_t allocated;
+  byte* buffer = AllocateAssemblerBuffer(&allocated);
+  MacroAssembler assembler(isolate, buffer, static_cast<int>(allocated),
                            v8::internal::CodeObjectRequired::kYes);
   MacroAssembler* masm = &assembler;  // Create a pointer for the __ macro.
   EntryCode(masm);
@@ -192,9 +182,9 @@ void TestSmiCompare(MacroAssembler* masm, Label* exit, int id, int x, int y) {
 TEST(SmiCompare) {
   Isolate* isolate = CcTest::i_isolate();
   HandleScope handles(isolate);
-  int actual_size;
-  byte* buffer = AllocateExecutablePage(&actual_size);
-  MacroAssembler assembler(isolate, buffer, actual_size,
+  size_t allocated;
+  byte* buffer = AllocateAssemblerBuffer(&allocated);
+  MacroAssembler assembler(isolate, buffer, static_cast<int>(allocated),
                            v8::internal::CodeObjectRequired::kYes);
 
   MacroAssembler* masm = &assembler;
@@ -238,9 +228,9 @@ TEST(SmiCompare) {
 TEST(Integer32ToSmi) {
   Isolate* isolate = CcTest::i_isolate();
   HandleScope handles(isolate);
-  int actual_size;
-  byte* buffer = AllocateExecutablePage(&actual_size);
-  MacroAssembler assembler(isolate, buffer, actual_size,
+  size_t allocated;
+  byte* buffer = AllocateAssemblerBuffer(&allocated);
+  MacroAssembler assembler(isolate, buffer, static_cast<int>(allocated),
                            v8::internal::CodeObjectRequired::kYes);
 
   MacroAssembler* masm = &assembler;
@@ -335,9 +325,9 @@ TEST(Integer32ToSmi) {
 TEST(SmiCheck) {
   Isolate* isolate = CcTest::i_isolate();
   HandleScope handles(isolate);
-  int actual_size;
-  byte* buffer = AllocateExecutablePage(&actual_size);
-  MacroAssembler assembler(isolate, buffer, actual_size,
+  size_t allocated;
+  byte* buffer = AllocateAssemblerBuffer(&allocated);
+  MacroAssembler assembler(isolate, buffer, static_cast<int>(allocated),
                            v8::internal::CodeObjectRequired::kYes);
 
   MacroAssembler* masm = &assembler;
@@ -432,9 +422,9 @@ void TestSmiIndex(MacroAssembler* masm, Label* exit, int id, int x) {
 TEST(SmiIndex) {
   Isolate* isolate = CcTest::i_isolate();
   HandleScope handles(isolate);
-  int actual_size;
-  byte* buffer = AllocateExecutablePage(&actual_size);
-  MacroAssembler assembler(isolate, buffer, actual_size,
+  size_t allocated;
+  byte* buffer = AllocateAssemblerBuffer(&allocated);
+  MacroAssembler assembler(isolate, buffer, static_cast<int>(allocated),
                            v8::internal::CodeObjectRequired::kYes);
 
   MacroAssembler* masm = &assembler;
@@ -465,9 +455,9 @@ TEST(OperandOffset) {
 
   Isolate* isolate = CcTest::i_isolate();
   HandleScope handles(isolate);
-  int actual_size;
-  byte* buffer = AllocateExecutablePage(&actual_size);
-  MacroAssembler assembler(isolate, buffer, actual_size,
+  size_t allocated;
+  byte* buffer = AllocateAssemblerBuffer(&allocated);
+  MacroAssembler assembler(isolate, buffer, static_cast<int>(allocated),
                            v8::internal::CodeObjectRequired::kYes);
 
   MacroAssembler* masm = &assembler;
@@ -813,9 +803,9 @@ TEST(OperandOffset) {
 TEST(LoadAndStoreWithRepresentation) {
   Isolate* isolate = CcTest::i_isolate();
   HandleScope handles(isolate);
-  int actual_size;
-  byte* buffer = AllocateExecutablePage(&actual_size);
-  MacroAssembler assembler(isolate, buffer, actual_size,
+  size_t allocated;
+  byte* buffer = AllocateAssemblerBuffer(&allocated);
+  MacroAssembler assembler(isolate, buffer, static_cast<int>(allocated),
                            v8::internal::CodeObjectRequired::kYes);
 
   MacroAssembler* masm = &assembler;  // Create a pointer for the __ macro.
@@ -1080,9 +1070,9 @@ void TestFloat64x2Neg(MacroAssembler* masm, Label* exit, double x, double y) {
 TEST(SIMDMacros) {
   Isolate* isolate = CcTest::i_isolate();
   HandleScope handles(isolate);
-  int actual_size;
-  byte* buffer = AllocateExecutablePage(&actual_size);
-  MacroAssembler assembler(isolate, buffer, actual_size,
+  size_t allocated;
+  byte* buffer = AllocateAssemblerBuffer(&allocated);
+  MacroAssembler assembler(isolate, buffer, static_cast<int>(allocated),
                            v8::internal::CodeObjectRequired::kYes);
 
   MacroAssembler* masm = &assembler;
@@ -13,18 +13,18 @@ namespace v8 {
 namespace internal {
 
 TEST(OSReserveMemory) {
-  size_t mem_size = 0;
-  void* mem_addr = OS::ReserveAlignedRegion(1 * MB, OS::AllocatePageSize(),
-                                            OS::GetRandomMmapAddr(), &mem_size);
-  CHECK_NE(0, mem_size);
+  size_t page_size = OS::AllocatePageSize();
+  void* mem_addr = OS::Allocate(OS::GetRandomMmapAddr(), 1 * MB, page_size,
+                                OS::MemoryPermission::kReadWrite);
+  CHECK_NE(0, page_size);
   CHECK_NOT_NULL(mem_addr);
-  size_t block_size = 4 * KB;
-  CHECK(OS::CommitRegion(mem_addr, block_size, false));
+  size_t commit_size = OS::CommitPageSize();
+  CHECK(OS::CommitRegion(mem_addr, commit_size, false));
   // Check whether we can write to memory.
   int* addr = static_cast<int*>(mem_addr);
   addr[KB - 1] = 2;
-  CHECK(OS::UncommitRegion(mem_addr, block_size));
-  OS::ReleaseRegion(mem_addr, mem_size);
+  CHECK(OS::UncommitRegion(mem_addr, commit_size));
+  OS::ReleaseRegion(mem_addr, page_size);
 }
 
 #ifdef V8_CC_GNU
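OSReserveMemory now goes through the reworked OS::Allocate, whose parameters are (hint address, size, alignment, permission). The test reserves with kReadWrite and pokes the first committed page. Callers that only want address space up front can use the same entry point with an inaccessible mapping and commit pages later; here is a hedged sketch of that reserve/commit/release lifecycle, assuming the kNoAccess enumerator and the CommitRegion/UncommitRegion/ReleaseRegion calls exercised by this test (sizes and the written value are placeholders, not code from this CL):

// Illustrative only — not code from this change.
void ReserveCommitRelease() {
  size_t page_size = v8::base::OS::AllocatePageSize();
  size_t size = 1024 * 1024;  // reserve 1 MB of address space
  void* region = v8::base::OS::Allocate(
      v8::base::OS::GetRandomMmapAddr(), size, page_size,
      v8::base::OS::MemoryPermission::kNoAccess);
  if (region == nullptr) return;  // reservation failed
  size_t commit_size = v8::base::OS::CommitPageSize();
  if (v8::base::OS::CommitRegion(region, commit_size, false)) {
    static_cast<int*>(region)[0] = 42;  // the committed page is now writable
    v8::base::OS::UncommitRegion(region, commit_size);
  }
  v8::base::OS::ReleaseRegion(region, size);
}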
@@ -179,13 +179,12 @@ class MemoryAllocationPermissionsTest : public ::testing::Test {
 
   void TestPermissions(OS::MemoryPermission permission, bool can_read,
                        bool can_write) {
-    const size_t allocation_size = OS::CommitPageSize();
-    size_t actual = 0;
-    int* buffer =
-        static_cast<int*>(OS::Allocate(allocation_size, &actual, permission));
+    const size_t page_size = OS::AllocatePageSize();
+    int* buffer = static_cast<int*>(
+        OS::Allocate(nullptr, page_size, page_size, permission));
     ProbeMemory(buffer, MemoryAction::kRead, can_read);
     ProbeMemory(buffer, MemoryAction::kWrite, can_write);
-    OS::Free(buffer, actual);
+    OS::Free(buffer, page_size);
   }
 };
 
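TestPermissions now allocates exactly one OS page with the requested permission, probes it for readability and writability, and frees it. The concrete expectations live in TEST_F bodies outside this hunk; the sketch below shows how such a helper is typically driven — the test name and calls are illustrative, not quoted from the file:

// Illustrative only — the real TEST_F bodies are elsewhere in this file.
TEST_F(MemoryAllocationPermissionsTest, DoTest) {
  TestPermissions(OS::MemoryPermission::kNoAccess, false, false);
  TestPermissions(OS::MemoryPermission::kReadWrite, true, true);
  TestPermissions(OS::MemoryPermission::kReadWriteExecute, true, true);
}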