[Memory] Clean up base OS memory abstractions.
- Sanitize Windows page size / alignment code.
- Reorder some methods to match header file.
- Rename AllocateAlignment to AllocatePageSize to be consistent with CommitPageSize.
- Eliminate OS::Allocate overload with is_executable argument.
- Eliminate base::OS::AllocateGuarded - it's not implemented.

Bug: chromium:756050
Change-Id: I046bb019cddde0c0063d617adc2c94a23989d9d1
Reviewed-on: https://chromium-review.googlesource.com/742684
Commit-Queue: Bill Budge <bbudge@chromium.org>
Reviewed-by: Bill Budge <bbudge@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/master@{#49114}

Parent: 91ec9872fb
Commit: 6346cc53ad
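For orientation, a minimal sketch of how a typical call site changes. This is illustrative only and not part of the patch; it assumes src/base/platform/platform.h is included and that V8's byte/KB/USE helpers are in scope, as in the files touched below.

// Illustrative only: old vs. new allocation call (names taken from the diff below).
void AllocateExecutableScratch() {
  size_t actual_size;
  // Old form, removed by this CL:
  //   byte* buffer = static_cast<byte*>(
  //       base::OS::Allocate(1 * KB, &actual_size, true));  // bool is_executable
  //   size_t page = base::OS::AllocateAlignment();
  // New form:
  byte* buffer = static_cast<byte*>(base::OS::Allocate(
      1 * KB, &actual_size, base::OS::MemoryPermission::kReadWriteExecute));
  size_t page = base::OS::AllocatePageSize();  // renamed from AllocateAlignment()
  if (buffer != nullptr) base::OS::Free(buffer, actual_size);
  USE(page);
}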
@@ -24,8 +24,8 @@ MemCopyUint8Function CreateMemCopyUint8Function(Isolate* isolate,
   return stub;
 #else
   size_t actual_size;
-  byte* buffer =
-      static_cast<byte*>(base::OS::Allocate(1 * KB, &actual_size, true));
+  byte* buffer = static_cast<byte*>(base::OS::Allocate(
+      1 * KB, &actual_size, base::OS::MemoryPermission::kReadWriteExecute));
   if (buffer == nullptr) return stub;

   MacroAssembler masm(isolate, buffer, static_cast<int>(actual_size),
@@ -183,8 +183,8 @@ MemCopyUint16Uint8Function CreateMemCopyUint16Uint8Function(
   return stub;
 #else
   size_t actual_size;
-  byte* buffer =
-      static_cast<byte*>(base::OS::Allocate(1 * KB, &actual_size, true));
+  byte* buffer = static_cast<byte*>(base::OS::Allocate(
+      1 * KB, &actual_size, base::OS::MemoryPermission::kReadWriteExecute));
   if (buffer == nullptr) return stub;

   MacroAssembler masm(isolate, buffer, static_cast<int>(actual_size),
@@ -272,8 +272,8 @@ UnaryMathFunctionWithIsolate CreateSqrtFunction(Isolate* isolate) {
   return nullptr;
 #else
   size_t actual_size;
-  byte* buffer =
-      static_cast<byte*>(base::OS::Allocate(1 * KB, &actual_size, true));
+  byte* buffer = static_cast<byte*>(base::OS::Allocate(
+      1 * KB, &actual_size, base::OS::MemoryPermission::kReadWriteExecute));
   if (buffer == nullptr) return nullptr;

   MacroAssembler masm(isolate, buffer, static_cast<int>(actual_size),
@@ -89,9 +89,9 @@ void* OS::ReserveRegion(size_t size, void* hint) {
 void* OS::ReserveAlignedRegion(size_t size, size_t alignment, void* hint,
                                size_t* allocated) {
   hint = AlignedAddress(hint, alignment);
-  DCHECK_EQ(alignment % OS::AllocateAlignment(), 0);
+  DCHECK_EQ(alignment % OS::AllocatePageSize(), 0);
   size_t request_size =
-      RoundUp(size + alignment, static_cast<intptr_t>(OS::AllocateAlignment()));
+      RoundUp(size + alignment, static_cast<intptr_t>(OS::AllocatePageSize()));
   void* address = ReserveRegion(request_size, hint);
   if (address == nullptr) {
     *allocated = 0;
@@ -46,10 +46,10 @@ void* OS::ReserveRegion(size_t size, void* hint) {
 // static
 void* OS::ReserveAlignedRegion(size_t size, size_t alignment, void* hint,
                                size_t* allocated) {
-  DCHECK_EQ(alignment % OS::AllocateAlignment(), 0);
+  DCHECK_EQ(alignment % OS::AllocatePageSize(), 0);
   hint = AlignedAddress(hint, alignment);
   size_t request_size =
-      RoundUp(size + alignment, static_cast<intptr_t>(OS::AllocateAlignment()));
+      RoundUp(size + alignment, static_cast<intptr_t>(OS::AllocatePageSize()));

   zx_handle_t vmo;
   if (zx_vmo_create(request_size, 0, &vmo) != ZX_OK) {
@@ -82,7 +82,7 @@ void* OS::ReserveAlignedRegion(size_t size, size_t alignment, void* hint,
     request_size -= prefix_size;
   }

-  size_t aligned_size = RoundUp(size, OS::AllocateAlignment());
+  size_t aligned_size = RoundUp(size, OS::AllocatePageSize());
   DCHECK_LE(aligned_size, request_size);

   if (aligned_size != request_size) {
@@ -71,6 +71,9 @@ bool g_hard_abort = false;

 const char* g_gc_fake_mmap = nullptr;

+static LazyInstance<RandomNumberGenerator>::type
+    platform_random_number_generator = LAZY_INSTANCE_INITIALIZER;
+
 #if !V8_OS_FUCHSIA
 #if V8_OS_MACOSX
 // kMmapFd is used to pass vm_alloc flags to tag the region with the user
@@ -102,6 +105,15 @@ int GetProtectionFromMemoryPermission(OS::MemoryPermission access) {
 #define MAP_ANONYMOUS MAP_ANON
 #endif

+void OS::Initialize(int64_t random_seed, bool hard_abort,
+                    const char* const gc_fake_mmap) {
+  if (random_seed) {
+    platform_random_number_generator.Pointer()->SetSeed(random_seed);
+  }
+  g_hard_abort = hard_abort;
+  g_gc_fake_mmap = gc_fake_mmap;
+}
+
 int OS::ActivationFrameAlignment() {
 #if V8_TARGET_ARCH_ARM
   // On EABI ARM targets this is required for fp correctness in the
@@ -121,196 +133,15 @@ int OS::ActivationFrameAlignment() {
 #endif
 }

-intptr_t OS::CommitPageSize() {
-  static intptr_t page_size = getpagesize();
+size_t OS::AllocatePageSize() {
+  return static_cast<size_t>(sysconf(_SC_PAGESIZE));
+}
+
+size_t OS::CommitPageSize() {
+  static size_t page_size = getpagesize();
   return page_size;
 }

-void* OS::Allocate(const size_t requested, size_t* allocated,
-                   bool is_executable, void* hint) {
-  return OS::Allocate(requested, allocated,
-                      is_executable ? OS::MemoryPermission::kReadWriteExecute
-                                    : OS::MemoryPermission::kReadWrite,
-                      hint);
-}
-
-// TODO(bbudge) Move Cygwin and Fuschia stuff into platform-specific files.
-#if !V8_OS_FUCHSIA
-void* OS::Allocate(const size_t requested, size_t* allocated,
-                   OS::MemoryPermission access, void* hint) {
-  const size_t msize = RoundUp(requested, AllocateAlignment());
-  int prot = GetProtectionFromMemoryPermission(access);
-  void* mbase = mmap(hint, msize, prot, MAP_PRIVATE | MAP_ANONYMOUS, kMmapFd,
-                     kMmapFdOffset);
-  if (mbase == MAP_FAILED) return nullptr;
-  *allocated = msize;
-  return mbase;
-}
-#endif  // !V8_OS_FUCHSIA
-
-void OS::Free(void* address, const size_t size) {
-  // TODO(1240712): munmap has a return value which is ignored here.
-  int result = munmap(address, size);
-  USE(result);
-  DCHECK_EQ(0, result);
-}
-
-void OS::SetReadAndExecutable(void* address, const size_t size) {
-#if V8_OS_CYGWIN
-  DWORD old_protect;
-  CHECK_NOT_NULL(
-      VirtualProtect(address, size, PAGE_EXECUTE_READ, &old_protect));
-#else
-  CHECK_EQ(0, mprotect(address, size, PROT_READ | PROT_EXEC));
-#endif
-}
-
-// Create guard pages.
-#if !V8_OS_FUCHSIA
-void OS::Guard(void* address, const size_t size) {
-#if V8_OS_CYGWIN
-  DWORD oldprotect;
-  VirtualProtect(address, size, PAGE_NOACCESS, &oldprotect);
-#else
-  mprotect(address, size, PROT_NONE);
-#endif
-}
-#endif  // !V8_OS_FUCHSIA
-
-// Make a region of memory readable and writable.
-void OS::SetReadAndWritable(void* address, const size_t size, bool commit) {
-#if V8_OS_CYGWIN
-  DWORD oldprotect;
-  CHECK_NOT_NULL(VirtualProtect(address, size, PAGE_READWRITE, &oldprotect));
-#else
-  CHECK_EQ(0, mprotect(address, size, PROT_READ | PROT_WRITE));
-#endif
-}
-
-#if !V8_OS_CYGWIN && !V8_OS_FUCHSIA
-// static
-void* OS::ReserveRegion(size_t size, void* hint) {
-  int map_flags = MAP_PRIVATE | MAP_ANONYMOUS;
-#if !V8_OS_AIX && !V8_OS_FREEBSD && !V8_OS_QNX
-  map_flags |= MAP_NORESERVE;
-#endif
-#if V8_OS_QNX
-  map_flags |= MAP_LAZY;
-#endif  // V8_OS_QNX
-  void* result = mmap(hint, size, PROT_NONE, map_flags, kMmapFd, kMmapFdOffset);
-  if (result == MAP_FAILED) return nullptr;
-
-  return result;
-}
-
-// static
-void* OS::ReserveAlignedRegion(size_t size, size_t alignment, void* hint,
-                               size_t* allocated) {
-  DCHECK_EQ(0, alignment % OS::AllocateAlignment());
-  hint = AlignedAddress(hint, alignment);
-  size_t request_size =
-      RoundUp(size + alignment, static_cast<intptr_t>(OS::AllocateAlignment()));
-  void* result = ReserveRegion(request_size, hint);
-  if (result == nullptr) {
-    *allocated = 0;
-    return nullptr;
-  }
-
-  uint8_t* base = static_cast<uint8_t*>(result);
-  uint8_t* aligned_base = RoundUp(base, alignment);
-  DCHECK_LE(base, aligned_base);
-
-  // Unmap extra memory reserved before and after the desired block.
-  if (aligned_base != base) {
-    size_t prefix_size = static_cast<size_t>(aligned_base - base);
-    OS::Free(base, prefix_size);
-    request_size -= prefix_size;
-  }
-
-  size_t aligned_size = RoundUp(size, OS::AllocateAlignment());
-  DCHECK_LE(aligned_size, request_size);
-
-  if (aligned_size != request_size) {
-    size_t suffix_size = request_size - aligned_size;
-    OS::Free(aligned_base + aligned_size, suffix_size);
-    request_size -= suffix_size;
-  }
-
-  DCHECK(aligned_size == request_size);
-
-  *allocated = aligned_size;
-  return static_cast<void*>(aligned_base);
-}
-
-// static
-bool OS::CommitRegion(void* address, size_t size, bool is_executable) {
-  int prot = PROT_READ | PROT_WRITE | (is_executable ? PROT_EXEC : 0);
-#if !V8_OS_AIX
-  if (MAP_FAILED == mmap(address, size, prot,
-                         MAP_PRIVATE | MAP_ANONYMOUS | MAP_FIXED, kMmapFd,
-                         kMmapFdOffset)) {
-    return false;
-  }
-#else
-  if (mprotect(address, size, prot) == -1) return false;
-#endif  // !V8_OS_AIX
-  return true;
-}
-
-// static
-bool OS::UncommitRegion(void* address, size_t size) {
-#if !V8_OS_AIX
-  int map_flags = MAP_PRIVATE | MAP_ANONYMOUS | MAP_FIXED;
-#if !V8_OS_FREEBSD && !V8_OS_QNX
-  map_flags |= MAP_NORESERVE;
-#endif  // !V8_OS_FREEBSD && !V8_OS_QNX
-#if V8_OS_QNX
-  map_flags |= MAP_LAZY;
-#endif  // V8_OS_QNX
-  return mmap(address, size, PROT_NONE, map_flags, kMmapFd, kMmapFdOffset) !=
-         MAP_FAILED;
-#else   // V8_OS_AIX
-  return mprotect(address, size, PROT_NONE) != -1;
-#endif  // V8_OS_AIX
-}
-
-// static
-bool OS::ReleaseRegion(void* address, size_t size) {
-  return munmap(address, size) == 0;
-}
-
-// static
-bool OS::ReleasePartialRegion(void* address, size_t size) {
-  return munmap(address, size) == 0;
-}
-
-// static
-bool OS::HasLazyCommits() {
-#if V8_OS_AIX || V8_OS_LINUX || V8_OS_MACOSX
-  return true;
-#else
-  // TODO(bbudge) Return true for all POSIX platforms.
-  return false;
-#endif
-}
-#endif  // !V8_OS_CYGWIN && !V8_OS_FUCHSIA
-
-static LazyInstance<RandomNumberGenerator>::type
-    platform_random_number_generator = LAZY_INSTANCE_INITIALIZER;
-
-void OS::Initialize(int64_t random_seed, bool hard_abort,
-                    const char* const gc_fake_mmap) {
-  if (random_seed) {
-    platform_random_number_generator.Pointer()->SetSeed(random_seed);
-  }
-  g_hard_abort = hard_abort;
-  g_gc_fake_mmap = gc_fake_mmap;
-}
-
-const char* OS::GetGCFakeMMapFile() {
-  return g_gc_fake_mmap;
-}
-
 void* OS::GetRandomMmapAddr() {
 #if defined(ADDRESS_SANITIZER) || defined(MEMORY_SANITIZER) || \
     defined(THREAD_SANITIZER)
@@ -376,8 +207,169 @@ void* OS::GetRandomMmapAddr() {
   return reinterpret_cast<void*>(raw_addr);
 }

-size_t OS::AllocateAlignment() {
-  return static_cast<size_t>(sysconf(_SC_PAGESIZE));
+// TODO(bbudge) Move Cygwin and Fuschia stuff into platform-specific files.
+#if !V8_OS_FUCHSIA
+void* OS::Allocate(const size_t requested, size_t* allocated,
+                   OS::MemoryPermission access, void* hint) {
+  const size_t msize = RoundUp(requested, AllocatePageSize());
+  int prot = GetProtectionFromMemoryPermission(access);
+  void* mbase = mmap(hint, msize, prot, MAP_PRIVATE | MAP_ANONYMOUS, kMmapFd,
+                     kMmapFdOffset);
+  if (mbase == MAP_FAILED) return nullptr;
+  *allocated = msize;
+  return mbase;
+}
+#endif  // !V8_OS_FUCHSIA
+
+void OS::Free(void* address, const size_t size) {
+  // TODO(1240712): munmap has a return value which is ignored here.
+  int result = munmap(address, size);
+  USE(result);
+  DCHECK_EQ(0, result);
+}
+
+void OS::SetReadAndExecutable(void* address, const size_t size) {
+#if V8_OS_CYGWIN
+  DWORD old_protect;
+  CHECK_NOT_NULL(
+      VirtualProtect(address, size, PAGE_EXECUTE_READ, &old_protect));
+#else
+  CHECK_EQ(0, mprotect(address, size, PROT_READ | PROT_EXEC));
+#endif
+}
+
+// Create guard pages.
+#if !V8_OS_FUCHSIA
+void OS::Guard(void* address, const size_t size) {
+#if V8_OS_CYGWIN
+  DWORD oldprotect;
+  VirtualProtect(address, size, PAGE_NOACCESS, &oldprotect);
+#else
+  mprotect(address, size, PROT_NONE);
+#endif
+}
+#endif  // !V8_OS_FUCHSIA
+
+// Make a region of memory readable and writable.
+void OS::SetReadAndWritable(void* address, const size_t size, bool commit) {
+#if V8_OS_CYGWIN
+  DWORD oldprotect;
+  CHECK_NOT_NULL(VirtualProtect(address, size, PAGE_READWRITE, &oldprotect));
+#else
+  CHECK_EQ(0, mprotect(address, size, PROT_READ | PROT_WRITE));
+#endif
+}
+
+#if !V8_OS_CYGWIN && !V8_OS_FUCHSIA
+// static
+void* OS::ReserveRegion(size_t size, void* hint) {
+  int map_flags = MAP_PRIVATE | MAP_ANONYMOUS;
+#if !V8_OS_AIX && !V8_OS_FREEBSD && !V8_OS_QNX
+  map_flags |= MAP_NORESERVE;
+#endif
+#if V8_OS_QNX
+  map_flags |= MAP_LAZY;
+#endif  // V8_OS_QNX
+  void* result = mmap(hint, size, PROT_NONE, map_flags, kMmapFd, kMmapFdOffset);
+  if (result == MAP_FAILED) return nullptr;
+
+  return result;
+}
+
+// static
+void* OS::ReserveAlignedRegion(size_t size, size_t alignment, void* hint,
+                               size_t* allocated) {
+  DCHECK_EQ(0, alignment % OS::AllocatePageSize());
+  hint = AlignedAddress(hint, alignment);
+  size_t request_size =
+      RoundUp(size + alignment, static_cast<intptr_t>(OS::AllocatePageSize()));
+  void* result = ReserveRegion(request_size, hint);
+  if (result == nullptr) {
+    *allocated = 0;
+    return nullptr;
+  }
+
+  uint8_t* base = static_cast<uint8_t*>(result);
+  uint8_t* aligned_base = RoundUp(base, alignment);
+  DCHECK_LE(base, aligned_base);
+
+  // Unmap extra memory reserved before and after the desired block.
+  if (aligned_base != base) {
+    size_t prefix_size = static_cast<size_t>(aligned_base - base);
+    OS::Free(base, prefix_size);
+    request_size -= prefix_size;
+  }
+
+  size_t aligned_size = RoundUp(size, OS::AllocatePageSize());
+  DCHECK_LE(aligned_size, request_size);
+
+  if (aligned_size != request_size) {
+    size_t suffix_size = request_size - aligned_size;
+    OS::Free(aligned_base + aligned_size, suffix_size);
+    request_size -= suffix_size;
+  }
+
+  DCHECK(aligned_size == request_size);
+
+  *allocated = aligned_size;
+  return static_cast<void*>(aligned_base);
+}
+
+// static
+bool OS::CommitRegion(void* address, size_t size, bool is_executable) {
+  int prot = PROT_READ | PROT_WRITE | (is_executable ? PROT_EXEC : 0);
+#if !V8_OS_AIX
+  if (MAP_FAILED == mmap(address, size, prot,
+                         MAP_PRIVATE | MAP_ANONYMOUS | MAP_FIXED, kMmapFd,
+                         kMmapFdOffset)) {
+    return false;
+  }
+#else
+  if (mprotect(address, size, prot) == -1) return false;
+#endif  // !V8_OS_AIX
+  return true;
+}
+
+// static
+bool OS::UncommitRegion(void* address, size_t size) {
+#if !V8_OS_AIX
+  int map_flags = MAP_PRIVATE | MAP_ANONYMOUS | MAP_FIXED;
+#if !V8_OS_FREEBSD && !V8_OS_QNX
+  map_flags |= MAP_NORESERVE;
+#endif  // !V8_OS_FREEBSD && !V8_OS_QNX
+#if V8_OS_QNX
+  map_flags |= MAP_LAZY;
+#endif  // V8_OS_QNX
+  return mmap(address, size, PROT_NONE, map_flags, kMmapFd, kMmapFdOffset) !=
+         MAP_FAILED;
+#else   // V8_OS_AIX
+  return mprotect(address, size, PROT_NONE) != -1;
+#endif  // V8_OS_AIX
+}
+
+// static
+bool OS::ReleaseRegion(void* address, size_t size) {
+  return munmap(address, size) == 0;
+}
+
+// static
+bool OS::ReleasePartialRegion(void* address, size_t size) {
+  return munmap(address, size) == 0;
+}
+
+// static
+bool OS::HasLazyCommits() {
+#if V8_OS_AIX || V8_OS_LINUX || V8_OS_MACOSX
+  return true;
+#else
+  // TODO(bbudge) Return true for all POSIX platforms.
+  return false;
+#endif
+}
+#endif  // !V8_OS_CYGWIN && !V8_OS_FUCHSIA
+
+const char* OS::GetGCFakeMMapFile() {
+  return g_gc_fake_mmap;
 }
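The over-reserve-and-trim pattern used by OS::ReserveAlignedRegion above, restated as a standalone sketch. This is illustrative only: it uses raw POSIX mmap/munmap rather than the V8 wrappers, and it assumes that size and alignment are already multiples of the page size and that alignment is a power of two.

#include <sys/mman.h>

#include <cstddef>
#include <cstdint>

// Reserve `size` bytes aligned to `alignment` by over-reserving and trimming,
// mirroring the logic of OS::ReserveAlignedRegion above.
static void* ReserveAlignedSketch(size_t size, size_t alignment) {
  size_t request_size = size + alignment;  // guarantees an aligned sub-range
  void* base = mmap(nullptr, request_size, PROT_NONE,
                    MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
  if (base == MAP_FAILED) return nullptr;
  uintptr_t raw = reinterpret_cast<uintptr_t>(base);
  uintptr_t aligned =
      (raw + alignment - 1) & ~(static_cast<uintptr_t>(alignment) - 1);
  size_t prefix = static_cast<size_t>(aligned - raw);
  if (prefix != 0) munmap(base, prefix);  // unmap the misaligned head
  size_t suffix = request_size - prefix - size;
  if (suffix != 0)                        // unmap the unused tail
    munmap(reinterpret_cast<void*>(aligned + size), suffix);
  return reinterpret_cast<void*>(aligned);
}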
@@ -674,24 +674,9 @@ void OS::StrNCpy(char* dest, int length, const char* src, size_t n) {
 #undef _TRUNCATE
 #undef STRUNCATE


-// Get the system's page size used by VirtualAlloc() or the next power
-// of two. The reason for always returning a power of two is that the
-// rounding up in OS::Allocate expects that.
-static size_t GetPageSize() {
-  static size_t page_size = 0;
-  if (page_size == 0) {
-    SYSTEM_INFO info;
-    GetSystemInfo(&info);
-    page_size = base::bits::RoundUpToPowerOfTwo32(info.dwPageSize);
-  }
-  return page_size;
-}
-
-
 // The allocation alignment is the guaranteed alignment for
 // VirtualAlloc'ed blocks of memory.
-size_t OS::AllocateAlignment() {
+size_t OS::AllocatePageSize() {
   static size_t allocate_alignment = 0;
   if (allocate_alignment == 0) {
     SYSTEM_INFO info;
@@ -701,6 +686,17 @@ size_t OS::AllocateAlignment() {
   return allocate_alignment;
 }

+size_t OS::CommitPageSize() {
+  static size_t page_size = 0;
+  if (page_size == 0) {
+    SYSTEM_INFO info;
+    GetSystemInfo(&info);
+    page_size = info.dwPageSize;
+    DCHECK_EQ(4096, page_size);
+  }
+  return page_size;
+}
+
 static LazyInstance<RandomNumberGenerator>::type
     platform_random_number_generator = LAZY_INSTANCE_INITIALIZER;

@@ -763,18 +759,10 @@ static void* RandomizedVirtualAlloc(size_t size, int action, int protection,

 }  // namespace

-void* OS::Allocate(const size_t requested, size_t* allocated,
-                   bool is_executable, void* hint) {
-  return OS::Allocate(requested, allocated,
-                      is_executable ? OS::MemoryPermission::kReadWriteExecute
-                                    : OS::MemoryPermission::kReadWrite,
-                      hint);
-}
-
 void* OS::Allocate(const size_t requested, size_t* allocated,
                    OS::MemoryPermission access, void* hint) {
   // VirtualAlloc rounds allocated size to page size automatically.
-  size_t msize = RoundUp(requested, static_cast<int>(GetPageSize()));
+  size_t msize = RoundUp(requested, static_cast<int>(AllocatePageSize()));

   // Windows XP SP2 allows Data Excution Prevention (DEP).
   int prot = PAGE_NOACCESS;
@@ -798,7 +786,7 @@ void* OS::Allocate(const size_t requested, size_t* allocated,

   if (mbase == NULL) return NULL;

-  DCHECK_EQ(reinterpret_cast<uintptr_t>(mbase) % OS::AllocateAlignment(), 0);
+  DCHECK_EQ(reinterpret_cast<uintptr_t>(mbase) % OS::AllocatePageSize(), 0);

   *allocated = msize;
   return mbase;
@@ -810,10 +798,6 @@ void OS::Free(void* address, const size_t size) {
   USE(size);
 }

-intptr_t OS::CommitPageSize() {
-  return 4096;
-}
-
 void OS::SetReadAndExecutable(void* address, const size_t size) {
   DWORD old_protect;
   CHECK_NE(NULL,
@@ -841,10 +825,10 @@ void* OS::ReserveRegion(size_t size, void* hint) {

 void* OS::ReserveAlignedRegion(size_t size, size_t alignment, void* hint,
                                size_t* allocated) {
-  DCHECK_EQ(alignment % OS::AllocateAlignment(), 0);
+  DCHECK_EQ(alignment % OS::AllocatePageSize(), 0);
   hint = AlignedAddress(hint, alignment);
   size_t request_size =
-      RoundUp(size + alignment, static_cast<intptr_t>(OS::AllocateAlignment()));
+      RoundUp(size + alignment, static_cast<intptr_t>(OS::AllocatePageSize()));
   void* address = ReserveRegion(request_size, hint);
   if (address == nullptr) {
     *allocated = 0;
@@ -161,26 +161,24 @@ class V8_BASE_EXPORT OS {
   // here even though most systems support additional modes.
   enum class MemoryPermission { kNoAccess, kReadWrite, kReadWriteExecute };

-  // Allocate/Free memory used by JS heap. Permissions are set according to the
-  // is_* flags. Returns the address of allocated memory, or nullptr if failed.
+  // Gets the page granularity for Allocate. Addresses returned by Allocate are
+  // aligned to this size.
+  static size_t AllocatePageSize();
+
+  // Gets the granularity at which the permissions and commit calls can be made.
+  static size_t CommitPageSize();
+
+  // Generate a random address to be used for hinting allocation calls.
+  static void* GetRandomMmapAddr();
+
+  // Allocates memory. Permissions are set according to the access argument.
+  // Returns the address of the allocated memory, or nullptr on failure.
   static void* Allocate(const size_t requested, size_t* allocated,
                         MemoryPermission access, void* hint = nullptr);
-  // Allocate/Free memory used by JS heap. Pages are readable/writable, but
-  // they are not guaranteed to be executable unless 'executable' is true.
-  // Returns the address of allocated memory, or nullptr if failed.
-  static void* Allocate(const size_t requested, size_t* allocated,
-                        bool is_executable, void* hint = nullptr);

   // Frees memory allocated by a call to Allocate.
   static void Free(void* address, const size_t size);

-  // Allocates a region of memory that is inaccessible. On Windows this reserves
-  // but does not commit the memory. On POSIX systems it allocates memory as
-  // PROT_NONE, which also prevents it from being committed.
-  static void* AllocateGuarded(const size_t requested);
-
-  // This is the granularity at which the SetReadAndExecutable(...) call can
-  // set page permissions.
-  static intptr_t CommitPageSize();
-
   // Mark a region of memory executable and readable but not writable.
   static void SetReadAndExecutable(void* address, const size_t size);

@@ -190,12 +188,6 @@ class V8_BASE_EXPORT OS {
   // Make a region of memory non-executable but readable and writable.
   static void SetReadAndWritable(void* address, const size_t size, bool commit);

-  // Generate a random address to be used for hinting mmap().
-  static void* GetRandomMmapAddr();
-
-  // Get the Alignment guaranteed by Allocate().
-  static size_t AllocateAlignment();
-
   static void* ReserveRegion(size_t size, void* hint);

   static void* ReserveAlignedRegion(size_t size, size_t alignment, void* hint,
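A small usage sketch of the reworked interface declared above. Illustrative only, not part of the patch; it relies on the declarations shown in this hunk and on the include path and namespaces used by the tests later in this CL.

#include "src/base/platform/platform.h"

// Allocate one read/write page through the cleaned-up OS interface.
void* AllocateOnePage(size_t* allocated) {
  // Addresses returned by Allocate are aligned to AllocatePageSize();
  // CommitPageSize() is the (possibly smaller) permission/commit granularity.
  void* hint = v8::base::OS::GetRandomMmapAddr();
  void* mem =
      v8::base::OS::Allocate(v8::base::OS::AllocatePageSize(), allocated,
                             v8::base::OS::MemoryPermission::kReadWrite, hint);
  return mem;  // nullptr on failure; release later with v8::base::OS::Free().
}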
@@ -119,9 +119,7 @@ bool CodeRange::SetUp(size_t requested) {

   VirtualMemory reservation;
   if (!AlignedAllocVirtualMemory(
-          requested,
-          Max(kCodeRangeAreaAlignment,
-              static_cast<size_t>(base::OS::AllocateAlignment())),
+          requested, Max(kCodeRangeAreaAlignment, base::OS::AllocatePageSize()),
           base::OS::GetRandomMmapAddr(), &reservation)) {
     return false;
   }
@@ -2403,7 +2401,7 @@ bool SemiSpace::GrowTo(size_t new_capacity) {
   DCHECK_LE(new_capacity, maximum_capacity_);
   DCHECK_GT(new_capacity, current_capacity_);
   const size_t delta = new_capacity - current_capacity_;
-  DCHECK(IsAligned(delta, base::OS::AllocateAlignment()));
+  DCHECK(IsAligned(delta, base::OS::AllocatePageSize()));
   const int delta_pages = static_cast<int>(delta / Page::kPageSize);
   Page* last_page = anchor()->prev_page();
   DCHECK_NE(last_page, anchor());
@@ -2447,7 +2445,7 @@ bool SemiSpace::ShrinkTo(size_t new_capacity) {
   DCHECK_LT(new_capacity, current_capacity_);
   if (is_committed()) {
     const size_t delta = current_capacity_ - new_capacity;
-    DCHECK(IsAligned(delta, base::OS::AllocateAlignment()));
+    DCHECK(IsAligned(delta, base::OS::AllocatePageSize()));
     int delta_pages = static_cast<int>(delta / Page::kPageSize);
     Page* new_last_page;
     Page* last_page;
@@ -18,8 +18,8 @@ namespace internal {
 UnaryMathFunctionWithIsolate CreateSqrtFunction(Isolate* isolate) {
   size_t actual_size;
   // Allocate buffer in executable space.
-  byte* buffer =
-      static_cast<byte*>(base::OS::Allocate(1 * KB, &actual_size, true));
+  byte* buffer = static_cast<byte*>(base::OS::Allocate(
+      1 * KB, &actual_size, base::OS::MemoryPermission::kReadWriteExecute));
   if (buffer == nullptr) return nullptr;
   MacroAssembler masm(isolate, buffer, static_cast<int>(actual_size),
                       CodeObjectRequired::kNo);
@@ -134,8 +134,8 @@ class LabelConverter {
 MemMoveFunction CreateMemMoveFunction(Isolate* isolate) {
   size_t actual_size;
   // Allocate buffer in executable space.
-  byte* buffer =
-      static_cast<byte*>(base::OS::Allocate(1 * KB, &actual_size, true));
+  byte* buffer = static_cast<byte*>(base::OS::Allocate(
+      1 * KB, &actual_size, base::OS::MemoryPermission::kReadWriteExecute));
   if (buffer == nullptr) return nullptr;
   MacroAssembler masm(isolate, buffer, static_cast<int>(actual_size),
                       CodeObjectRequired::kNo);
@@ -24,8 +24,8 @@ MemCopyUint8Function CreateMemCopyUint8Function(Isolate* isolate,
   return stub;
 #else
   size_t actual_size;
-  byte* buffer =
-      static_cast<byte*>(base::OS::Allocate(3 * KB, &actual_size, true));
+  byte* buffer = static_cast<byte*>(base::OS::Allocate(
+      3 * KB, &actual_size, base::OS::MemoryPermission::kReadWriteExecute));
   if (buffer == nullptr) return stub;

   // This code assumes that cache lines are 32 bytes and if the cache line is
@@ -556,8 +556,8 @@ UnaryMathFunctionWithIsolate CreateSqrtFunction(Isolate* isolate) {
   return nullptr;
 #else
   size_t actual_size;
-  byte* buffer =
-      static_cast<byte*>(base::OS::Allocate(1 * KB, &actual_size, true));
+  byte* buffer = static_cast<byte*>(base::OS::Allocate(
+      1 * KB, &actual_size, base::OS::MemoryPermission::kReadWriteExecute));
   if (buffer == nullptr) return nullptr;

   MacroAssembler masm(isolate, buffer, static_cast<int>(actual_size),
@@ -25,8 +25,8 @@ MemCopyUint8Function CreateMemCopyUint8Function(Isolate* isolate,
 #else

   size_t actual_size;
-  byte* buffer =
-      static_cast<byte*>(base::OS::Allocate(3 * KB, &actual_size, true));
+  byte* buffer = static_cast<byte*>(base::OS::Allocate(
+      3 * KB, &actual_size, base::OS::MemoryPermission::kReadWriteExecute));
   if (buffer == nullptr) return stub;

   // This code assumes that cache lines are 32 bytes and if the cache line is
@@ -558,8 +558,8 @@ UnaryMathFunctionWithIsolate CreateSqrtFunction(Isolate* isolate) {
   return nullptr;
 #else
   size_t actual_size;
-  byte* buffer =
-      static_cast<byte*>(base::OS::Allocate(1 * KB, &actual_size, true));
+  byte* buffer = static_cast<byte*>(base::OS::Allocate(
+      1 * KB, &actual_size, base::OS::MemoryPermission::kReadWriteExecute));
   if (buffer == nullptr) return nullptr;

   MacroAssembler masm(isolate, buffer, static_cast<int>(actual_size),
@@ -21,8 +21,8 @@ UnaryMathFunctionWithIsolate CreateSqrtFunction(Isolate* isolate) {
   return nullptr;
 #else
   size_t actual_size;
-  byte* buffer =
-      static_cast<byte*>(base::OS::Allocate(1 * KB, &actual_size, true));
+  byte* buffer = static_cast<byte*>(base::OS::Allocate(
+      1 * KB, &actual_size, base::OS::MemoryPermission::kReadWriteExecute));
   if (buffer == nullptr) return nullptr;

   MacroAssembler masm(isolate, buffer, static_cast<int>(actual_size),
@@ -20,8 +20,8 @@ UnaryMathFunctionWithIsolate CreateSqrtFunction(Isolate* isolate) {
   return nullptr;
 #else
   size_t actual_size;
-  byte* buffer =
-      static_cast<byte*>(base::OS::Allocate(1 * KB, &actual_size, true));
+  byte* buffer = static_cast<byte*>(base::OS::Allocate(
+      1 * KB, &actual_size, base::OS::MemoryPermission::kReadWriteExecute));
   if (buffer == nullptr) return nullptr;

   MacroAssembler masm(isolate, buffer, static_cast<int>(actual_size),
@@ -27,7 +27,7 @@ void* TryAllocateBackingStore(Isolate* isolate, size_t size,
     allocation_length = RoundUp(kWasmMaxHeapOffset, base::OS::CommitPageSize());
     DCHECK_EQ(0, size % base::OS::CommitPageSize());

-    // AllocateGuarded makes the whole region inaccessible by default.
+    // The Reserve makes the whole region inaccessible by default.
     allocation_base =
         isolate->array_buffer_allocator()->Reserve(allocation_length);
     if (allocation_base == nullptr) {
@@ -18,7 +18,8 @@ UnaryMathFunctionWithIsolate CreateSqrtFunction(Isolate* isolate) {
   size_t actual_size;
   // Allocate buffer in executable space.
   byte* buffer = static_cast<byte*>(base::OS::Allocate(
-      1 * KB, &actual_size, true, isolate->heap()->GetRandomMmapAddr()));
+      1 * KB, &actual_size, base::OS::MemoryPermission::kReadWriteExecute,
+      isolate->heap()->GetRandomMmapAddr()));
   if (buffer == nullptr) return nullptr;

   MacroAssembler masm(isolate, buffer, static_cast<int>(actual_size),
@@ -122,7 +122,7 @@ TEST(AlignedAllocOOM) {
   // On failure, this won't return, since an AlignedAlloc failure is fatal.
   // In that case, behavior is checked in OnAlignedAllocOOM before exit.
   void* result = v8::internal::AlignedAlloc(GetHugeMemoryAmount(),
-                                            v8::base::OS::AllocateAlignment());
+                                            v8::base::OS::AllocatePageSize());
   // On a few systems, allocation somehow succeeds.
   CHECK_EQ(result == nullptr, platform.oom_callback_called);
 }
@@ -143,7 +143,7 @@ TEST(AlignedAllocVirtualMemoryOOM) {
   CHECK(!platform.oom_callback_called);
   v8::internal::VirtualMemory result;
   bool success = v8::internal::AlignedAllocVirtualMemory(
-      GetHugeMemoryAmount(), v8::base::OS::AllocateAlignment(), nullptr,
+      GetHugeMemoryAmount(), v8::base::OS::AllocatePageSize(), nullptr,
       &result);
   // On a few systems, allocation somehow succeeds.
   CHECK_IMPLIES(success, result.IsReserved());
@@ -174,15 +174,15 @@ static void InitializeVM() {

 #else  // ifdef USE_SIMULATOR.
 // Run the test on real hardware or models.
-#define SETUP_SIZE(buf_size) \
-  Isolate* isolate = CcTest::i_isolate(); \
-  HandleScope scope(isolate); \
-  CHECK_NOT_NULL(isolate); \
-  size_t actual_size; \
-  byte* buf = static_cast<byte*>( \
-      v8::base::OS::Allocate(buf_size, &actual_size, true)); \
-  MacroAssembler masm(isolate, buf, static_cast<unsigned>(actual_size), \
-                      v8::internal::CodeObjectRequired::kYes); \
+#define SETUP_SIZE(buf_size) \
+  Isolate* isolate = CcTest::i_isolate(); \
+  HandleScope scope(isolate); \
+  CHECK_NOT_NULL(isolate); \
+  size_t actual_size; \
+  byte* buf = static_cast<byte*>(v8::base::OS::Allocate( \
+      buf_size, &actual_size, base::OS::MemoryPermission::kReadWriteExecute)); \
+  MacroAssembler masm(isolate, buf, static_cast<unsigned>(actual_size), \
+                      v8::internal::CodeObjectRequired::kYes); \
   RegisterDump core;

 #define RESET() \
@@ -15655,3 +15655,23 @@ TEST(internal_reference_linked) {

 }  // namespace internal
 }  // namespace v8
+
+#undef __
+#undef BUF_SIZE
+#undef SETUP
+#undef INIT_V8
+#undef SETUP_SIZE
+#undef RESET
+#undef START_AFTER_RESET
+#undef START
+#undef RUN
+#undef END
+#undef TEARDOWN
+#undef CHECK_EQUAL_NZCV
+#undef CHECK_EQUAL_REGISTERS
+#undef CHECK_EQUAL_32
+#undef CHECK_EQUAL_FP32
+#undef CHECK_EQUAL_64
+#undef CHECK_EQUAL_FP64
+#undef CHECK_EQUAL_128
+#undef CHECK_CONSTANT_POOL_SIZE
@@ -69,15 +69,25 @@ static const Register arg2 = rsi;

 #define __ assm.

+namespace {
+
+byte* AllocateExecutablePage(int* actual_size) {
+  size_t allocated = 0;
+  void* result =
+      v8::base::OS::Allocate(Assembler::kMinimalBufferSize, &allocated,
+                             v8::base::OS::MemoryPermission::kReadWriteExecute);
+  CHECK(result);
+  *actual_size = static_cast<int>(allocated);
+  return static_cast<byte*>(result);
+}
+
+}  // namespace
+
 TEST(AssemblerX64ReturnOperation) {
   CcTest::InitializeVM();
-  // Allocate an executable page of memory.
-  size_t actual_size;
-  byte* buffer = static_cast<byte*>(v8::base::OS::Allocate(
-      Assembler::kMinimalBufferSize, &actual_size, true));
-  CHECK(buffer);
-  Assembler assm(CcTest::i_isolate(), buffer, static_cast<int>(actual_size));
+  int actual_size;
+  byte* buffer = AllocateExecutablePage(&actual_size);
+  Assembler assm(CcTest::i_isolate(), buffer, actual_size);

   // Assemble a simple function that copies argument 2 and returns it.
   __ movq(rax, arg2);
@ -94,12 +104,9 @@ TEST(AssemblerX64ReturnOperation) {
|
||||
|
||||
TEST(AssemblerX64StackOperations) {
|
||||
CcTest::InitializeVM();
|
||||
// Allocate an executable page of memory.
|
||||
size_t actual_size;
|
||||
byte* buffer = static_cast<byte*>(v8::base::OS::Allocate(
|
||||
Assembler::kMinimalBufferSize, &actual_size, true));
|
||||
CHECK(buffer);
|
||||
Assembler assm(CcTest::i_isolate(), buffer, static_cast<int>(actual_size));
|
||||
int actual_size;
|
||||
byte* buffer = AllocateExecutablePage(&actual_size);
|
||||
Assembler assm(CcTest::i_isolate(), buffer, actual_size);
|
||||
|
||||
// Assemble a simple function that copies argument 2 and returns it.
|
||||
// We compile without stack frame pointers, so the gdb debugger shows
|
||||
@ -126,12 +133,9 @@ TEST(AssemblerX64StackOperations) {
|
||||
|
||||
TEST(AssemblerX64ArithmeticOperations) {
|
||||
CcTest::InitializeVM();
|
||||
// Allocate an executable page of memory.
|
||||
size_t actual_size;
|
||||
byte* buffer = static_cast<byte*>(v8::base::OS::Allocate(
|
||||
Assembler::kMinimalBufferSize, &actual_size, true));
|
||||
CHECK(buffer);
|
||||
Assembler assm(CcTest::i_isolate(), buffer, static_cast<int>(actual_size));
|
||||
int actual_size;
|
||||
byte* buffer = AllocateExecutablePage(&actual_size);
|
||||
Assembler assm(CcTest::i_isolate(), buffer, actual_size);
|
||||
|
||||
// Assemble a simple function that adds arguments returning the sum.
|
||||
__ movq(rax, arg2);
|
||||
@ -148,12 +152,9 @@ TEST(AssemblerX64ArithmeticOperations) {
|
||||
|
||||
TEST(AssemblerX64CmpbOperation) {
|
||||
CcTest::InitializeVM();
|
||||
// Allocate an executable page of memory.
|
||||
size_t actual_size;
|
||||
byte* buffer = static_cast<byte*>(v8::base::OS::Allocate(
|
||||
Assembler::kMinimalBufferSize, &actual_size, true));
|
||||
CHECK(buffer);
|
||||
Assembler assm(CcTest::i_isolate(), buffer, static_cast<int>(actual_size));
|
||||
int actual_size;
|
||||
byte* buffer = AllocateExecutablePage(&actual_size);
|
||||
Assembler assm(CcTest::i_isolate(), buffer, actual_size);
|
||||
|
||||
// Assemble a function that compare argument byte returing 1 if equal else 0.
|
||||
// On Windows, it compares rcx with rdx which does not require REX prefix;
|
||||
@ -178,12 +179,10 @@ TEST(AssemblerX64CmpbOperation) {
|
||||
|
||||
TEST(Regression684407) {
|
||||
CcTest::InitializeVM();
|
||||
// Allocate an executable page of memory.
|
||||
size_t actual_size;
|
||||
byte* buffer = static_cast<byte*>(v8::base::OS::Allocate(
|
||||
Assembler::kMinimalBufferSize, &actual_size, true));
|
||||
CHECK(buffer);
|
||||
Assembler assm(CcTest::i_isolate(), buffer, static_cast<int>(actual_size));
|
||||
int actual_size;
|
||||
byte* buffer = AllocateExecutablePage(&actual_size);
|
||||
Assembler assm(CcTest::i_isolate(), buffer, actual_size);
|
||||
|
||||
Address before = assm.pc();
|
||||
__ cmpl(Operand(arg1, 0),
|
||||
Immediate(0, RelocInfo::WASM_FUNCTION_TABLE_SIZE_REFERENCE));
|
||||
@ -195,12 +194,9 @@ TEST(Regression684407) {
|
||||
|
||||
TEST(AssemblerX64ImulOperation) {
|
||||
CcTest::InitializeVM();
|
||||
// Allocate an executable page of memory.
|
||||
size_t actual_size;
|
||||
byte* buffer = static_cast<byte*>(v8::base::OS::Allocate(
|
||||
Assembler::kMinimalBufferSize, &actual_size, true));
|
||||
CHECK(buffer);
|
||||
Assembler assm(CcTest::i_isolate(), buffer, static_cast<int>(actual_size));
|
||||
int actual_size;
|
||||
byte* buffer = AllocateExecutablePage(&actual_size);
|
||||
Assembler assm(CcTest::i_isolate(), buffer, actual_size);
|
||||
|
||||
// Assemble a simple function that multiplies arguments returning the high
|
||||
// word.
|
||||
@ -223,12 +219,9 @@ TEST(AssemblerX64ImulOperation) {
|
||||
TEST(AssemblerX64testbwqOperation) {
|
||||
CcTest::InitializeVM();
|
||||
v8::HandleScope scope(CcTest::isolate());
|
||||
// Allocate an executable page of memory.
|
||||
size_t actual_size;
|
||||
byte* buffer = static_cast<byte*>(v8::base::OS::Allocate(
|
||||
Assembler::kMinimalBufferSize, &actual_size, true));
|
||||
CHECK(buffer);
|
||||
Assembler assm(CcTest::i_isolate(), buffer, static_cast<int>(actual_size));
|
||||
int actual_size;
|
||||
byte* buffer = AllocateExecutablePage(&actual_size);
|
||||
Assembler assm(CcTest::i_isolate(), buffer, actual_size);
|
||||
|
||||
__ pushq(rbx);
|
||||
__ pushq(rdi);
|
||||
@ -390,12 +383,9 @@ TEST(AssemblerX64testbwqOperation) {
|
||||
|
||||
TEST(AssemblerX64XchglOperations) {
|
||||
CcTest::InitializeVM();
|
||||
// Allocate an executable page of memory.
|
||||
size_t actual_size;
|
||||
byte* buffer = static_cast<byte*>(v8::base::OS::Allocate(
|
||||
Assembler::kMinimalBufferSize, &actual_size, true));
|
||||
CHECK(buffer);
|
||||
Assembler assm(CcTest::i_isolate(), buffer, static_cast<int>(actual_size));
|
||||
int actual_size;
|
||||
byte* buffer = AllocateExecutablePage(&actual_size);
|
||||
Assembler assm(CcTest::i_isolate(), buffer, actual_size);
|
||||
|
||||
__ movq(rax, Operand(arg1, 0));
|
||||
__ movq(r11, Operand(arg2, 0));
|
||||
@ -418,12 +408,9 @@ TEST(AssemblerX64XchglOperations) {
|
||||
|
||||
TEST(AssemblerX64OrlOperations) {
|
||||
CcTest::InitializeVM();
|
||||
// Allocate an executable page of memory.
|
||||
size_t actual_size;
|
||||
byte* buffer = static_cast<byte*>(v8::base::OS::Allocate(
|
||||
Assembler::kMinimalBufferSize, &actual_size, true));
|
||||
CHECK(buffer);
|
||||
Assembler assm(CcTest::i_isolate(), buffer, static_cast<int>(actual_size));
|
||||
int actual_size;
|
||||
byte* buffer = AllocateExecutablePage(&actual_size);
|
||||
Assembler assm(CcTest::i_isolate(), buffer, actual_size);
|
||||
|
||||
__ movq(rax, Operand(arg2, 0));
|
||||
__ orl(Operand(arg1, 0), rax);
|
||||
@ -442,12 +429,9 @@ TEST(AssemblerX64OrlOperations) {
|
||||
|
||||
TEST(AssemblerX64RollOperations) {
|
||||
CcTest::InitializeVM();
|
||||
// Allocate an executable page of memory.
|
||||
size_t actual_size;
|
||||
byte* buffer = static_cast<byte*>(v8::base::OS::Allocate(
|
||||
Assembler::kMinimalBufferSize, &actual_size, true));
|
||||
CHECK(buffer);
|
||||
Assembler assm(CcTest::i_isolate(), buffer, static_cast<int>(actual_size));
|
||||
int actual_size;
|
||||
byte* buffer = AllocateExecutablePage(&actual_size);
|
||||
Assembler assm(CcTest::i_isolate(), buffer, actual_size);
|
||||
|
||||
__ movq(rax, arg1);
|
||||
__ roll(rax, Immediate(1));
|
||||
@ -464,12 +448,9 @@ TEST(AssemblerX64RollOperations) {
|
||||
|
||||
TEST(AssemblerX64SublOperations) {
|
||||
CcTest::InitializeVM();
|
||||
// Allocate an executable page of memory.
|
||||
size_t actual_size;
|
||||
byte* buffer = static_cast<byte*>(v8::base::OS::Allocate(
|
||||
Assembler::kMinimalBufferSize, &actual_size, true));
|
||||
CHECK(buffer);
|
||||
Assembler assm(CcTest::i_isolate(), buffer, static_cast<int>(actual_size));
|
||||
int actual_size;
|
||||
byte* buffer = AllocateExecutablePage(&actual_size);
|
||||
Assembler assm(CcTest::i_isolate(), buffer, actual_size);
|
||||
|
||||
__ movq(rax, Operand(arg2, 0));
|
||||
__ subl(Operand(arg1, 0), rax);
|
||||
@ -488,12 +469,9 @@ TEST(AssemblerX64SublOperations) {
|
||||
|
||||
TEST(AssemblerX64TestlOperations) {
|
||||
CcTest::InitializeVM();
|
||||
// Allocate an executable page of memory.
|
||||
size_t actual_size;
|
||||
byte* buffer = static_cast<byte*>(v8::base::OS::Allocate(
|
||||
Assembler::kMinimalBufferSize, &actual_size, true));
|
||||
CHECK(buffer);
|
||||
Assembler assm(CcTest::i_isolate(), buffer, static_cast<int>(actual_size));
|
||||
int actual_size;
|
||||
byte* buffer = AllocateExecutablePage(&actual_size);
|
||||
Assembler assm(CcTest::i_isolate(), buffer, actual_size);
|
||||
|
||||
// Set rax with the ZF flag of the testl instruction.
|
||||
Label done;
|
||||
@ -517,12 +495,9 @@ TEST(AssemblerX64TestlOperations) {
|
||||
TEST(AssemblerX64TestwOperations) {
|
||||
typedef uint16_t (*F)(uint16_t * x);
|
||||
CcTest::InitializeVM();
|
||||
// Allocate an executable page of memory.
|
||||
size_t actual_size;
|
||||
byte* buffer = static_cast<byte*>(v8::base::OS::Allocate(
|
||||
Assembler::kMinimalBufferSize, &actual_size, true));
|
||||
CHECK(buffer);
|
||||
Assembler assm(CcTest::i_isolate(), buffer, static_cast<int>(actual_size));
|
||||
int actual_size;
|
||||
byte* buffer = AllocateExecutablePage(&actual_size);
|
||||
Assembler assm(CcTest::i_isolate(), buffer, actual_size);
|
||||
|
||||
// Set rax with the ZF flag of the testl instruction.
|
||||
Label done;
|
||||
@ -543,12 +518,9 @@ TEST(AssemblerX64TestwOperations) {
|
||||
|
||||
TEST(AssemblerX64XorlOperations) {
|
||||
CcTest::InitializeVM();
|
||||
// Allocate an executable page of memory.
|
||||
size_t actual_size;
|
||||
byte* buffer = static_cast<byte*>(v8::base::OS::Allocate(
|
||||
Assembler::kMinimalBufferSize, &actual_size, true));
|
||||
CHECK(buffer);
|
||||
Assembler assm(CcTest::i_isolate(), buffer, static_cast<int>(actual_size));
|
||||
int actual_size;
|
||||
byte* buffer = AllocateExecutablePage(&actual_size);
|
||||
Assembler assm(CcTest::i_isolate(), buffer, actual_size);
|
||||
|
||||
__ movq(rax, Operand(arg2, 0));
|
||||
__ xorl(Operand(arg1, 0), rax);
|
||||
@ -567,12 +539,9 @@ TEST(AssemblerX64XorlOperations) {
|
||||
|
||||
TEST(AssemblerX64MemoryOperands) {
|
||||
CcTest::InitializeVM();
|
||||
// Allocate an executable page of memory.
|
||||
size_t actual_size;
|
||||
byte* buffer = static_cast<byte*>(v8::base::OS::Allocate(
|
||||
Assembler::kMinimalBufferSize, &actual_size, true));
|
||||
CHECK(buffer);
|
||||
Assembler assm(CcTest::i_isolate(), buffer, static_cast<int>(actual_size));
|
||||
int actual_size;
|
||||
byte* buffer = AllocateExecutablePage(&actual_size);
|
||||
Assembler assm(CcTest::i_isolate(), buffer, actual_size);
|
||||
|
||||
// Assemble a simple function that copies argument 2 and returns it.
|
||||
__ pushq(rbp);
|
||||
@ -601,12 +570,9 @@ TEST(AssemblerX64MemoryOperands) {
|
||||
|
||||
TEST(AssemblerX64ControlFlow) {
|
||||
CcTest::InitializeVM();
|
||||
// Allocate an executable page of memory.
|
||||
size_t actual_size;
|
||||
byte* buffer = static_cast<byte*>(v8::base::OS::Allocate(
|
||||
Assembler::kMinimalBufferSize, &actual_size, true));
|
||||
CHECK(buffer);
|
||||
Assembler assm(CcTest::i_isolate(), buffer, static_cast<int>(actual_size));
|
||||
int actual_size;
|
||||
byte* buffer = AllocateExecutablePage(&actual_size);
|
||||
Assembler assm(CcTest::i_isolate(), buffer, actual_size);
|
||||
|
||||
// Assemble a simple function that copies argument 1 and returns it.
|
||||
__ pushq(rbp);
|
||||
@ -630,12 +596,10 @@ TEST(AssemblerX64ControlFlow) {
|
||||
|
||||
TEST(AssemblerX64LoopImmediates) {
|
||||
CcTest::InitializeVM();
|
||||
// Allocate an executable page of memory.
|
||||
size_t actual_size;
|
||||
byte* buffer = static_cast<byte*>(v8::base::OS::Allocate(
|
||||
Assembler::kMinimalBufferSize, &actual_size, true));
|
||||
CHECK(buffer);
|
||||
Assembler assm(CcTest::i_isolate(), buffer, static_cast<int>(actual_size));
|
||||
int actual_size;
|
||||
byte* buffer = AllocateExecutablePage(&actual_size);
|
||||
Assembler assm(CcTest::i_isolate(), buffer, actual_size);
|
||||
|
||||
// Assemble two loops using rax as counter, and verify the ending counts.
|
||||
Label Fail;
|
||||
__ movq(rax, Immediate(-3));
|
||||
@ -2482,12 +2446,9 @@ TEST(AssemblerX64JumpTables2) {
|
||||
|
||||
TEST(AssemblerX64PslldWithXmm15) {
|
||||
CcTest::InitializeVM();
|
||||
// Allocate an executable page of memory.
|
||||
size_t actual_size;
|
||||
byte* buffer = static_cast<byte*>(v8::base::OS::Allocate(
|
||||
Assembler::kMinimalBufferSize, &actual_size, true));
|
||||
CHECK(buffer);
|
||||
Assembler assm(CcTest::i_isolate(), buffer, static_cast<int>(actual_size));
|
||||
int actual_size;
|
||||
byte* buffer = AllocateExecutablePage(&actual_size);
|
||||
Assembler assm(CcTest::i_isolate(), buffer, actual_size);
|
||||
|
||||
__ movq(xmm15, arg1);
|
||||
__ pslld(xmm15, 1);
|
||||
|
@@ -48,7 +48,8 @@ ConvertDToIFunc MakeConvertDToIFuncTrampoline(Isolate* isolate,
   // Allocate an executable page of memory.
   size_t actual_size;
   byte* buffer = static_cast<byte*>(v8::base::OS::Allocate(
-      Assembler::kMinimalBufferSize, &actual_size, true));
+      Assembler::kMinimalBufferSize, &actual_size,
+      v8::base::OS::MemoryPermission::kReadWriteExecute));
   CHECK(buffer);
   HandleScope handles(isolate);
   MacroAssembler masm(isolate, buffer, static_cast<int>(actual_size),
@@ -47,8 +47,9 @@ ConvertDToIFunc MakeConvertDToIFuncTrampoline(Isolate* isolate,
                                               Register destination_reg) {
   // Allocate an executable page of memory.
   size_t actual_size = 4 * Assembler::kMinimalBufferSize;
-  byte* buffer = static_cast<byte*>(
-      v8::base::OS::Allocate(actual_size, &actual_size, true));
+  byte* buffer = static_cast<byte*>(v8::base::OS::Allocate(
+      actual_size, &actual_size,
+      v8::base::OS::MemoryPermission::kReadWriteExecute));
   CHECK(buffer);
   HandleScope handles(isolate);
   MacroAssembler masm(isolate, buffer, static_cast<int>(actual_size),
@@ -49,7 +49,8 @@ ConvertDToIFunc MakeConvertDToIFuncTrampoline(Isolate* isolate,
   // Allocate an executable page of memory.
   size_t actual_size;
   byte* buffer = static_cast<byte*>(v8::base::OS::Allocate(
-      Assembler::kMinimalBufferSize, &actual_size, true));
+      Assembler::kMinimalBufferSize, &actual_size,
+      v8::base::OS::MemoryPermission::kReadWriteExecute));
   CHECK(buffer);
   HandleScope handles(isolate);
   MacroAssembler assm(isolate, buffer, static_cast<int>(actual_size),
@@ -50,7 +50,8 @@ ConvertDToIFunc MakeConvertDToIFuncTrampoline(Isolate* isolate,
   // Allocate an executable page of memory.
   size_t actual_size;
   byte* buffer = static_cast<byte*>(v8::base::OS::Allocate(
-      Assembler::kMinimalBufferSize, &actual_size, true));
+      Assembler::kMinimalBufferSize, &actual_size,
+      v8::base::OS::MemoryPermission::kReadWriteExecute));
   CHECK(buffer);
   HandleScope handles(isolate);
   MacroAssembler masm(isolate, buffer, static_cast<int>(actual_size),
@@ -50,7 +50,8 @@ ConvertDToIFunc MakeConvertDToIFuncTrampoline(Isolate* isolate,
   // Allocate an executable page of memory.
   size_t actual_size;
   byte* buffer = static_cast<byte*>(v8::base::OS::Allocate(
-      Assembler::kMinimalBufferSize, &actual_size, true));
+      Assembler::kMinimalBufferSize, &actual_size,
+      v8::base::OS::MemoryPermission::kReadWriteExecute));
   CHECK(buffer);
   HandleScope handles(isolate);
   MacroAssembler masm(isolate, buffer, static_cast<int>(actual_size),
@@ -49,7 +49,8 @@ ConvertDToIFunc MakeConvertDToIFuncTrampoline(Isolate* isolate,
   // Allocate an executable page of memory.
   size_t actual_size;
   byte* buffer = static_cast<byte*>(v8::base::OS::Allocate(
-      Assembler::kMinimalBufferSize, &actual_size, true));
+      Assembler::kMinimalBufferSize, &actual_size,
+      v8::base::OS::MemoryPermission::kReadWriteExecute));
   CHECK(buffer);
   HandleScope handles(isolate);
   MacroAssembler assm(isolate, buffer, static_cast<int>(actual_size),
@@ -45,16 +45,22 @@ typedef void* (*F)(int x, int y, int p2, int p3, int p4);
 typedef Object* (*F3)(void* p0, int p1, int p2, int p3, int p4);
 typedef int (*F5)(void*, void*, void*, void*, void*);

+byte* AllocateExecutablePage(int* actual_size) {
+  size_t allocated = 0;
+  void* result =
+      v8::base::OS::Allocate(Assembler::kMinimalBufferSize, &allocated,
+                             v8::base::OS::MemoryPermission::kReadWriteExecute);
+  CHECK(result);
+  *actual_size = static_cast<int>(allocated);
+  return static_cast<byte*>(result);
+}
+
 TEST(LoadAndStoreWithRepresentation) {
   // Allocate an executable page of memory.
-  size_t actual_size;
-  byte* buffer = static_cast<byte*>(v8::base::OS::Allocate(
-      Assembler::kMinimalBufferSize, &actual_size, true));
-  CHECK(buffer);
   Isolate* isolate = CcTest::i_isolate();
   HandleScope handles(isolate);
-  MacroAssembler assembler(isolate, buffer, static_cast<int>(actual_size),
+  int actual_size;
+  byte* buffer = AllocateExecutablePage(&actual_size);
+  MacroAssembler assembler(isolate, buffer, actual_size,
                            v8::internal::CodeObjectRequired::kYes);
   MacroAssembler* masm = &assembler;  // Create a pointer for the __ macro.
   __ sub(sp, sp, Operand(1 * kPointerSize));
@ -138,14 +144,11 @@ TEST(LoadAndStoreWithRepresentation) {
|
||||
TEST(ExtractLane) {
|
||||
if (!CpuFeatures::IsSupported(NEON)) return;
|
||||
|
||||
// Allocate an executable page of memory.
|
||||
size_t actual_size;
|
||||
byte* buffer = static_cast<byte*>(v8::base::OS::Allocate(
|
||||
Assembler::kMinimalBufferSize, &actual_size, true));
|
||||
CHECK(buffer);
|
||||
Isolate* isolate = CcTest::i_isolate();
|
||||
HandleScope handles(isolate);
|
||||
MacroAssembler assembler(isolate, buffer, static_cast<int>(actual_size),
|
||||
int actual_size;
|
||||
byte* buffer = AllocateExecutablePage(&actual_size);
|
||||
MacroAssembler assembler(isolate, buffer, actual_size,
|
||||
v8::internal::CodeObjectRequired::kYes);
|
||||
MacroAssembler* masm = &assembler; // Create a pointer for the __ macro.
|
||||
|
||||
@ -281,14 +284,11 @@ TEST(ExtractLane) {
|
||||
TEST(ReplaceLane) {
|
||||
if (!CpuFeatures::IsSupported(NEON)) return;
|
||||
|
||||
// Allocate an executable page of memory.
|
||||
size_t actual_size;
|
||||
byte* buffer = static_cast<byte*>(v8::base::OS::Allocate(
|
||||
Assembler::kMinimalBufferSize, &actual_size, true));
|
||||
CHECK(buffer);
|
||||
Isolate* isolate = CcTest::i_isolate();
|
||||
HandleScope handles(isolate);
|
||||
MacroAssembler assembler(isolate, buffer, static_cast<int>(actual_size),
|
||||
int actual_size;
|
||||
byte* buffer = AllocateExecutablePage(&actual_size);
|
||||
MacroAssembler assembler(isolate, buffer, actual_size,
|
||||
v8::internal::CodeObjectRequired::kYes);
|
||||
MacroAssembler* masm = &assembler; // Create a pointer for the __ macro.
|
||||
|
||||
|
@@ -52,6 +52,15 @@ typedef int (*F0)();

 #define __ masm->

+byte* AllocateExecutablePage(int* actual_size) {
+  size_t allocated = 0;
+  void* result =
+      v8::base::OS::Allocate(Assembler::kMinimalBufferSize, &allocated,
+                             v8::base::OS::MemoryPermission::kReadWriteExecute);
+  CHECK(result);
+  *actual_size = static_cast<int>(allocated);
+  return static_cast<byte*>(result);
+}
+
 static void EntryCode(MacroAssembler* masm) {
   // Smi constant register is callee save.
@ -98,14 +107,11 @@ static void TestMoveSmi(MacroAssembler* masm, Label* exit, int id, Smi* value) {
|
||||
|
||||
// Test that we can move a Smi value literally into a register.
|
||||
TEST(SmiMove) {
|
||||
// Allocate an executable page of memory.
|
||||
size_t actual_size;
|
||||
byte* buffer = static_cast<byte*>(v8::base::OS::Allocate(
|
||||
Assembler::kMinimalBufferSize, &actual_size, true));
|
||||
CHECK(buffer);
|
||||
Isolate* isolate = CcTest::i_isolate();
|
||||
HandleScope handles(isolate);
|
||||
MacroAssembler assembler(isolate, buffer, static_cast<int>(actual_size),
|
||||
int actual_size;
|
||||
byte* buffer = AllocateExecutablePage(&actual_size);
|
||||
MacroAssembler assembler(isolate, buffer, actual_size,
|
||||
v8::internal::CodeObjectRequired::kYes);
|
||||
MacroAssembler* masm = &assembler; // Create a pointer for the __ macro.
|
||||
EntryCode(masm);
|
||||
@ -184,14 +190,11 @@ void TestSmiCompare(MacroAssembler* masm, Label* exit, int id, int x, int y) {
|
||||
|
||||
// Test that we can compare smis for equality (and more).
|
||||
TEST(SmiCompare) {
|
||||
// Allocate an executable page of memory.
|
||||
size_t actual_size;
|
||||
byte* buffer = static_cast<byte*>(v8::base::OS::Allocate(
|
||||
Assembler::kMinimalBufferSize * 2, &actual_size, true));
|
||||
CHECK(buffer);
|
||||
Isolate* isolate = CcTest::i_isolate();
|
||||
HandleScope handles(isolate);
|
||||
MacroAssembler assembler(isolate, buffer, static_cast<int>(actual_size),
|
||||
int actual_size;
|
||||
byte* buffer = AllocateExecutablePage(&actual_size);
|
||||
MacroAssembler assembler(isolate, buffer, actual_size,
|
||||
v8::internal::CodeObjectRequired::kYes);
|
||||
|
||||
MacroAssembler* masm = &assembler;
|
||||
@ -233,14 +236,11 @@ TEST(SmiCompare) {
|
||||
|
||||
|
||||
TEST(Integer32ToSmi) {
|
||||
// Allocate an executable page of memory.
|
||||
size_t actual_size;
|
||||
byte* buffer = static_cast<byte*>(v8::base::OS::Allocate(
|
||||
Assembler::kMinimalBufferSize, &actual_size, true));
|
||||
CHECK(buffer);
|
||||
Isolate* isolate = CcTest::i_isolate();
|
||||
HandleScope handles(isolate);
|
||||
MacroAssembler assembler(isolate, buffer, static_cast<int>(actual_size),
|
||||
int actual_size;
|
||||
byte* buffer = AllocateExecutablePage(&actual_size);
|
||||
MacroAssembler assembler(isolate, buffer, actual_size,
|
||||
v8::internal::CodeObjectRequired::kYes);
|
||||
|
||||
MacroAssembler* masm = &assembler;
|
||||
@ -333,14 +333,11 @@ TEST(Integer32ToSmi) {
|
||||
}
|
||||
|
||||
TEST(SmiCheck) {
|
||||
// Allocate an executable page of memory.
|
||||
size_t actual_size;
|
||||
byte* buffer = static_cast<byte*>(v8::base::OS::Allocate(
|
||||
Assembler::kMinimalBufferSize, &actual_size, true));
|
||||
CHECK(buffer);
|
||||
Isolate* isolate = CcTest::i_isolate();
|
||||
HandleScope handles(isolate);
|
||||
MacroAssembler assembler(isolate, buffer, static_cast<int>(actual_size),
|
||||
int actual_size;
|
||||
byte* buffer = AllocateExecutablePage(&actual_size);
|
||||
MacroAssembler assembler(isolate, buffer, actual_size,
|
||||
v8::internal::CodeObjectRequired::kYes);
|
||||
|
||||
MacroAssembler* masm = &assembler;
|
||||
@ -433,14 +430,11 @@ void TestSmiIndex(MacroAssembler* masm, Label* exit, int id, int x) {
|
||||
}
|
||||
|
||||
TEST(SmiIndex) {
|
||||
// Allocate an executable page of memory.
|
||||
size_t actual_size;
|
||||
byte* buffer = static_cast<byte*>(v8::base::OS::Allocate(
|
||||
Assembler::kMinimalBufferSize * 5, &actual_size, true));
|
||||
CHECK(buffer);
|
||||
Isolate* isolate = CcTest::i_isolate();
|
||||
HandleScope handles(isolate);
|
||||
MacroAssembler assembler(isolate, buffer, static_cast<int>(actual_size),
|
||||
int actual_size;
|
||||
byte* buffer = AllocateExecutablePage(&actual_size);
|
||||
MacroAssembler assembler(isolate, buffer, actual_size,
|
||||
v8::internal::CodeObjectRequired::kYes);
|
||||
|
||||
MacroAssembler* masm = &assembler;
|
||||
@ -469,14 +463,11 @@ TEST(OperandOffset) {
|
||||
uint32_t data[256];
|
||||
for (uint32_t i = 0; i < 256; i++) { data[i] = i * 0x01010101; }
|
||||
|
||||
// Allocate an executable page of memory.
|
||||
size_t actual_size;
|
||||
byte* buffer = static_cast<byte*>(v8::base::OS::Allocate(
|
||||
Assembler::kMinimalBufferSize * 2, &actual_size, true));
|
||||
CHECK(buffer);
|
||||
Isolate* isolate = CcTest::i_isolate();
|
||||
HandleScope handles(isolate);
|
||||
MacroAssembler assembler(isolate, buffer, static_cast<int>(actual_size),
|
||||
int actual_size;
|
||||
byte* buffer = AllocateExecutablePage(&actual_size);
|
||||
MacroAssembler assembler(isolate, buffer, actual_size,
|
||||
v8::internal::CodeObjectRequired::kYes);
|
||||
|
||||
MacroAssembler* masm = &assembler;
|
||||
@ -820,15 +811,13 @@ TEST(OperandOffset) {
|
||||
|
||||
|
||||
TEST(LoadAndStoreWithRepresentation) {
|
||||
// Allocate an executable page of memory.
|
||||
size_t actual_size;
|
||||
byte* buffer = static_cast<byte*>(v8::base::OS::Allocate(
|
||||
Assembler::kMinimalBufferSize, &actual_size, true));
|
||||
CHECK(buffer);
|
||||
Isolate* isolate = CcTest::i_isolate();
|
||||
HandleScope handles(isolate);
|
||||
MacroAssembler assembler(isolate, buffer, static_cast<int>(actual_size),
|
||||
int actual_size;
|
||||
byte* buffer = AllocateExecutablePage(&actual_size);
|
||||
MacroAssembler assembler(isolate, buffer, actual_size,
|
||||
v8::internal::CodeObjectRequired::kYes);
|
||||
|
||||
MacroAssembler* masm = &assembler; // Create a pointer for the __ macro.
|
||||
EntryCode(masm);
|
||||
__ subq(rsp, Immediate(1 * kPointerSize));
|
||||
@ -1089,14 +1078,11 @@ void TestFloat64x2Neg(MacroAssembler* masm, Label* exit, double x, double y) {
|
||||
}
|
||||
|
||||
TEST(SIMDMacros) {
|
||||
// Allocate an executable page of memory.
|
||||
size_t actual_size;
|
||||
byte* buffer = static_cast<byte*>(v8::base::OS::Allocate(
|
||||
Assembler::kMinimalBufferSize * 2, &actual_size, true));
|
||||
CHECK(buffer);
|
||||
Isolate* isolate = CcTest::i_isolate();
|
||||
HandleScope handles(isolate);
|
||||
MacroAssembler assembler(isolate, buffer, static_cast<int>(actual_size),
|
||||
int actual_size;
|
||||
byte* buffer = AllocateExecutablePage(&actual_size);
|
||||
MacroAssembler assembler(isolate, buffer, actual_size,
|
||||
v8::internal::CodeObjectRequired::kYes);
|
||||
|
||||
MacroAssembler* masm = &assembler;
|
||||
|
@@ -14,7 +14,7 @@ namespace internal {

 TEST(OSReserveMemory) {
   size_t mem_size = 0;
-  void* mem_addr = OS::ReserveAlignedRegion(1 * MB, OS::AllocateAlignment(),
+  void* mem_addr = OS::ReserveAlignedRegion(1 * MB, OS::AllocatePageSize(),
                                             OS::GetRandomMmapAddr(), &mem_size);
   CHECK_NE(0, mem_size);
   CHECK_NOT_NULL(mem_addr);