[cleanup] Introduce base::AddressRegion helper class
Bug: v8:8015 Cq-Include-Trybots: luci.chromium.try:linux_chromium_rel_ng Change-Id: I2ce078b662e3dd93e0fac310b0d73c4cadbaccb3 Reviewed-on: https://chromium-review.googlesource.com/1226640 Commit-Queue: Igor Sheludko <ishell@chromium.org> Reviewed-by: Michael Lippautz <mlippautz@chromium.org> Cr-Commit-Position: refs/heads/master@{#55957}
This commit is contained in:
parent
2c97e1458f
commit
69621ef0c1
1
BUILD.gn
1
BUILD.gn
@ -3008,6 +3008,7 @@ v8_source_set("torque_base") {
|
||||
v8_component("v8_libbase") {
|
||||
sources = [
|
||||
"src/base/adapters.h",
|
||||
"src/base/address-region.h",
|
||||
"src/base/atomic-utils.h",
|
||||
"src/base/atomicops.h",
|
||||
"src/base/atomicops_internals_atomicword_compat.h",
|
||||
|
@ -212,10 +212,10 @@ VirtualMemory::VirtualMemory(v8::PageAllocator* page_allocator, size_t size,
|
||||
size_t page_size = page_allocator_->AllocatePageSize();
|
||||
alignment = RoundUp(alignment, page_size);
|
||||
size = RoundUp(size, page_size);
|
||||
address_ = reinterpret_cast<Address>(AllocatePages(
|
||||
Address address = reinterpret_cast<Address>(AllocatePages(
|
||||
page_allocator_, hint, size, alignment, PageAllocator::kNoAccess));
|
||||
if (address_ != kNullAddress) {
|
||||
size_ = size;
|
||||
if (address != kNullAddress) {
|
||||
region_ = base::AddressRegion(address, size);
|
||||
}
|
||||
}
|
||||
|
||||
@ -227,8 +227,7 @@ VirtualMemory::~VirtualMemory() {
|
||||
|
||||
void VirtualMemory::Reset() {
|
||||
page_allocator_ = nullptr;
|
||||
address_ = kNullAddress;
|
||||
size_ = 0;
|
||||
region_ = base::AddressRegion();
|
||||
}
|
||||
|
||||
bool VirtualMemory::SetPermissions(Address address, size_t size,
|
||||
@ -245,14 +244,13 @@ size_t VirtualMemory::Release(Address free_start) {
|
||||
DCHECK(IsAddressAligned(free_start, page_allocator_->CommitPageSize()));
|
||||
// Notice: Order is important here. The VirtualMemory object might live
|
||||
// inside the allocated region.
|
||||
const size_t free_size = size_ - (free_start - address_);
|
||||
size_t old_size = size_;
|
||||
|
||||
const size_t old_size = region_.size();
|
||||
const size_t free_size = old_size - (free_start - region_.begin());
|
||||
CHECK(InVM(free_start, free_size));
|
||||
DCHECK_LT(address_, free_start);
|
||||
DCHECK_LT(free_start, address_ + size_);
|
||||
size_ -= free_size;
|
||||
CHECK(ReleasePages(page_allocator_, reinterpret_cast<void*>(address_),
|
||||
old_size, size_));
|
||||
region_.set_size(old_size - free_size);
|
||||
CHECK(ReleasePages(page_allocator_, reinterpret_cast<void*>(region_.begin()),
|
||||
old_size, region_.size()));
|
||||
return free_size;
|
||||
}
|
||||
|
||||
@ -261,21 +259,18 @@ void VirtualMemory::Free() {
|
||||
// Notice: Order is important here. The VirtualMemory object might live
|
||||
// inside the allocated region.
|
||||
v8::PageAllocator* page_allocator = page_allocator_;
|
||||
Address address = address_;
|
||||
size_t size = size_;
|
||||
CHECK(InVM(address, size));
|
||||
base::AddressRegion region = region_;
|
||||
Reset();
|
||||
// FreePages expects size to be aligned to allocation granularity. Trimming
|
||||
// may leave size at only commit granularity. Align it here.
|
||||
CHECK(FreePages(page_allocator, reinterpret_cast<void*>(address),
|
||||
RoundUp(size, page_allocator->AllocatePageSize())));
|
||||
// FreePages expects size to be aligned to allocation granularity; however,
|
||||
// ReleasePages may leave size at only commit granularity. Align it here.
|
||||
CHECK(FreePages(page_allocator, reinterpret_cast<void*>(region.begin()),
|
||||
RoundUp(region.size(), page_allocator->AllocatePageSize())));
|
||||
}
|
||||
|
||||
void VirtualMemory::TakeControl(VirtualMemory* from) {
|
||||
DCHECK(!IsReserved());
|
||||
page_allocator_ = from->page_allocator_;
|
||||
address_ = from->address_;
|
||||
size_ = from->size_;
|
||||
region_ = from->region_;
|
||||
from->Reset();
|
||||
}
|
||||
|
||||
|
@ -6,6 +6,7 @@
|
||||
#define V8_ALLOCATION_H_
|
||||
|
||||
#include "include/v8-platform.h"
|
||||
#include "src/base/address-region.h"
|
||||
#include "src/base/compiler-specific.h"
|
||||
#include "src/base/platform/platform.h"
|
||||
#include "src/globals.h"
|
||||
@ -167,7 +168,7 @@ class V8_EXPORT_PRIVATE VirtualMemory final {
|
||||
// Construct a virtual memory by assigning it some already mapped address
|
||||
// and size.
|
||||
VirtualMemory(v8::PageAllocator* page_allocator, Address address, size_t size)
|
||||
: page_allocator_(page_allocator), address_(address), size_(size) {
|
||||
: page_allocator_(page_allocator), region_(address, size) {
|
||||
DCHECK_NOT_NULL(page_allocator);
|
||||
}
|
||||
|
||||
@ -185,32 +186,34 @@ class V8_EXPORT_PRIVATE VirtualMemory final {
|
||||
}
|
||||
|
||||
// Returns whether the memory has been reserved.
|
||||
bool IsReserved() const { return address_ != kNullAddress; }
|
||||
bool IsReserved() const { return region_.begin() != kNullAddress; }
|
||||
|
||||
// Initializes or resets an embedded VirtualMemory object.
|
||||
void Reset();
|
||||
|
||||
v8::PageAllocator* page_allocator() { return page_allocator_; }
|
||||
|
||||
const base::AddressRegion& region() const { return region_; }
|
||||
|
||||
// Returns the start address of the reserved memory.
|
||||
// If the memory was reserved with an alignment, this address is not
|
||||
// necessarily aligned. The user might need to round it up to a multiple of
|
||||
// the alignment to get the start of the aligned block.
|
||||
Address address() const {
|
||||
DCHECK(IsReserved());
|
||||
return address_;
|
||||
return region_.begin();
|
||||
}
|
||||
|
||||
Address end() const {
|
||||
DCHECK(IsReserved());
|
||||
return address_ + size_;
|
||||
return region_.end();
|
||||
}
|
||||
|
||||
// Returns the size of the reserved memory. The returned value is only
|
||||
// meaningful when IsReserved() returns true.
|
||||
// If the memory was reserved with an alignment, this size may be larger
|
||||
// than the requested size.
|
||||
size_t size() const { return size_; }
|
||||
size_t size() const { return region_.size(); }
|
||||
|
||||
// Sets permissions according to the access argument. address and size must be
|
||||
// multiples of CommitPageSize(). Returns true on success, otherwise false.
|
||||
@ -228,14 +231,13 @@ class V8_EXPORT_PRIVATE VirtualMemory final {
|
||||
void TakeControl(VirtualMemory* from);
|
||||
|
||||
bool InVM(Address address, size_t size) {
|
||||
return (address_ <= address) && ((address_ + size_) >= (address + size));
|
||||
return region_.contains(address, size);
|
||||
}
|
||||
|
||||
private:
|
||||
// Page allocator that controls the virtual memory.
|
||||
v8::PageAllocator* page_allocator_ = nullptr;
|
||||
Address address_ = kNullAddress; // Start address of the virtual memory.
|
||||
size_t size_ = 0; // Size of the virtual memory.
|
||||
base::AddressRegion region_;
|
||||
|
||||
DISALLOW_COPY_AND_ASSIGN(VirtualMemory);
|
||||
};
|
||||
|
12
src/api.cc
12
src/api.cc
@ -8707,14 +8707,10 @@ void Isolate::SetStackLimit(uintptr_t stack_limit) {
|
||||
|
||||
void Isolate::GetCodeRange(void** start, size_t* length_in_bytes) {
|
||||
i::Isolate* isolate = reinterpret_cast<i::Isolate*>(this);
|
||||
i::MemoryAllocator* memory_allocator = isolate->heap()->memory_allocator();
|
||||
if (memory_allocator->code_range_valid()) {
|
||||
*start = reinterpret_cast<void*>(memory_allocator->code_range_start());
|
||||
*length_in_bytes = memory_allocator->code_range_size();
|
||||
} else {
|
||||
*start = nullptr;
|
||||
*length_in_bytes = 0;
|
||||
}
|
||||
const base::AddressRegion& code_range =
|
||||
isolate->heap()->memory_allocator()->code_range();
|
||||
*start = reinterpret_cast<void*>(code_range.begin());
|
||||
*length_in_bytes = code_range.size();
|
||||
}
|
||||
|
||||
MemoryRange Isolate::GetBuiltinsCodeRange() {
|
||||
|
@ -74,9 +74,12 @@ AssemblerOptions AssemblerOptions::Default(
|
||||
options.enable_simulator_code = !serializer;
|
||||
#endif
|
||||
options.inline_offheap_trampolines = !serializer;
|
||||
|
||||
#if V8_TARGET_ARCH_X64 || V8_TARGET_ARCH_ARM64
|
||||
options.code_range_start =
|
||||
isolate->heap()->memory_allocator()->code_range_start();
|
||||
const base::AddressRegion& code_range =
|
||||
isolate->heap()->memory_allocator()->code_range();
|
||||
DCHECK_IMPLIES(code_range.begin() != kNullAddress, !code_range.is_empty());
|
||||
options.code_range_start = code_range.begin();
|
||||
#endif
|
||||
return options;
|
||||
}
|
||||
|
56
src/base/address-region.h
Normal file
56
src/base/address-region.h
Normal file
@ -0,0 +1,56 @@
|
||||
// Copyright 2018 the V8 project authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style license that can be
|
||||
// found in the LICENSE file.
|
||||
|
||||
#ifndef V8_BASE_ADDRESS_REGION_H_
|
||||
#define V8_BASE_ADDRESS_REGION_H_
|
||||
|
||||
#include <type_traits>
|
||||
|
||||
#include "src/base/macros.h"
|
||||
|
||||
namespace v8 {
|
||||
namespace base {
|
||||
|
||||
// Helper class representing an address region of certain size.
class AddressRegion {
 public:
  // Addresses are raw unsigned integers rather than pointers so that the
  // arithmetic below is well-defined (unsigned wraparound) and a region can
  // describe any address space, not only pointers dereferenceable here.
  typedef uintptr_t Address;

  // Default-constructed region is empty: begin() == 0, size() == 0.
  AddressRegion() = default;

  AddressRegion(Address address, size_t size)
      : address_(address), size_(size) {}

  Address begin() const { return address_; }
  // Exclusive upper bound of the region.
  Address end() const { return address_ + size_; }

  size_t size() const { return size_; }
  void set_size(size_t size) { size_ = size; }

  bool is_empty() const { return size_ == 0; }

  // Returns true if |address| lies within [begin(), end()).
  // The single unsigned subtraction handles the below-begin() case via
  // wraparound, avoiding a two-sided comparison.
  bool contains(Address address) const {
    static_assert(std::is_unsigned<Address>::value,
                  "AddressRegion::Address must be unsigned");
    return (address - begin()) < size();
  }

  // Returns true if the |size|-byte range starting at |address| lies fully
  // within this region. Note: a zero-size query succeeds at any offset in
  // [begin(), end()) but fails at end() itself, since (offset < size_) must
  // hold.
  bool contains(Address address, size_t size) const {
    static_assert(std::is_unsigned<Address>::value,
                  "AddressRegion::Address must be unsigned");
    Address offset = address - begin();
    return (offset < size_) && (offset <= size_ - size);
  }

  // Returns true if |region| lies fully within this region.
  bool contains(const AddressRegion& region) const {
    return contains(region.address_, region.size_);
  }

 private:
  Address address_ = 0;
  size_t size_ = 0;
};
|
||||
|
||||
} // namespace base
|
||||
} // namespace v8
|
||||
|
||||
#endif // V8_BASE_ADDRESS_REGION_H_
|
@ -7,6 +7,7 @@
|
||||
|
||||
#include <set>
|
||||
|
||||
#include "src/base/address-region.h"
|
||||
#include "src/base/utils/random-number-generator.h"
|
||||
#include "testing/gtest/include/gtest/gtest_prod.h" // nogncheck
|
||||
|
||||
@ -75,36 +76,17 @@ class V8_BASE_EXPORT RegionAllocator final {
|
||||
void Print(std::ostream& os) const;
|
||||
|
||||
private:
|
||||
class Region {
|
||||
class Region : public AddressRegion {
|
||||
public:
|
||||
Address begin() const { return address_; }
|
||||
Address end() const { return address_ + size_; }
|
||||
|
||||
size_t size() const { return size_; }
|
||||
void set_size(size_t size) { size_ = size; }
|
||||
|
||||
bool contains(Address address) const {
|
||||
STATIC_ASSERT(std::is_unsigned<Address>::value);
|
||||
return (address - begin()) < size();
|
||||
}
|
||||
|
||||
bool contains(Address address, size_t size) const {
|
||||
STATIC_ASSERT(std::is_unsigned<Address>::value);
|
||||
Address offset = address - begin();
|
||||
return (offset < size_) && (offset <= size_ - size);
|
||||
}
|
||||
Region(Address address, size_t size, bool is_used)
|
||||
: AddressRegion(address, size), is_used_(is_used) {}
|
||||
|
||||
bool is_used() const { return is_used_; }
|
||||
void set_is_used(bool used) { is_used_ = used; }
|
||||
|
||||
Region(Address address, size_t size, bool is_used)
|
||||
: address_(address), size_(size), is_used_(is_used) {}
|
||||
|
||||
void Print(std::ostream& os) const;
|
||||
|
||||
private:
|
||||
Address address_;
|
||||
size_t size_;
|
||||
bool is_used_;
|
||||
};
|
||||
|
||||
|
@ -49,10 +49,11 @@ AssemblerOptions BuiltinAssemblerOptions(Isolate* isolate,
|
||||
return options;
|
||||
}
|
||||
|
||||
const base::AddressRegion& code_range =
|
||||
isolate->heap()->memory_allocator()->code_range();
|
||||
bool pc_relative_calls_fit_in_code_range =
|
||||
isolate->heap()->memory_allocator()->code_range_valid() &&
|
||||
isolate->heap()->memory_allocator()->code_range_size() <=
|
||||
kMaxPCRelativeCodeRangeInMB * MB;
|
||||
!code_range.is_empty() &&
|
||||
code_range.size() <= kMaxPCRelativeCodeRangeInMB * MB;
|
||||
|
||||
options.isolate_independent_code = true;
|
||||
options.use_pc_relative_calls_and_jumps = pc_relative_calls_fit_in_code_range;
|
||||
|
@ -64,9 +64,9 @@ void InitializeCode(Heap* heap, Handle<Code> code, int object_size,
|
||||
bool is_turbofanned, int stack_slots,
|
||||
int safepoint_table_offset, int handler_table_offset) {
|
||||
DCHECK(IsAligned(code->address(), kCodeAlignment));
|
||||
DCHECK(!heap->memory_allocator()->code_range_valid() ||
|
||||
heap->memory_allocator()->code_range_contains(code->address()) ||
|
||||
object_size <= heap->code_space()->AreaSize());
|
||||
DCHECK_IMPLIES(
|
||||
!heap->memory_allocator()->code_range().is_empty(),
|
||||
heap->memory_allocator()->code_range().contains(code->address()));
|
||||
|
||||
bool has_unwinding_info = desc.unwinding_info != nullptr;
|
||||
|
||||
@ -2674,9 +2674,9 @@ Handle<Code> Factory::NewCodeForDeserialization(uint32_t size) {
|
||||
heap->ZapCodeObject(result->address(), size);
|
||||
result->set_map_after_allocation(*code_map(), SKIP_WRITE_BARRIER);
|
||||
DCHECK(IsAligned(result->address(), kCodeAlignment));
|
||||
DCHECK(!heap->memory_allocator()->code_range_valid() ||
|
||||
heap->memory_allocator()->code_range_contains(result->address()) ||
|
||||
static_cast<int>(size) <= heap->code_space()->AreaSize());
|
||||
DCHECK_IMPLIES(
|
||||
!heap->memory_allocator()->code_range().is_empty(),
|
||||
heap->memory_allocator()->code_range().contains(result->address()));
|
||||
return handle(Code::cast(result), isolate());
|
||||
}
|
||||
|
||||
@ -2738,9 +2738,9 @@ Handle<Code> Factory::CopyCode(Handle<Code> code) {
|
||||
if (FLAG_verify_heap) new_code->ObjectVerify(isolate());
|
||||
#endif
|
||||
DCHECK(IsAligned(new_code->address(), kCodeAlignment));
|
||||
DCHECK(!heap->memory_allocator()->code_range_valid() ||
|
||||
heap->memory_allocator()->code_range_contains(new_code->address()) ||
|
||||
obj_size <= heap->code_space()->AreaSize());
|
||||
DCHECK_IMPLIES(
|
||||
!heap->memory_allocator()->code_range().is_empty(),
|
||||
heap->memory_allocator()->code_range().contains(new_code->address()));
|
||||
return new_code;
|
||||
}
|
||||
|
||||
|
@ -546,32 +546,6 @@ bool LocalAllocationBuffer::TryFreeLast(HeapObject* object, int object_size) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------------
|
||||
// MemoryAllocator
|
||||
|
||||
bool MemoryAllocator::code_range_valid() const {
|
||||
return code_page_allocator_instance_.get() != nullptr;
|
||||
}
|
||||
|
||||
Address MemoryAllocator::code_range_start() const {
|
||||
DCHECK(code_range_valid());
|
||||
// TODO(ishell): once a follow-up CL is landed add assert that
|
||||
// |code_range_| >= |optional RW pages| + |code_page_allocator_instance_|
|
||||
return code_range_start_;
|
||||
}
|
||||
|
||||
size_t MemoryAllocator::code_range_size() const {
|
||||
DCHECK(code_range_valid());
|
||||
// TODO(ishell): once a follow-up CL is landed add assert that
|
||||
// |code_range_| >= |optional RW pages| + |code_page_allocator_instance_|
|
||||
return code_range_size_;
|
||||
}
|
||||
|
||||
bool MemoryAllocator::code_range_contains(Address address) const {
|
||||
DCHECK(code_range_valid());
|
||||
return (address - code_range_start_) < code_range_size_;
|
||||
}
|
||||
|
||||
} // namespace internal
|
||||
} // namespace v8
|
||||
|
||||
|
@ -123,8 +123,6 @@ MemoryAllocator::MemoryAllocator(Isolate* isolate, size_t capacity,
|
||||
: isolate_(isolate),
|
||||
data_page_allocator_(GetPlatformPageAllocator()),
|
||||
code_page_allocator_(nullptr),
|
||||
code_range_start_(kNullAddress),
|
||||
code_range_size_(0),
|
||||
capacity_(RoundUp(capacity, Page::kPageSize)),
|
||||
size_(0),
|
||||
size_executable_(0),
|
||||
@ -168,8 +166,7 @@ void MemoryAllocator::InitializeCodePageAllocator(
|
||||
V8::FatalProcessOutOfMemory(isolate_,
|
||||
"CodeRange setup: allocate virtual memory");
|
||||
}
|
||||
code_range_start_ = reservation.address();
|
||||
code_range_size_ = reservation.size();
|
||||
code_range_ = reservation.region();
|
||||
|
||||
// We are sure that we have mapped a block of requested addresses.
|
||||
DCHECK_GE(reservation.size(), requested);
|
||||
|
@ -1389,10 +1389,16 @@ class V8_EXPORT_PRIVATE MemoryAllocator {
|
||||
: data_page_allocator_;
|
||||
}
|
||||
|
||||
V8_INLINE bool code_range_valid() const;
|
||||
V8_INLINE Address code_range_start() const;
|
||||
V8_INLINE size_t code_range_size() const;
|
||||
V8_INLINE bool code_range_contains(Address address) const;
|
||||
// A region of memory that may contain executable code including reserved
|
||||
// OS page with read-write access in the beginning.
|
||||
const base::AddressRegion& code_range() const {
|
||||
// |code_range_| >= |optional RW pages| + |code_page_allocator_instance_|
|
||||
DCHECK_IMPLIES(!code_range_.is_empty(), code_page_allocator_instance_);
|
||||
DCHECK_IMPLIES(!code_range_.is_empty(),
|
||||
code_range_.contains(code_page_allocator_instance_->begin(),
|
||||
code_page_allocator_instance_->size()));
|
||||
return code_range_;
|
||||
}
|
||||
|
||||
Unmapper* unmapper() { return &unmapper_; }
|
||||
|
||||
@ -1472,14 +1478,12 @@ class V8_EXPORT_PRIVATE MemoryAllocator {
|
||||
// A part of the |heap_reservation_| that may contain executable code
|
||||
// including reserved page with read-write access in the beginning.
|
||||
// See details below.
|
||||
// TODO(ishell): introduce base::AddressRegion code_range_; instead.
|
||||
Address code_range_start_;
|
||||
size_t code_range_size_;
|
||||
base::AddressRegion code_range_;
|
||||
|
||||
// This unique pointer owns the instance of bounded code allocator
|
||||
// that controls executable pages allocation. It does not control the
|
||||
// optionally existing page in the beginning of the |code_range_|.
|
||||
// So, summarizing all above, the following condition holds:
|
||||
// So, summarizing all above, the following conditions hold:
|
||||
// 1) |heap_reservation_| >= |code_range_|
|
||||
// 2) |code_range_| >= |optional RW pages| + |code_page_allocator_instance_|.
|
||||
// 3) |heap_reservation_| is AllocatePageSize()-aligned
|
||||
|
@ -205,8 +205,8 @@ bool TickSample::GetStackSample(Isolate* v8_isolate, RegisterState* regs,
|
||||
|
||||
// Check whether we interrupted setup/teardown of a stack frame in JS code.
|
||||
// Avoid this check for C++ code, as that would trigger false positives.
|
||||
if (regs->pc && isolate->heap()->memory_allocator()->code_range_valid() &&
|
||||
isolate->heap()->memory_allocator()->code_range_contains(
|
||||
if (regs->pc &&
|
||||
isolate->heap()->memory_allocator()->code_range().contains(
|
||||
reinterpret_cast<i::Address>(regs->pc)) &&
|
||||
IsNoFrameRegion(reinterpret_cast<i::Address>(regs->pc))) {
|
||||
// The frame is not setup, so it'd be hard to iterate the stack. Bailout.
|
||||
|
@ -55,6 +55,7 @@ v8_source_set("unittests_sources") {
|
||||
"asmjs/asm-scanner-unittest.cc",
|
||||
"asmjs/asm-types-unittest.cc",
|
||||
"asmjs/switch-logic-unittest.cc",
|
||||
"base/address-region-unittest.cc",
|
||||
"base/atomic-utils-unittest.cc",
|
||||
"base/bits-unittest.cc",
|
||||
"base/cpu-unittest.cc",
|
||||
|
64
test/unittests/base/address-region-unittest.cc
Normal file
64
test/unittests/base/address-region-unittest.cc
Normal file
@ -0,0 +1,64 @@
|
||||
// Copyright 2018 the V8 project authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style license that can be
|
||||
// found in the LICENSE file.
|
||||
|
||||
#include "src/base/address-region.h"
|
||||
|
||||
#include "testing/gtest/include/gtest/gtest.h"
|
||||
|
||||
namespace v8 {
|
||||
namespace base {
|
||||
|
||||
using Address = AddressRegion::Address;
|
||||
|
||||
TEST(AddressRegionTest, Contains) {
|
||||
struct {
|
||||
Address start;
|
||||
size_t size;
|
||||
} test_cases[] = {{153, 771}, {0, 227}, {-447, 447}};
|
||||
|
||||
for (size_t i = 0; i < arraysize(test_cases); i++) {
|
||||
Address start = test_cases[i].start;
|
||||
size_t size = test_cases[i].size;
|
||||
Address end = start + size; // exclusive
|
||||
|
||||
AddressRegion region(start, size);
|
||||
|
||||
// Test single-argument contains().
|
||||
CHECK(!region.contains(start - 1041));
|
||||
CHECK(!region.contains(start - 1));
|
||||
CHECK(!region.contains(end));
|
||||
CHECK(!region.contains(end + 1));
|
||||
CHECK(!region.contains(end + 113));
|
||||
|
||||
CHECK(region.contains(start));
|
||||
CHECK(region.contains(start + 1));
|
||||
CHECK(region.contains(start + size / 2));
|
||||
CHECK(region.contains(end - 1));
|
||||
|
||||
// Test two-arguments contains().
|
||||
CHECK(!region.contains(start - 17, 17));
|
||||
CHECK(!region.contains(start - 17, size * 2));
|
||||
CHECK(!region.contains(end, 1));
|
||||
CHECK(!region.contains(end, static_cast<size_t>(0 - end)));
|
||||
|
||||
CHECK(region.contains(start, size));
|
||||
CHECK(region.contains(start, 10));
|
||||
CHECK(region.contains(start + 11, 120));
|
||||
CHECK(region.contains(end - 13, 13));
|
||||
CHECK(!region.contains(end, 0));
|
||||
|
||||
// Zero-size queries.
|
||||
CHECK(!region.contains(start - 10, 0));
|
||||
CHECK(!region.contains(start - 1, 0));
|
||||
CHECK(!region.contains(end, 0));
|
||||
CHECK(!region.contains(end + 10, 0));
|
||||
|
||||
CHECK(region.contains(start, 0));
|
||||
CHECK(region.contains(start + 10, 0));
|
||||
CHECK(region.contains(end - 1, 0));
|
||||
}
|
||||
}
|
||||
|
||||
} // namespace base
|
||||
} // namespace v8
|
@ -322,56 +322,5 @@ TEST(RegionAllocatorTest, FindRegion) {
|
||||
}
|
||||
}
|
||||
|
||||
TEST(RegionAllocatorTest, Contains) {
|
||||
using Region = RegionAllocator::Region;
|
||||
|
||||
struct {
|
||||
Address start;
|
||||
size_t size;
|
||||
} test_cases[] = {{153, 771}, {0, 227}, {-447, 447}};
|
||||
|
||||
for (size_t i = 0; i < arraysize(test_cases); i++) {
|
||||
Address start = test_cases[i].start;
|
||||
size_t size = test_cases[i].size;
|
||||
Address end = start + size; // exclusive
|
||||
|
||||
Region region(start, size, true);
|
||||
|
||||
// Test single-argument contains().
|
||||
CHECK(!region.contains(start - 1041));
|
||||
CHECK(!region.contains(start - 1));
|
||||
CHECK(!region.contains(end));
|
||||
CHECK(!region.contains(end + 1));
|
||||
CHECK(!region.contains(end + 113));
|
||||
|
||||
CHECK(region.contains(start));
|
||||
CHECK(region.contains(start + 1));
|
||||
CHECK(region.contains(start + size / 2));
|
||||
CHECK(region.contains(end - 1));
|
||||
|
||||
// Test two-arguments contains().
|
||||
CHECK(!region.contains(start - 17, 17));
|
||||
CHECK(!region.contains(start - 17, size * 2));
|
||||
CHECK(!region.contains(end, 1));
|
||||
CHECK(!region.contains(end, static_cast<size_t>(0 - end)));
|
||||
|
||||
CHECK(region.contains(start, size));
|
||||
CHECK(region.contains(start, 10));
|
||||
CHECK(region.contains(start + 11, 120));
|
||||
CHECK(region.contains(end - 13, 13));
|
||||
CHECK(!region.contains(end, 0));
|
||||
|
||||
// Zero-size queries.
|
||||
CHECK(!region.contains(start - 10, 0));
|
||||
CHECK(!region.contains(start - 1, 0));
|
||||
CHECK(!region.contains(end, 0));
|
||||
CHECK(!region.contains(end + 10, 0));
|
||||
|
||||
CHECK(region.contains(start, 0));
|
||||
CHECK(region.contains(start + 10, 0));
|
||||
CHECK(region.contains(end - 1, 0));
|
||||
}
|
||||
}
|
||||
|
||||
} // namespace base
|
||||
} // namespace v8
|
||||
|
Loading…
Reference in New Issue
Block a user