[ubsan] Change Address typedef to uintptr_t
The "Address" type is V8's general-purpose type for manipulating memory addresses. Per the C++ spec, pointer arithmetic and pointer comparisons are undefined behavior except within the same array; since we generally don't operate within a C++ array, our general-purpose type shouldn't be a pointer type. Bug: v8:3770 Cq-Include-Trybots: luci.chromium.try:linux_chromium_rel_ng;master.tryserver.blink:linux_trusty_blink_rel Change-Id: Ib96016c24a0f18bcdba916dabd83e3f24a1b5779 Reviewed-on: https://chromium-review.googlesource.com/988657 Commit-Queue: Jakob Kummerow <jkummerow@chromium.org> Reviewed-by: Leszek Swirski <leszeks@chromium.org> Cr-Commit-Position: refs/heads/master@{#52601}
parent 1bb5d012bb
commit 2459046c1d
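For readers skimming the diff below, here is a minimal illustrative sketch -- not code from this CL -- of the idea behind the change. The typedef, the kNullAddress constant, and the before/after comments mirror what the CL does; the Contains() helper is hypothetical and only demonstrates that address arithmetic on uintptr_t is ordinary, well-defined unsigned arithmetic, unlike pointer arithmetic across unrelated allocations:

#include <cstddef>
#include <cstdint>

namespace v8 {
namespace internal {

// Before (simplified): Address was a pointer type, so subtracting or
// ordering Addresses from unrelated allocations was undefined behavior.
//   typedef uint8_t* Address;

// After: Address is an integer type; all arithmetic is well-defined.
typedef uintptr_t Address;

// Replaces comparisons against nullptr on Address values.
constexpr Address kNullAddress = 0;

// Hypothetical helper: range checks like VirtualMemory::InVM() become
// plain unsigned integer comparisons.
inline bool Contains(Address base, size_t size, Address addr) {
  return base <= addr && addr < base + size;
}

}  // namespace internal
}  // namespace v8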
@@ -37,7 +37,7 @@ Handle<AccessorInfo> Accessors::MakeAccessor(
   info->set_getter(*get);
   info->set_setter(*set);
   Address redirected = info->redirected_getter();
-  if (redirected != nullptr) {
+  if (redirected != kNullAddress) {
     Handle<Object> js_get = v8::FromCData(isolate, redirected);
     info->set_js_getter(*js_get);
   }
@@ -34,13 +34,21 @@ class PointerToIndexHashMap
   }

  private:
-  static uintptr_t Key(Type value) {
-    return reinterpret_cast<uintptr_t>(value);
-  }
+  static inline uintptr_t Key(Type value);

   static uint32_t Hash(uintptr_t key) { return static_cast<uint32_t>(key); }
 };

+template <>
+inline uintptr_t PointerToIndexHashMap<Address>::Key(Address value) {
+  return static_cast<uintptr_t>(value);
+}
+
+template <typename Type>
+inline uintptr_t PointerToIndexHashMap<Type>::Key(Type value) {
+  return reinterpret_cast<uintptr_t>(value);
+}
+
 class AddressToIndexHashMap : public PointerToIndexHashMap<Address> {};
 class HeapObjectToIndexHashMap : public PointerToIndexHashMap<HeapObject*> {};
@@ -206,15 +206,15 @@ bool OnCriticalMemoryPressure(size_t length) {
   return true;
 }

-VirtualMemory::VirtualMemory() : address_(nullptr), size_(0) {}
+VirtualMemory::VirtualMemory() : address_(kNullAddress), size_(0) {}

 VirtualMemory::VirtualMemory(size_t size, void* hint, size_t alignment)
-    : address_(nullptr), size_(0) {
+    : address_(kNullAddress), size_(0) {
   size_t page_size = AllocatePageSize();
   size_t alloc_size = RoundUp(size, page_size);
-  address_ =
-      AllocatePages(hint, alloc_size, alignment, PageAllocator::kNoAccess);
-  if (address_ != nullptr) {
+  address_ = reinterpret_cast<Address>(
+      AllocatePages(hint, alloc_size, alignment, PageAllocator::kNoAccess));
+  if (address_ != kNullAddress) {
     size_ = alloc_size;
   }
 }
@@ -226,31 +226,29 @@ VirtualMemory::~VirtualMemory() {
 }

 void VirtualMemory::Reset() {
-  address_ = nullptr;
+  address_ = kNullAddress;
   size_ = 0;
 }

-bool VirtualMemory::SetPermissions(void* address, size_t size,
+bool VirtualMemory::SetPermissions(Address address, size_t size,
                                    PageAllocator::Permission access) {
   CHECK(InVM(address, size));
   bool result = v8::internal::SetPermissions(address, size, access);
   DCHECK(result);
   USE(result);
   return result;
 }

-size_t VirtualMemory::Release(void* free_start) {
+size_t VirtualMemory::Release(Address free_start) {
   DCHECK(IsReserved());
-  DCHECK(IsAddressAligned(static_cast<Address>(free_start), CommitPageSize()));
+  DCHECK(IsAddressAligned(free_start, CommitPageSize()));
   // Notice: Order is important here. The VirtualMemory object might live
   // inside the allocated region.
-  const size_t free_size = size_ - (reinterpret_cast<size_t>(free_start) -
-                                    reinterpret_cast<size_t>(address_));
+  const size_t free_size = size_ - (free_start - address_);
   CHECK(InVM(free_start, free_size));
   DCHECK_LT(address_, free_start);
-  DCHECK_LT(free_start, reinterpret_cast<void*>(
-                            reinterpret_cast<size_t>(address_) + size_));
-  CHECK(ReleasePages(address_, size_, size_ - free_size));
+  DCHECK_LT(free_start, address_ + size_);
+  CHECK(ReleasePages(reinterpret_cast<void*>(address_), size_,
+                     size_ - free_size));
   size_ -= free_size;
   return free_size;
 }
@@ -259,13 +257,14 @@ void VirtualMemory::Free() {
   DCHECK(IsReserved());
   // Notice: Order is important here. The VirtualMemory object might live
   // inside the allocated region.
-  void* address = address_;
+  Address address = address_;
   size_t size = size_;
   CHECK(InVM(address, size));
   Reset();
   // FreePages expects size to be aligned to allocation granularity. Trimming
   // may leave size at only commit granularity. Align it here.
-  CHECK(FreePages(address, RoundUp(size, AllocatePageSize())));
+  CHECK(FreePages(reinterpret_cast<void*>(address),
+                  RoundUp(size, AllocatePageSize())));
 }

 void VirtualMemory::TakeControl(VirtualMemory* from) {
@@ -126,6 +126,10 @@ V8_WARN_UNUSED_RESULT bool ReleasePages(void* address, size_t size,
 V8_EXPORT_PRIVATE
 V8_WARN_UNUSED_RESULT bool SetPermissions(void* address, size_t size,
                                           PageAllocator::Permission access);
+inline bool SetPermissions(Address address, size_t size,
+                           PageAllocator::Permission access) {
+  return SetPermissions(reinterpret_cast<void*>(address), size, access);
+}

 // Convenience function that allocates a single system page with read and write
 // permissions. |address| is a hint. Returns the base address of the memory and
@@ -151,14 +155,15 @@ class V8_EXPORT_PRIVATE VirtualMemory {

   // Construct a virtual memory by assigning it some already mapped address
   // and size.
-  VirtualMemory(void* address, size_t size) : address_(address), size_(size) {}
+  VirtualMemory(Address address, size_t size)
+      : address_(address), size_(size) {}

   // Releases the reserved memory, if any, controlled by this VirtualMemory
   // object.
   ~VirtualMemory();

   // Returns whether the memory has been reserved.
-  bool IsReserved() const { return address_ != nullptr; }
+  bool IsReserved() const { return address_ != kNullAddress; }

   // Initialize or resets an embedded VirtualMemory object.
   void Reset();
@@ -167,15 +172,14 @@ class V8_EXPORT_PRIVATE VirtualMemory {
   // If the memory was reserved with an alignment, this address is not
   // necessarily aligned. The user might need to round it up to a multiple of
   // the alignment to get the start of the aligned block.
-  void* address() const {
+  Address address() const {
     DCHECK(IsReserved());
     return address_;
   }

-  void* end() const {
+  Address end() const {
     DCHECK(IsReserved());
-    return reinterpret_cast<void*>(reinterpret_cast<uintptr_t>(address_) +
-                                   size_);
+    return address_ + size_;
   }

   // Returns the size of the reserved memory. The returned value is only
@@ -186,11 +190,11 @@ class V8_EXPORT_PRIVATE VirtualMemory {

   // Sets permissions according to the access argument. address and size must be
   // multiples of CommitPageSize(). Returns true on success, otherwise false.
-  bool SetPermissions(void* address, size_t size,
+  bool SetPermissions(Address address, size_t size,
                       PageAllocator::Permission access);

   // Releases memory after |free_start|. Returns the number of bytes released.
-  size_t Release(void* free_start);
+  size_t Release(Address free_start);

   // Frees all memory.
   void Free();
@@ -199,15 +203,12 @@ class V8_EXPORT_PRIVATE VirtualMemory {
   // The old object is no longer functional (IsReserved() returns false).
   void TakeControl(VirtualMemory* from);

-  bool InVM(void* address, size_t size) {
-    return (reinterpret_cast<uintptr_t>(address_) <=
-            reinterpret_cast<uintptr_t>(address)) &&
-           ((reinterpret_cast<uintptr_t>(address_) + size_) >=
-            (reinterpret_cast<uintptr_t>(address) + size));
+  bool InVM(Address address, size_t size) {
+    return (address_ <= address) && ((address_ + size_) >= (address + size));
   }

  private:
-  void* address_;  // Start address of the virtual memory.
+  Address address_;  // Start address of the virtual memory.
   size_t size_;  // Size of the virtual memory.
 };
src/api.cc
@@ -1527,7 +1527,9 @@ i::Handle<i::AccessorInfo> MakeAccessorInfo(
   }
   SET_FIELD_WRAPPED(obj, set_setter, setter);
   i::Address redirected = obj->redirected_getter();
-  if (redirected != nullptr) SET_FIELD_WRAPPED(obj, set_js_getter, redirected);
+  if (redirected != i::kNullAddress) {
+    SET_FIELD_WRAPPED(obj, set_js_getter, redirected);
+  }
   if (data.IsEmpty()) {
     data = v8::Undefined(reinterpret_cast<v8::Isolate*>(isolate));
   }
@@ -6075,7 +6077,7 @@ static void* ExternalValue(i::Object* obj) {
     return nullptr;
   }
   i::Object* foreign = i::JSObject::cast(obj)->GetEmbedderField(0);
-  return i::Foreign::cast(foreign)->foreign_address();
+  return reinterpret_cast<void*>(i::Foreign::cast(foreign)->foreign_address());
 }

@@ -8847,7 +8849,8 @@ void Isolate::SetStackLimit(uintptr_t stack_limit) {
 void Isolate::GetCodeRange(void** start, size_t* length_in_bytes) {
   i::Isolate* isolate = reinterpret_cast<i::Isolate*>(this);
   if (isolate->heap()->memory_allocator()->code_range()->valid()) {
-    *start = isolate->heap()->memory_allocator()->code_range()->start();
+    *start = reinterpret_cast<void*>(
+        isolate->heap()->memory_allocator()->code_range()->start());
     *length_in_bytes =
         isolate->heap()->memory_allocator()->code_range()->size();
   } else {
@@ -10670,8 +10673,7 @@ void InvokeAccessorGetterCallback(
   Isolate* isolate = reinterpret_cast<Isolate*>(info.GetIsolate());
   RuntimeCallTimerScope timer(isolate,
                               RuntimeCallCounterId::kAccessorGetterCallback);
-  Address getter_address = reinterpret_cast<Address>(reinterpret_cast<intptr_t>(
-      getter));
+  Address getter_address = reinterpret_cast<Address>(getter);
   VMState<EXTERNAL> state(isolate);
   ExternalCallbackScope call_scope(isolate, getter_address);
   getter(property, info);
@@ -10683,8 +10685,7 @@ void InvokeFunctionCallback(const v8::FunctionCallbackInfo<v8::Value>& info,
   Isolate* isolate = reinterpret_cast<Isolate*>(info.GetIsolate());
   RuntimeCallTimerScope timer(isolate,
                               RuntimeCallCounterId::kInvokeFunctionCallback);
-  Address callback_address =
-      reinterpret_cast<Address>(reinterpret_cast<intptr_t>(callback));
+  Address callback_address = reinterpret_cast<Address>(callback);
   VMState<EXTERNAL> state(isolate);
   ExternalCallbackScope call_scope(isolate, callback_address);
   callback(info);
src/api.h
@@ -31,10 +31,14 @@ template <typename T> inline T ToCData(v8::internal::Object* obj) {
   STATIC_ASSERT(sizeof(T) == sizeof(v8::internal::Address));
   if (obj == v8::internal::Smi::kZero) return nullptr;
   return reinterpret_cast<T>(
-      reinterpret_cast<intptr_t>(
-          v8::internal::Foreign::cast(obj)->foreign_address()));
+      v8::internal::Foreign::cast(obj)->foreign_address());
 }

+template <>
+inline v8::internal::Address ToCData(v8::internal::Object* obj) {
+  if (obj == v8::internal::Smi::kZero) return v8::internal::kNullAddress;
+  return v8::internal::Foreign::cast(obj)->foreign_address();
+}
+
 template <typename T>
 inline v8::internal::Handle<v8::internal::Object> FromCData(
@@ -42,9 +46,17 @@ inline v8::internal::Handle<v8::internal::Object> FromCData(
   STATIC_ASSERT(sizeof(T) == sizeof(v8::internal::Address));
   if (obj == nullptr) return handle(v8::internal::Smi::kZero, isolate);
   return isolate->factory()->NewForeign(
-      reinterpret_cast<v8::internal::Address>(reinterpret_cast<intptr_t>(obj)));
+      reinterpret_cast<v8::internal::Address>(obj));
 }

+template <>
+inline v8::internal::Handle<v8::internal::Object> FromCData(
+    v8::internal::Isolate* isolate, v8::internal::Address obj) {
+  if (obj == v8::internal::kNullAddress) {
+    return handle(v8::internal::Smi::kZero, isolate);
+  }
+  return isolate->factory()->NewForeign(obj);
+}
+
 class ApiFunction {
  public:
@@ -76,7 +76,7 @@ Address RelocInfo::target_address_address() {
          IsEmbeddedObject(rmode_) || IsExternalReference(rmode_) ||
          IsOffHeapTarget(rmode_));
   if (Assembler::IsMovW(Memory::int32_at(pc_))) {
-    return reinterpret_cast<Address>(pc_);
+    return pc_;
   } else {
     DCHECK(Assembler::IsLdrPcImmediateOffset(Memory::int32_at(pc_)));
     return constant_pool_entry_address();
@@ -141,7 +141,7 @@ Address RelocInfo::target_internal_reference() {

 Address RelocInfo::target_internal_reference_address() {
   DCHECK(rmode_ == INTERNAL_REFERENCE);
-  return reinterpret_cast<Address>(pc_);
+  return pc_;
 }

 void RelocInfo::set_wasm_code_table_entry(Address target,
@@ -174,9 +174,9 @@ void RelocInfo::WipeOut() {
          IsRuntimeEntry(rmode_) || IsExternalReference(rmode_) ||
          IsInternalReference(rmode_));
   if (IsInternalReference(rmode_)) {
-    Memory::Address_at(pc_) = nullptr;
+    Memory::Address_at(pc_) = kNullAddress;
   } else {
-    Assembler::set_target_address_at(pc_, constant_pool_, nullptr);
+    Assembler::set_target_address_at(pc_, constant_pool_, kNullAddress);
   }
 }

@@ -206,7 +206,7 @@ Operand Operand::Zero() { return Operand(static_cast<int32_t>(0)); }

 Operand::Operand(const ExternalReference& f)
     : rmode_(RelocInfo::EXTERNAL_REFERENCE) {
-  value_.immediate = reinterpret_cast<int32_t>(f.address());
+  value_.immediate = static_cast<int32_t>(f.address());
 }

 Operand::Operand(Smi* value) : rmode_(RelocInfo::NONE) {
@@ -327,8 +327,7 @@ Address Assembler::target_address_at(Address pc, Address constant_pool) {
            IsMovT(Memory::int32_at(pc + kInstrSize)));
     Instruction* movw_instr = Instruction::At(pc);
     Instruction* movt_instr = Instruction::At(pc + kInstrSize);
-    return reinterpret_cast<Address>(
-        (movt_instr->ImmedMovwMovtValue() << 16) |
-        movw_instr->ImmedMovwMovtValue());
+    return static_cast<Address>((movt_instr->ImmedMovwMovtValue() << 16) |
+                                movw_instr->ImmedMovwMovtValue());
   } else {
     // This is an mov / orr immediate load. Return the immediate.
@@ -340,7 +339,7 @@ Address Assembler::target_address_at(Address pc, Address constant_pool) {
     Instr orr_instr_1 = instr_at(pc + kInstrSize);
     Instr orr_instr_2 = instr_at(pc + 2 * kInstrSize);
     Instr orr_instr_3 = instr_at(pc + 3 * kInstrSize);
-    Address ret = reinterpret_cast<Address>(
+    Address ret = static_cast<Address>(
         DecodeShiftImm(mov_instr) | DecodeShiftImm(orr_instr_1) |
         DecodeShiftImm(orr_instr_2) | DecodeShiftImm(orr_instr_3));
     return ret;
@@ -367,7 +366,7 @@ void Assembler::set_target_address_at(Address pc, Address constant_pool,
     DCHECK(IsMovW(Memory::int32_at(pc)));
     DCHECK(IsMovT(Memory::int32_at(pc + kInstrSize)));
     uint32_t* instr_ptr = reinterpret_cast<uint32_t*>(pc);
-    uint32_t immediate = reinterpret_cast<uint32_t>(target);
+    uint32_t immediate = static_cast<uint32_t>(target);
     instr_ptr[0] = PatchMovwImmediate(instr_ptr[0], immediate & 0xFFFF);
     instr_ptr[1] = PatchMovwImmediate(instr_ptr[1], immediate >> 16);
     DCHECK(IsMovW(Memory::int32_at(pc)));
@@ -374,7 +374,7 @@ Address RelocInfo::js_to_wasm_address() const {

 Operand::Operand(Handle<HeapObject> handle) {
   rm_ = no_reg;
-  value_.immediate = reinterpret_cast<intptr_t>(handle.address());
+  value_.immediate = static_cast<intptr_t>(handle.address());
   rmode_ = RelocInfo::EMBEDDED_OBJECT;
 }

@@ -491,7 +491,7 @@ void Assembler::AllocateAndInstallRequestedHeapObjects(Isolate* isolate) {
         object = request.code_stub()->GetCode();
         break;
     }
-    Address pc = buffer_ + request.offset();
+    Address pc = reinterpret_cast<Address>(buffer_) + request.offset();
     Memory::Address_at(constant_pool_entry_address(pc, 0 /* unused */)) =
         object.address();
   }
@@ -5152,7 +5152,7 @@ void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) {
     return;
   }
   DCHECK_GE(buffer_space(), kMaxRelocSize);  // too late to grow buffer here
-  RelocInfo rinfo(pc_, rmode, data, nullptr);
+  RelocInfo rinfo(reinterpret_cast<Address>(pc_), rmode, data, nullptr);
   reloc_info_writer.Write(&rinfo);
 }

@@ -5191,7 +5191,7 @@ void Assembler::ConstantPoolAddEntry(int position, RelocInfo::Mode rmode,
       value != 0) {
     // Sharing entries here relies on canonicalized handles - without them, we
     // will miss the optimisation opportunity.
-    Address handle_address = reinterpret_cast<Address>(value);
+    Address handle_address = static_cast<Address>(value);
     auto existing = handle_to_index_map_.find(handle_address);
     if (existing != handle_to_index_map_.end()) {
       int index = existing->second;
@@ -5476,9 +5476,7 @@ PatchingAssembler::~PatchingAssembler() {
   DCHECK_EQ(reloc_info_writer.pos(), buffer_ + buffer_size_);
 }

-void PatchingAssembler::Emit(Address addr) {
-  emit(reinterpret_cast<Instr>(addr));
-}
+void PatchingAssembler::Emit(Address addr) { emit(static_cast<Instr>(addr)); }

 UseScratchRegisterScope::UseScratchRegisterScope(Assembler* assembler)
     : assembler_(assembler),
@@ -1496,8 +1496,8 @@ class Assembler : public AssemblerBase {
   void instr_at_put(int pos, Instr instr) {
     *reinterpret_cast<Instr*>(buffer_ + pos) = instr;
   }
-  static Instr instr_at(byte* pc) { return *reinterpret_cast<Instr*>(pc); }
-  static void instr_at_put(byte* pc, Instr instr) {
+  static Instr instr_at(Address pc) { return *reinterpret_cast<Instr*>(pc); }
+  static void instr_at_put(Address pc, Instr instr) {
     *reinterpret_cast<Instr*>(pc) = instr;
   }
   static Condition GetCondition(Instr instr);
@@ -666,7 +666,7 @@ class Instruction {
   // reference to an instruction is to convert a pointer. There is no way
   // to allocate or create instances of class Instruction.
   // Use the At(pc) function to create references to Instruction.
-  static Instruction* At(byte* pc) {
+  static Instruction* At(Address pc) {
     return reinterpret_cast<Instruction*>(pc);
   }

@@ -2592,7 +2592,7 @@ int Decoder::ConstantPoolSizeAt(byte* instr_ptr) {

 // Disassemble the instruction at *instr_ptr into the output buffer.
 int Decoder::InstructionDecode(byte* instr_ptr) {
-  Instruction* instr = Instruction::At(instr_ptr);
+  Instruction* instr = Instruction::At(reinterpret_cast<Address>(instr_ptr));
   // Print raw instruction bytes.
   out_buffer_pos_ += SNPrintF(out_buffer_ + out_buffer_pos_,
                               "%08x  ",
@@ -133,14 +133,14 @@ void TurboAssembler::Jump(intptr_t target, RelocInfo::Mode rmode,
 void TurboAssembler::Jump(Address target, RelocInfo::Mode rmode,
                           Condition cond) {
   DCHECK(!RelocInfo::IsCodeTarget(rmode));
-  Jump(reinterpret_cast<intptr_t>(target), rmode, cond);
+  Jump(static_cast<intptr_t>(target), rmode, cond);
 }

 void TurboAssembler::Jump(Handle<Code> code, RelocInfo::Mode rmode,
                           Condition cond) {
   DCHECK(RelocInfo::IsCodeTarget(rmode));
   // 'code' is always generated ARM code, never THUMB code
-  Jump(reinterpret_cast<intptr_t>(code.address()), rmode, cond);
+  Jump(static_cast<intptr_t>(code.address()), rmode, cond);
 }

 int TurboAssembler::CallSize(Register target, Condition cond) {
@@ -159,7 +159,7 @@ void TurboAssembler::Call(Register target, Condition cond) {
 int TurboAssembler::CallSize(Address target, RelocInfo::Mode rmode,
                              Condition cond) {
   Instr mov_instr = cond | MOV | LeaveCC;
-  Operand mov_operand = Operand(reinterpret_cast<intptr_t>(target), rmode);
+  Operand mov_operand = Operand(target, rmode);
   return kInstrSize +
          mov_operand.InstructionsRequired(this, mov_instr) * kInstrSize;
 }
@@ -203,7 +203,7 @@ void TurboAssembler::Call(Address target, RelocInfo::Mode rmode, Condition cond,
   //  blx   ip
   //                      @ return address

-  mov(ip, Operand(reinterpret_cast<int32_t>(target), rmode));
+  mov(ip, Operand(target, rmode));
   blx(ip, cond);

   DCHECK_EQ(expected_size, SizeOfCodeGeneratedSince(&start));
@@ -1717,7 +1717,7 @@ void MacroAssembler::JumpToExternalReference(const ExternalReference& builtin,
                                              bool builtin_exit_frame) {
 #if defined(__thumb__)
   // Thumb mode builtin.
-  DCHECK_EQ(reinterpret_cast<intptr_t>(builtin.address()) & 1, 1);
+  DCHECK_EQ(builtin.address() & 1, 1);
 #endif
   mov(r1, Operand(builtin));
   CEntryStub stub(isolate(), 1, kDontSaveFPRegs, kArgvOnStack,
@@ -1726,8 +1726,7 @@ void MacroAssembler::JumpToExternalReference(const ExternalReference& builtin,
 }

 void MacroAssembler::JumpToInstructionStream(Address entry) {
-  mov(kOffHeapTrampolineRegister,
-      Operand(reinterpret_cast<int32_t>(entry), RelocInfo::OFF_HEAP_TARGET));
+  mov(kOffHeapTrampolineRegister, Operand(entry, RelocInfo::OFF_HEAP_TARGET));
   Jump(kOffHeapTrampolineRegister);
 }

@@ -1666,17 +1666,18 @@ void Simulator::SoftwareInterrupt(Instruction* instr) {
         case ExternalReference::BUILTIN_FP_FP_CALL:
         case ExternalReference::BUILTIN_COMPARE_CALL:
           PrintF("Call to host function at %p with args %f, %f",
-                 static_cast<void*>(FUNCTION_ADDR(generic_target)), dval0,
-                 dval1);
+                 reinterpret_cast<void*>(FUNCTION_ADDR(generic_target)),
+                 dval0, dval1);
           break;
         case ExternalReference::BUILTIN_FP_CALL:
           PrintF("Call to host function at %p with arg %f",
-                 static_cast<void*>(FUNCTION_ADDR(generic_target)), dval0);
+                 reinterpret_cast<void*>(FUNCTION_ADDR(generic_target)),
+                 dval0);
           break;
         case ExternalReference::BUILTIN_FP_INT_CALL:
           PrintF("Call to host function at %p with args %f, %d",
-                 static_cast<void*>(FUNCTION_ADDR(generic_target)), dval0,
-                 ival);
+                 reinterpret_cast<void*>(FUNCTION_ADDR(generic_target)),
+                 dval0, ival);
           break;
         default:
           UNREACHABLE();
@@ -1803,8 +1804,8 @@ void Simulator::SoftwareInterrupt(Instruction* instr) {
       PrintF(
           "Call to host function at %p "
          "args %08x, %08x, %08x, %08x, %08x, %08x, %08x, %08x, %08x",
-          static_cast<void*>(FUNCTION_ADDR(target)), arg0, arg1, arg2, arg3,
-          arg4, arg5, arg6, arg7, arg8);
+          reinterpret_cast<void*>(FUNCTION_ADDR(target)), arg0, arg1, arg2,
+          arg3, arg4, arg5, arg6, arg7, arg8);
       if (!stack_aligned) {
         PrintF(" with unaligned stack %08x\n", get_register(sp));
       }
@@ -5731,13 +5732,12 @@ void Simulator::Execute() {
   }
 }

-
-void Simulator::CallInternal(byte* entry) {
+void Simulator::CallInternal(Address entry) {
   // Adjust JS-based stack limit to C-based stack limit.
   isolate_->stack_guard()->AdjustStackLimitForSimulator();

   // Prepare to execute the code at entry
-  set_register(pc, reinterpret_cast<int32_t>(entry));
+  set_register(pc, static_cast<int32_t>(entry));
   // Put down marker for end of simulation. The simulator will stop simulation
   // when the PC reaches this value. By saving the "end simulation" value into
   // the LR the simulation stops when returning to this call point.
@@ -5791,7 +5791,7 @@ void Simulator::CallInternal(byte* entry) {
   set_register(r11, r11_val);
 }

-intptr_t Simulator::CallImpl(byte* entry, int argument_count,
+intptr_t Simulator::CallImpl(Address entry, int argument_count,
                              const intptr_t* arguments) {
   // Set up arguments

@@ -5823,7 +5823,7 @@ intptr_t Simulator::CallImpl(byte* entry, int argument_count,
   return get_register(r0);
 }

-intptr_t Simulator::CallFPImpl(byte* entry, double d0, double d1) {
+intptr_t Simulator::CallFPImpl(Address entry, double d0, double d1) {
   if (use_eabi_hardfloat()) {
     set_d_register_from_double(0, d0);
     set_d_register_from_double(1, d1);
@@ -148,9 +148,7 @@ class Simulator : public SimulatorBase {
   void set_pc(int32_t value);
   int32_t get_pc() const;

-  Address get_sp() const {
-    return reinterpret_cast<Address>(static_cast<intptr_t>(get_register(sp)));
-  }
+  Address get_sp() const { return static_cast<Address>(get_register(sp)); }

   // Accessor to the internal simulator stack area.
   uintptr_t StackLimit(uintptr_t c_limit) const;
@@ -159,13 +157,13 @@ class Simulator : public SimulatorBase {
   void Execute();

   template <typename Return, typename... Args>
-  Return Call(byte* entry, Args... args) {
+  Return Call(Address entry, Args... args) {
     return VariadicCall<Return>(this, &Simulator::CallImpl, entry, args...);
   }

   // Alternative: call a 2-argument double function.
   template <typename Return>
-  Return CallFP(byte* entry, double d0, double d1) {
+  Return CallFP(Address entry, double d0, double d1) {
     return ConvertReturn<Return>(CallFPImpl(entry, d0, d1));
   }

@@ -212,9 +210,9 @@ class Simulator : public SimulatorBase {
     end_sim_pc = -2
   };

-  V8_EXPORT_PRIVATE intptr_t CallImpl(byte* entry, int argument_count,
+  V8_EXPORT_PRIVATE intptr_t CallImpl(Address entry, int argument_count,
                                       const intptr_t* arguments);
-  intptr_t CallFPImpl(byte* entry, double d0, double d1);
+  intptr_t CallFPImpl(Address entry, double d0, double d1);

   // Unsupported instructions use Format to print an error and stop execution.
   void Format(Instruction* instr, const char* format);
@@ -344,7 +342,7 @@ class Simulator : public SimulatorBase {
   void SetSpecialRegister(SRegisterFieldMask reg_and_mask, uint32_t value);
   uint32_t GetFromSpecialRegister(SRegister reg);

-  void CallInternal(byte* entry);
+  void CallInternal(Address entry);

   // Architecture state.
   // Saturating instructions require a Q flag to indicate saturation.
@@ -222,7 +222,7 @@ struct ImmediateInitializer<ExternalReference> {
     return RelocInfo::EXTERNAL_REFERENCE;
   }
   static inline int64_t immediate_for(ExternalReference t) {;
-    return reinterpret_cast<int64_t>(t.address());
+    return static_cast<int64_t>(t.address());
   }
 };

@@ -671,7 +671,7 @@ Address RelocInfo::target_internal_reference() {

 Address RelocInfo::target_internal_reference_address() {
   DCHECK(rmode_ == INTERNAL_REFERENCE);
-  return reinterpret_cast<Address>(pc_);
+  return pc_;
 }

 void RelocInfo::set_wasm_code_table_entry(Address target,
@@ -705,9 +705,9 @@ void RelocInfo::WipeOut() {
          IsRuntimeEntry(rmode_) || IsExternalReference(rmode_) ||
          IsInternalReference(rmode_));
   if (IsInternalReference(rmode_)) {
-    Memory::Address_at(pc_) = nullptr;
+    Memory::Address_at(pc_) = kNullAddress;
   } else {
-    Assembler::set_target_address_at(pc_, constant_pool_, nullptr);
+    Assembler::set_target_address_at(pc_, constant_pool_, kNullAddress);
   }
 }

@@ -290,7 +290,7 @@ bool AreConsecutive(const VRegister& reg1, const VRegister& reg2,
 }

 void Immediate::InitializeHandle(Handle<HeapObject> handle) {
-  value_ = reinterpret_cast<intptr_t>(handle.address());
+  value_ = static_cast<intptr_t>(handle.address());
   rmode_ = RelocInfo::EMBEDDED_OBJECT;
 }

@@ -570,8 +570,7 @@ void Assembler::Reset() {
   memset(buffer_, 0, pc_ - buffer_);
 #endif
   pc_ = buffer_;
-  reloc_info_writer.Reposition(reinterpret_cast<byte*>(buffer_ + buffer_size_),
-                               reinterpret_cast<byte*>(pc_));
+  reloc_info_writer.Reposition(buffer_ + buffer_size_, pc_);
   constpool_.Clear();
   next_constant_pool_check_ = 0;
   next_veneer_pool_check_ = kMaxInt;
@@ -591,7 +590,7 @@ void Assembler::AllocateAndInstallRequestedHeapObjects(Isolate* isolate) {
         object = request.code_stub()->GetCode();
         break;
     }
-    Address pc = buffer_ + request.offset();
+    Address pc = reinterpret_cast<Address>(buffer_) + request.offset();
     Memory::Address_at(target_pointer_address_at(pc)) = object.address();
   }
 }
@@ -4722,7 +4721,7 @@ void Assembler::GrowBuffer() {
   DeleteArray(buffer_);
   buffer_ = desc.buffer;
   buffer_size_ = desc.buffer_size;
-  pc_ = reinterpret_cast<byte*>(pc_) + pc_delta;
+  pc_ = pc_ + pc_delta;
   reloc_info_writer.Reposition(reloc_info_writer.pos() + rc_delta,
                                reloc_info_writer.last_pc() + pc_delta);

@@ -4745,7 +4744,7 @@ void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) {
   DCHECK(!RelocInfo::IsNone(rmode));

   // We do not try to reuse pool constants.
-  RelocInfo rinfo(reinterpret_cast<byte*>(pc_), rmode, data, nullptr);
+  RelocInfo rinfo(reinterpret_cast<Address>(pc_), rmode, data, nullptr);
   bool write_reloc_info = true;

   if ((rmode == RelocInfo::COMMENT) ||
@@ -4858,8 +4857,8 @@ bool Assembler::ShouldEmitVeneer(int max_reachable_pc, int margin) {


 void Assembler::RecordVeneerPool(int location_offset, int size) {
-  RelocInfo rinfo(buffer_ + location_offset, RelocInfo::VENEER_POOL,
-                  static_cast<intptr_t>(size), nullptr);
+  RelocInfo rinfo(reinterpret_cast<Address>(buffer_) + location_offset,
+                  RelocInfo::VENEER_POOL, static_cast<intptr_t>(size), nullptr);
   reloc_info_writer.Write(&rinfo);
 }

@@ -4965,8 +4964,7 @@ void Assembler::CheckVeneerPool(bool force_emit, bool require_jump,


 int Assembler::buffer_space() const {
-  return static_cast<int>(reloc_info_writer.pos() -
-                          reinterpret_cast<byte*>(pc_));
+  return static_cast<int>(reloc_info_writer.pos() - pc_);
 }

@@ -1775,8 +1775,7 @@ void MacroAssembler::JumpToExternalReference(const ExternalReference& builtin,
 }

 void MacroAssembler::JumpToInstructionStream(Address entry) {
-  Mov(kOffHeapTrampolineRegister,
-      Operand(reinterpret_cast<uint64_t>(entry), RelocInfo::OFF_HEAP_TARGET));
+  Mov(kOffHeapTrampolineRegister, Operand(entry, RelocInfo::OFF_HEAP_TARGET));
   Br(kOffHeapTrampolineRegister);
 }

@@ -1869,13 +1868,13 @@ void TurboAssembler::Jump(intptr_t target, RelocInfo::Mode rmode,
 void TurboAssembler::Jump(Address target, RelocInfo::Mode rmode,
                           Condition cond) {
   DCHECK(!RelocInfo::IsCodeTarget(rmode));
-  Jump(reinterpret_cast<intptr_t>(target), rmode, cond);
+  Jump(static_cast<intptr_t>(target), rmode, cond);
 }

 void TurboAssembler::Jump(Handle<Code> code, RelocInfo::Mode rmode,
                           Condition cond) {
   DCHECK(RelocInfo::IsCodeTarget(rmode));
-  Jump(reinterpret_cast<intptr_t>(code.address()), rmode, cond);
+  Jump(static_cast<intptr_t>(code.address()), rmode, cond);
 }

 void TurboAssembler::Call(Register target) {
@@ -1920,14 +1919,14 @@ void TurboAssembler::Call(Address target, RelocInfo::Mode rmode) {

   if (RelocInfo::IsNone(rmode)) {
     // Addresses are 48 bits so we never need to load the upper 16 bits.
-    uint64_t imm = reinterpret_cast<uint64_t>(target);
+    uint64_t imm = static_cast<uint64_t>(target);
     // If we don't use ARM tagged addresses, the 16 higher bits must be 0.
     DCHECK_EQ((imm >> 48) & 0xFFFF, 0);
     movz(temp, (imm >> 0) & 0xFFFF, 0);
     movk(temp, (imm >> 16) & 0xFFFF, 16);
     movk(temp, (imm >> 32) & 0xFFFF, 32);
   } else {
-    Ldr(temp, Immediate(reinterpret_cast<intptr_t>(target), rmode));
+    Ldr(temp, Immediate(static_cast<intptr_t>(target), rmode));
   }
   Blr(temp);
 #ifdef DEBUG
@@ -1973,7 +1972,7 @@ void TurboAssembler::CallForDeoptimization(Address target,
   // Deoptimisation table entries require the call address to be in x16, in
   // order to compute the entry id.
   DCHECK(temp.Is(x16));
-  Ldr(temp, Immediate(reinterpret_cast<intptr_t>(target), rmode));
+  Ldr(temp, Immediate(static_cast<intptr_t>(target), rmode));
   Blr(temp);

 #ifdef DEBUG
@@ -117,7 +117,7 @@ Simulator* Simulator::current(Isolate* isolate) {
   return sim;
 }

-void Simulator::CallImpl(byte* entry, CallArgument* args) {
+void Simulator::CallImpl(Address entry, CallArgument* args) {
   int index_x = 0;
   int index_d = 0;

@@ -719,7 +719,7 @@ class Simulator : public DecoderVisitor, public SimulatorBase {

   // Call an arbitrary function taking an arbitrary number of arguments.
   template <typename Return, typename... Args>
-  Return Call(byte* entry, Args... args) {
+  Return Call(Address entry, Args... args) {
     // Convert all arguments to CallArgument.
     CallArgument call_args[] = {CallArgument(args)..., CallArgument::End()};
     CallImpl(entry, call_args);
@@ -2279,7 +2279,7 @@ class Simulator : public DecoderVisitor, public SimulatorBase {
  private:
  void Init(FILE* stream);

-  V8_EXPORT_PRIVATE void CallImpl(byte* entry, CallArgument* args);
+  V8_EXPORT_PRIVATE void CallImpl(Address entry, CallArgument* args);

   // Read floating point return values.
   template <typename T>
@@ -313,9 +313,10 @@ void RelocInfoWriter::Write(const RelocInfo* rinfo) {
   byte* begin_pos = pos_;
 #endif
   DCHECK(rinfo->rmode() < RelocInfo::NUMBER_OF_MODES);
-  DCHECK_GE(rinfo->pc() - last_pc_, 0);
+  DCHECK_GE(rinfo->pc() - reinterpret_cast<Address>(last_pc_), 0);
   // Use unsigned delta-encoding for pc.
-  uint32_t pc_delta = static_cast<uint32_t>(rinfo->pc() - last_pc_);
+  uint32_t pc_delta =
+      static_cast<uint32_t>(rinfo->pc() - reinterpret_cast<Address>(last_pc_));

   // The two most common modes are given small tags, and usually fit in a byte.
   if (rmode == RelocInfo::EMBEDDED_OBJECT) {
@@ -337,7 +338,7 @@ void RelocInfoWriter::Write(const RelocInfo* rinfo) {
       WriteIntData(static_cast<int>(rinfo->data()));
     }
   }
-  last_pc_ = rinfo->pc();
+  last_pc_ = reinterpret_cast<byte*>(rinfo->pc());
 #ifdef DEBUG
   DCHECK_LE(begin_pos - pos_, kMaxSize);
 #endif
@@ -464,7 +465,7 @@ RelocIterator::RelocIterator(Code* code, int mode_mask)

 RelocIterator::RelocIterator(const CodeDesc& desc, int mode_mask)
     : mode_mask_(mode_mask) {
-  rinfo_.pc_ = desc.buffer;
+  rinfo_.pc_ = reinterpret_cast<Address>(desc.buffer);
   // Relocation info is read backwards.
   pos_ = desc.buffer + desc.buffer_size;
   end_ = pos_ - desc.reloc_size;
@@ -476,7 +477,7 @@ RelocIterator::RelocIterator(Vector<byte> instructions,
                              Vector<const byte> reloc_info, Address const_pool,
                              int mode_mask)
     : mode_mask_(mode_mask) {
-  rinfo_.pc_ = instructions.start();
+  rinfo_.pc_ = reinterpret_cast<Address>(instructions.start());
   rinfo_.constant_pool_ = const_pool;
   rinfo_.flags_ = RelocInfo::kInNativeWasmCode;
   // Relocation info is read backwards.
@@ -551,7 +552,7 @@ const char* RelocInfo::RelocModeName(RelocInfo::Mode rmode) {
 }

 void RelocInfo::Print(Isolate* isolate, std::ostream& os) {  // NOLINT
-  os << static_cast<const void*>(pc_) << "  " << RelocModeName(rmode_);
+  os << reinterpret_cast<const void*>(pc_) << "  " << RelocModeName(rmode_);
   if (IsComment(rmode_)) {
     os << "  (" << reinterpret_cast<char*>(data_) << ")";
   } else if (rmode_ == DEOPT_SCRIPT_OFFSET || rmode_ == DEOPT_INLINING_ID) {
@@ -565,7 +566,7 @@ void RelocInfo::Print(Isolate* isolate, std::ostream& os) {  // NOLINT
     ExternalReferenceEncoder ref_encoder(isolate);
     os << " ("
        << ref_encoder.NameOfAddress(isolate, target_external_reference())
-       << ")  (" << static_cast<const void*>(target_external_reference())
+       << ")  (" << reinterpret_cast<const void*>(target_external_reference())
        << ")";
   } else if (IsCodeTarget(rmode_)) {
     const Address code_target = target_address();
@@ -582,7 +583,7 @@ void RelocInfo::Print(Isolate* isolate, std::ostream& os) {  // NOLINT
       }
       os << ")  ";
     }
-    os << "  (" << static_cast<const void*>(target_address()) << ")";
+    os << "  (" << reinterpret_cast<const void*>(target_address()) << ")";
   } else if (IsRuntimeEntry(rmode_) && isolate->deoptimizer_data() != nullptr) {
     // Depotimization bailouts are stored as runtime entries.
     int id = Deoptimizer::GetDeoptimizationId(
@@ -607,7 +608,7 @@ void RelocInfo::Verify(Isolate* isolate) {
     case CODE_TARGET: {
       // convert inline target address to code object
       Address addr = target_address();
-      CHECK_NOT_NULL(addr);
+      CHECK_NE(addr, kNullAddress);
       // Check that we can find the right code object.
       Code* code = Code::GetCodeFromTargetAddress(addr);
       Object* found = isolate->FindCodeObject(addr);
@@ -626,7 +627,7 @@ void RelocInfo::Verify(Isolate* isolate) {
     }
     case OFF_HEAP_TARGET: {
       Address addr = target_off_heap_target();
-      CHECK_NOT_NULL(addr);
+      CHECK_NE(addr, kNullAddress);
       CHECK_NOT_NULL(InstructionStream::TryLookupCode(isolate, addr));
       break;
     }
@@ -162,6 +162,9 @@ class AssemblerBase: public Malloced {
   static const int kMinimalBufferSize = 4*KB;

   static void FlushICache(void* start, size_t size);
+  static void FlushICache(Address start, size_t size) {
+    return FlushICache(reinterpret_cast<void*>(start), size);
+  }

  protected:
   // The buffer into which code and relocation info are generated. It could
@@ -180,6 +183,7 @@ class AssemblerBase: public Malloced {
   }

   // The program counter, which points into the buffer above and moves forward.
+  // TODO(jkummerow): This should probably have type {Address}.
   byte* pc_;

  private:
@@ -414,7 +418,7 @@ class RelocInfo {

   RelocInfo() = default;

-  RelocInfo(byte* pc, Mode rmode, intptr_t data, Code* host)
+  RelocInfo(Address pc, Mode rmode, intptr_t data, Code* host)
       : pc_(pc), rmode_(rmode), data_(data), host_(host) {}

   static inline bool IsRealRelocMode(Mode mode) {
@@ -476,8 +480,7 @@ class RelocInfo {
   static constexpr int ModeMask(Mode mode) { return 1 << mode; }

   // Accessors
-  byte* pc() const { return pc_; }
-  void set_pc(byte* pc) { pc_ = pc; }
+  Address pc() const { return pc_; }
   Mode rmode() const { return rmode_; }
   intptr_t data() const { return data_; }
   Code* host() const { return host_; }
@@ -617,11 +620,11 @@ class RelocInfo {
   // to be relocated and not the address of the instruction
   // referencing the constant pool entry (except when rmode_ ==
   // comment).
-  byte* pc_;
+  Address pc_;
   Mode rmode_;
   intptr_t data_ = 0;
   Code* host_;
-  Address constant_pool_ = nullptr;
+  Address constant_pool_ = kNullAddress;
   Flags flags_;
   friend class RelocIterator;
 };
@@ -40,11 +40,11 @@ struct BuiltinMetadata {
                                  { FUNCTION_ADDR(Builtin_##Name) }},
 #ifdef V8_TARGET_BIG_ENDIAN
 #define DECL_TFJ(Name, Count, ...) { #Name, Builtins::TFJ, \
-  { reinterpret_cast<Address>(static_cast<uintptr_t>( \
+  { static_cast<Address>(static_cast<uintptr_t>( \
     Count) << (kBitsPerByte * (kPointerSize - 1))) }},
 #else
 #define DECL_TFJ(Name, Count, ...) { #Name, Builtins::TFJ, \
-  { reinterpret_cast<Address>(Count) }},
+  { static_cast<Address>(Count) }},
 #endif
 #define DECL_TFC(Name, ...) { #Name, Builtins::TFC, {} },
 #define DECL_TFS(Name, ...) { #Name, Builtins::TFS, {} },
@@ -91,7 +91,7 @@ void Builtins::IterateBuiltins(RootVisitor* v) {
   }
 }

-const char* Builtins::Lookup(byte* pc) {
+const char* Builtins::Lookup(Address pc) {
   // may be called during initialization (disassembler!)
   if (initialized_) {
     for (int i = 0; i < builtin_count; i++) {
@@ -39,7 +39,7 @@ class Builtins {
   void IterateBuiltins(RootVisitor* v);

   // Disassembler support.
-  const char* Lookup(byte* pc);
+  const char* Lookup(Address pc);

   enum Name : int32_t {
 #define DEF_ENUM(Name, ...) k##Name,
@@ -28,8 +28,7 @@ static const bool FLAG_enable_slow_asserts = false;
 }  // namespace v8

 #define DCHECK_TAG_ALIGNED(address) \
-  DCHECK((reinterpret_cast<intptr_t>(address) & \
-          ::v8::internal::kHeapObjectTagMask) == 0)
+  DCHECK((address & ::v8::internal::kHeapObjectTagMask) == 0)

 #define DCHECK_SIZE_TAG_ALIGNED(size) \
   DCHECK((size & ::v8::internal::kHeapObjectTagMask) == 0)
@@ -33,7 +33,7 @@ CodeStubDescriptor::CodeStubDescriptor(CodeStub* stub)
       stack_parameter_count_(no_reg),
       hint_stack_parameter_count_(-1),
       function_mode_(NOT_JS_FUNCTION_STUB_MODE),
-      deoptimization_handler_(nullptr),
+      deoptimization_handler_(kNullAddress),
       miss_handler_(),
       has_miss_handler_(false) {
   stub->InitializeDescriptor(this);
@@ -44,7 +44,7 @@ CodeStubDescriptor::CodeStubDescriptor(Isolate* isolate, uint32_t stub_key)
       stack_parameter_count_(no_reg),
      hint_stack_parameter_count_(-1),
       function_mode_(NOT_JS_FUNCTION_STUB_MODE),
-      deoptimization_handler_(nullptr),
+      deoptimization_handler_(kNullAddress),
       miss_handler_(),
       has_miss_handler_(false) {
   CodeStub::InitializeDescriptor(isolate, stub_key, this);
@@ -302,11 +302,11 @@ class CodeStubDescriptor {

   CodeStubDescriptor(Isolate* isolate, uint32_t stub_key);

-  void Initialize(Address deoptimization_handler = nullptr,
+  void Initialize(Address deoptimization_handler = kNullAddress,
                   int hint_stack_parameter_count = -1,
                   StubFunctionMode function_mode = NOT_JS_FUNCTION_STUB_MODE);
   void Initialize(Register stack_parameter_count,
-                  Address deoptimization_handler = nullptr,
+                  Address deoptimization_handler = kNullAddress,
                   int hint_stack_parameter_count = -1,
                   StubFunctionMode function_mode = NOT_JS_FUNCTION_STUB_MODE);

@@ -698,8 +698,8 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
       }

       if (instr->InputAt(0)->IsImmediate()) {
-        Address wasm_code = reinterpret_cast<Address>(
-            i.ToConstant(instr->InputAt(0)).ToInt32());
+        Address wasm_code =
+            static_cast<Address>(i.ToConstant(instr->InputAt(0)).ToInt32());
         __ Call(wasm_code, rmode);
       } else {
         __ Call(i.InputRegister(0));
@@ -747,8 +747,8 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
       }

       if (instr->InputAt(0)->IsImmediate()) {
-        Address wasm_code = reinterpret_cast<Address>(
-            i.ToConstant(instr->InputAt(0)).ToInt32());
+        Address wasm_code =
+            static_cast<Address>(i.ToConstant(instr->InputAt(0)).ToInt32());
         __ Jump(wasm_code, rmode);
       } else {
         __ Jump(i.InputRegister(0));
@@ -623,8 +623,8 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
       if (info()->IsWasm()) scope.Open(tasm());

       if (instr->InputAt(0)->IsImmediate()) {
-        Address wasm_code = reinterpret_cast<Address>(
-            i.ToConstant(instr->InputAt(0)).ToInt64());
+        Address wasm_code =
+            static_cast<Address>(i.ToConstant(instr->InputAt(0)).ToInt64());
         if (info()->IsWasm()) {
           __ Call(wasm_code, RelocInfo::WASM_CALL);
         } else {
@@ -669,8 +669,8 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
       if (info()->IsWasm()) scope.Open(tasm());

       if (instr->InputAt(0)->IsImmediate()) {
-        Address wasm_code = reinterpret_cast<Address>(
-            i.ToConstant(instr->InputAt(0)).ToInt64());
+        Address wasm_code =
+            static_cast<Address>(i.ToConstant(instr->InputAt(0)).ToInt64());
         if (info()->IsWasm()) {
           __ Jump(wasm_code, RelocInfo::WASM_CALL);
         } else {
@@ -130,7 +130,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleDeoptimizerCall(
       GetDeoptimizationReason(deoptimization_id);
   Address deopt_entry = Deoptimizer::GetDeoptimizationEntry(
       tasm()->isolate(), deoptimization_id, bailout_type);
-  if (deopt_entry == nullptr) return kTooManyDeoptimizationBailouts;
+  if (deopt_entry == kNullAddress) return kTooManyDeoptimizationBailouts;
   if (info()->is_source_positions_enabled()) {
     tasm()->RecordDeoptReason(deoptimization_reason, pos, deoptimization_id);
   }
@@ -39,7 +39,7 @@ std::unique_ptr<char[]> GetVisualizerLogFileName(OptimizedCompilationInfo* info,
     SNPrintF(filename, "turbo-%s-%i", debug_name.get(), optimization_id);
   } else if (info->has_shared_info()) {
     SNPrintF(filename, "turbo-%p-%i",
-             static_cast<void*>(info->shared_info()->address()),
+             reinterpret_cast<void*>(info->shared_info()->address()),
             optimization_id);
   } else {
     SNPrintF(filename, "turbo-none-%i", optimization_id);
@@ -64,7 +64,7 @@ class IA32OperandConverter : public InstructionOperandConverter {
     Constant constant = ToConstant(operand);
     if (constant.type() == Constant::kInt32 &&
         RelocInfo::IsWasmReference(constant.rmode())) {
-      return Immediate(reinterpret_cast<Address>(constant.ToInt32()),
+      return Immediate(static_cast<Address>(constant.ToInt32()),
                        constant.rmode());
     }
     switch (constant.type()) {
@@ -600,8 +600,8 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
     case kArchCallWasmFunction: {
       MoveOperandIfAliasedWithPoisonRegister(instr, this);
       if (HasImmediateInput(instr, 0)) {
-        Address wasm_code = reinterpret_cast<Address>(
-            i.ToConstant(instr->InputAt(0)).ToInt32());
+        Address wasm_code =
+            static_cast<Address>(i.ToConstant(instr->InputAt(0)).ToInt32());
         if (info()->IsWasm()) {
           __ wasm_call(wasm_code, RelocInfo::WASM_CALL);
         } else {
@@ -649,8 +649,8 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
     case kArchTailCallWasm: {
       MoveOperandIfAliasedWithPoisonRegister(instr, this);
       if (HasImmediateInput(instr, 0)) {
-        Address wasm_code = reinterpret_cast<Address>(
-            i.ToConstant(instr->InputAt(0)).ToInt32());
+        Address wasm_code =
+            static_cast<Address>(i.ToConstant(instr->InputAt(0)).ToInt32());
         if (info()->IsWasm()) {
           __ jmp(wasm_code, RelocInfo::WASM_CALL);
         } else {
@@ -603,8 +603,7 @@ std::ostream& operator<<(std::ostream& os, const Constant& constant) {
     case Constant::kFloat64:
       return os << constant.ToFloat64().value();
     case Constant::kExternalReference:
-      return os << static_cast<const void*>(
-                        constant.ToExternalReference().address());
+      return os << constant.ToExternalReference().address();
     case Constant::kHeapObject:
       return os << Brief(*constant.ToHeapObject());
     case Constant::kRpoNumber:
|
@ -609,8 +609,8 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
|
||||
}
|
||||
case kArchCallWasmFunction: {
|
||||
if (instr->InputAt(0)->IsImmediate()) {
|
||||
Address wasm_code = reinterpret_cast<Address>(
|
||||
i.ToConstant(instr->InputAt(0)).ToInt32());
|
||||
Address wasm_code =
|
||||
static_cast<Address>(i.ToConstant(instr->InputAt(0)).ToInt32());
|
||||
__ Call(wasm_code, info()->IsWasm() ? RelocInfo::WASM_CALL
|
||||
: RelocInfo::JS_TO_WASM_CALL);
|
||||
} else {
|
||||
@@ -638,8 +638,8 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
     }
     case kArchTailCallWasm: {
       if (instr->InputAt(0)->IsImmediate()) {
-        Address wasm_code = reinterpret_cast<Address>(
-            i.ToConstant(instr->InputAt(0)).ToInt32());
+        Address wasm_code =
+            static_cast<Address>(i.ToConstant(instr->InputAt(0)).ToInt32());
         __ Jump(wasm_code, info()->IsWasm() ? RelocInfo::WASM_CALL
                                             : RelocInfo::JS_TO_WASM_CALL);
       } else {
@@ -629,8 +629,8 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
                          i.TempRegister(2));
       }
       if (instr->InputAt(0)->IsImmediate()) {
-        Address wasm_code = reinterpret_cast<Address>(
-            i.ToConstant(instr->InputAt(0)).ToInt64());
+        Address wasm_code =
+            static_cast<Address>(i.ToConstant(instr->InputAt(0)).ToInt64());
         __ Call(wasm_code, info()->IsWasm() ? RelocInfo::WASM_CALL
                                             : RelocInfo::JS_TO_WASM_CALL);
       } else {
@@ -660,8 +660,8 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
     }
     case kArchTailCallWasm: {
       if (instr->InputAt(0)->IsImmediate()) {
-        Address wasm_code = reinterpret_cast<Address>(
-            i.ToConstant(instr->InputAt(0)).ToInt64());
+        Address wasm_code =
+            static_cast<Address>(i.ToConstant(instr->InputAt(0)).ToInt64());
         __ Jump(wasm_code, info()->IsWasm() ? RelocInfo::WASM_CALL
                                             : RelocInfo::JS_TO_WASM_CALL);
       } else {
@@ -880,11 +880,11 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(

       if (instr->InputAt(0)->IsImmediate()) {
 #ifdef V8_TARGET_ARCH_PPC64
-        Address wasm_code = reinterpret_cast<Address>(
-            i.ToConstant(instr->InputAt(0)).ToInt64());
+        Address wasm_code =
+            static_cast<Address>(i.ToConstant(instr->InputAt(0)).ToInt64());
 #else
-        Address wasm_code = reinterpret_cast<Address>(
-            i.ToConstant(instr->InputAt(0)).ToInt32());
+        Address wasm_code =
+            static_cast<Address>(i.ToConstant(instr->InputAt(0)).ToInt32());
 #endif
         __ Call(wasm_code, rmode);
       } else {
@@ -926,8 +926,8 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
       }

       if (instr->InputAt(0)->IsImmediate()) {
-        Address wasm_code = reinterpret_cast<Address>(
-            i.ToConstant(instr->InputAt(0)).ToInt32());
+        Address wasm_code =
+            static_cast<Address>(i.ToConstant(instr->InputAt(0)).ToInt32());
         __ Jump(wasm_code, rmode);
       } else {
         __ Jump(i.InputRegister(0));
@@ -1128,11 +1128,11 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(

       if (instr->InputAt(0)->IsImmediate()) {
 #ifdef V8_TARGET_ARCH_S390X
-        Address wasm_code = reinterpret_cast<Address>(
-            i.ToConstant(instr->InputAt(0)).ToInt64());
+        Address wasm_code =
+            static_cast<Address>(i.ToConstant(instr->InputAt(0)).ToInt64());
 #else
-        Address wasm_code = reinterpret_cast<Address>(
-            i.ToConstant(instr->InputAt(0)).ToInt32());
+        Address wasm_code =
+            static_cast<Address>(i.ToConstant(instr->InputAt(0)).ToInt32());
 #endif
         __ Call(wasm_code, rmode);
       } else {
@@ -1172,8 +1172,8 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
       }

       if (instr->InputAt(0)->IsImmediate()) {
-        Address wasm_code = reinterpret_cast<Address>(
-            i.ToConstant(instr->InputAt(0)).ToInt32());
+        Address wasm_code =
+            static_cast<Address>(i.ToConstant(instr->InputAt(0)).ToInt32());
         __ Jump(wasm_code, rmode);
       } else {
         __ Jump(i.InputRegister(0));
@@ -2619,9 +2619,8 @@ Node* WasmGraphBuilder::CallDirect(uint32_t index, Node** args, Node*** rets,
   } else {
     // A call to a function in this module.
     // Just encode the function index. This will be patched at instantiation.
-    Address code = reinterpret_cast<Address>(index);
-    args[0] = jsgraph()->RelocatableIntPtrConstant(
-        reinterpret_cast<intptr_t>(code), RelocInfo::WASM_CALL);
+    Address code = static_cast<Address>(index);
+    args[0] = jsgraph()->RelocatableIntPtrConstant(code, RelocInfo::WASM_CALL);

     return BuildWasmCall(sig, args, rets, position);
   }
@@ -3071,9 +3070,9 @@ void WasmGraphBuilder::BuildJSToWasmWrapper(Handle<WeakCell> weak_instance,
                                 *effect_, *control_);

   Address instr_start =
-      wasm_code == nullptr ? nullptr : wasm_code->instructions().start();
+      wasm_code == nullptr ? kNullAddress : wasm_code->instruction_start();
   Node* wasm_code_node = jsgraph()->RelocatableIntPtrConstant(
-      reinterpret_cast<intptr_t>(instr_start), RelocInfo::JS_TO_WASM_CALL);
+      instr_start, RelocInfo::JS_TO_WASM_CALL);
   if (!wasm::IsJSCompatibleSignature(sig_)) {
     // Throw a TypeError. Use the js_context of the calling javascript function
     // (passed as a parameter), such that the generated code is js_context
@@ -704,8 +704,8 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
     }
     case kArchCallWasmFunction: {
       if (HasImmediateInput(instr, 0)) {
-        Address wasm_code = reinterpret_cast<Address>(
-            i.ToConstant(instr->InputAt(0)).ToInt64());
+        Address wasm_code =
+            static_cast<Address>(i.ToConstant(instr->InputAt(0)).ToInt64());
         if (info()->IsWasm()) {
           __ near_call(wasm_code, RelocInfo::WASM_CALL);
         } else {
@@ -753,8 +753,8 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
     }
     case kArchTailCallWasm: {
       if (HasImmediateInput(instr, 0)) {
-        Address wasm_code = reinterpret_cast<Address>(
-            i.ToConstant(instr->InputAt(0)).ToInt64());
+        Address wasm_code =
+            static_cast<Address>(i.ToConstant(instr->InputAt(0)).ToInt64());
         if (info()->IsWasm()) {
           __ near_jmp(wasm_code, RelocInfo::WASM_CALL);
         } else {
@@ -43,9 +43,10 @@ inline unsigned int FastD2UI(double x) {
     x += k2Pow52;
     uint32_t result;
 #ifndef V8_TARGET_BIG_ENDIAN
-    Address mantissa_ptr = reinterpret_cast<Address>(&x);
+    void* mantissa_ptr = reinterpret_cast<void*>(&x);
 #else
-    Address mantissa_ptr = reinterpret_cast<Address>(&x) + kInt32Size;
+    void* mantissa_ptr =
+        reinterpret_cast<void*>(reinterpret_cast<Address>(&x) + kInt32Size);
 #endif
     // Copy least significant 32 bits of mantissa.
     memcpy(&result, mantissa_ptr, sizeof(result));
@@ -2699,7 +2699,9 @@ void Worker::ExecuteInThread() {
           if (Shell::DeserializeValue(isolate, std::move(data))
                   .ToLocal(&value)) {
             Local<Value> argv[] = {value};
-            (void)onmessage_fun->Call(context, global, 1, argv);
+            MaybeLocal<Value> result =
+                onmessage_fun->Call(context, global, 1, argv);
+            USE(result);
           }
           if (try_catch.HasCaught()) {
             Shell::ReportException(isolate, &try_catch);
@ -339,7 +339,7 @@ void Debug::ThreadInit() {
|
||||
thread_local_.async_task_count_ = 0;
|
||||
thread_local_.last_breakpoint_id_ = 0;
|
||||
clear_suspended_generator();
|
||||
thread_local_.restart_fp_ = nullptr;
|
||||
thread_local_.restart_fp_ = kNullAddress;
|
||||
base::Relaxed_Store(&thread_local_.current_debug_scope_,
|
||||
static_cast<base::AtomicWord>(0));
|
||||
UpdateHookOnFunctionCall();
|
||||
|
@ -407,7 +407,7 @@ Deoptimizer::Deoptimizer(Isolate* isolate, JSFunction* function,
|
||||
deoptimizing_throw_ = true;
|
||||
}
|
||||
|
||||
DCHECK_NOT_NULL(from);
|
||||
DCHECK_NE(from, kNullAddress);
|
||||
compiled_code_ = FindOptimizedCode();
|
||||
DCHECK_NOT_NULL(compiled_code_);
|
||||
|
||||
@ -495,7 +495,7 @@ void Deoptimizer::DeleteFrameDescriptions() {
|
||||
Address Deoptimizer::GetDeoptimizationEntry(Isolate* isolate, int id,
|
||||
BailoutType type) {
|
||||
CHECK_GE(id, 0);
|
||||
if (id >= kMaxNumberOfEntries) return nullptr;
|
||||
if (id >= kMaxNumberOfEntries) return kNullAddress;
|
||||
DeoptimizerData* data = isolate->deoptimizer_data();
|
||||
CHECK_LE(type, kLastBailoutType);
|
||||
CHECK_NOT_NULL(data->deopt_entry_code_[type]);
|
||||
@ -870,8 +870,7 @@ void Deoptimizer::DoComputeInterpretedFrame(TranslatedFrame* translated_frame,
|
||||
"context ");
|
||||
if (context == isolate_->heap()->arguments_marker()) {
|
||||
Address output_address =
|
||||
reinterpret_cast<Address>(output_[frame_index]->GetTop()) +
|
||||
output_offset;
|
||||
static_cast<Address>(output_[frame_index]->GetTop()) + output_offset;
|
||||
values_to_materialize_.push_back({output_address, context_pos});
|
||||
}
|
||||
value_iterator++;
|
||||
@ -883,8 +882,7 @@ void Deoptimizer::DoComputeInterpretedFrame(TranslatedFrame* translated_frame,
|
||||
WriteValueToOutput(function, 0, frame_index, output_offset, "function ");
|
||||
if (function == isolate_->heap()->arguments_marker()) {
|
||||
Address output_address =
|
||||
reinterpret_cast<Address>(output_[frame_index]->GetTop()) +
|
||||
output_offset;
|
||||
static_cast<Address>(output_[frame_index]->GetTop()) + output_offset;
|
||||
values_to_materialize_.push_back({output_address, function_iterator});
|
||||
}
|
||||
|
||||
@ -972,12 +970,12 @@ void Deoptimizer::DoComputeInterpretedFrame(TranslatedFrame* translated_frame,
|
||||
? builtins->builtin(Builtins::kInterpreterEnterBytecodeAdvance)
|
||||
: builtins->builtin(Builtins::kInterpreterEnterBytecodeDispatch);
|
||||
output_frame->SetPc(
|
||||
reinterpret_cast<intptr_t>(dispatch_builtin->InstructionStart()));
|
||||
static_cast<intptr_t>(dispatch_builtin->InstructionStart()));
|
||||
|
||||
// Update constant pool.
|
||||
if (FLAG_enable_embedded_constant_pool) {
|
||||
intptr_t constant_pool_value =
|
||||
reinterpret_cast<intptr_t>(dispatch_builtin->constant_pool());
|
||||
static_cast<intptr_t>(dispatch_builtin->constant_pool());
|
||||
output_frame->SetConstantPool(constant_pool_value);
|
||||
if (is_topmost) {
|
||||
Register constant_pool_reg =
|
||||
@ -996,7 +994,7 @@ void Deoptimizer::DoComputeInterpretedFrame(TranslatedFrame* translated_frame,
|
||||
// Set the continuation for the topmost frame.
|
||||
Code* continuation = builtins->builtin(Builtins::kNotifyDeoptimized);
|
||||
output_frame->SetContinuation(
|
||||
reinterpret_cast<intptr_t>(continuation->InstructionStart()));
|
||||
static_cast<intptr_t>(continuation->InstructionStart()));
|
||||
}
|
||||
}
|
||||
|
||||
@ -1106,8 +1104,7 @@ void Deoptimizer::DoComputeArgumentsAdaptorFrame(
|
||||
WriteValueToOutput(function, 0, frame_index, output_offset, "function ");
|
||||
if (function == isolate_->heap()->arguments_marker()) {
|
||||
Address output_address =
|
||||
reinterpret_cast<Address>(output_[frame_index]->GetTop()) +
|
||||
output_offset;
|
||||
static_cast<Address>(output_[frame_index]->GetTop()) + output_offset;
|
||||
values_to_materialize_.push_back({output_address, function_iterator});
|
||||
}
|
||||
|
||||
@ -1129,13 +1126,13 @@ void Deoptimizer::DoComputeArgumentsAdaptorFrame(
|
||||
Builtins* builtins = isolate_->builtins();
|
||||
Code* adaptor_trampoline =
|
||||
builtins->builtin(Builtins::kArgumentsAdaptorTrampoline);
|
||||
intptr_t pc_value = reinterpret_cast<intptr_t>(
|
||||
intptr_t pc_value = static_cast<intptr_t>(
|
||||
adaptor_trampoline->InstructionStart() +
|
||||
isolate_->heap()->arguments_adaptor_deopt_pc_offset()->value());
|
||||
output_frame->SetPc(pc_value);
|
||||
if (FLAG_enable_embedded_constant_pool) {
|
||||
intptr_t constant_pool_value =
|
||||
reinterpret_cast<intptr_t>(adaptor_trampoline->constant_pool());
|
||||
static_cast<intptr_t>(adaptor_trampoline->constant_pool());
|
||||
output_frame->SetConstantPool(constant_pool_value);
|
||||
}
|
||||
}
|
||||
@ -1217,7 +1214,7 @@ void Deoptimizer::DoComputeConstructStubFrame(TranslatedFrame* translated_frame,
|
||||
// a captured object, override the slot address for a captured object.
|
||||
WriteTranslatedValueToOutput(
|
||||
&value_iterator, &input_index, frame_index, output_offset, nullptr,
|
||||
(i == 0) ? reinterpret_cast<Address>(top_address) : nullptr);
|
||||
(i == 0) ? static_cast<Address>(top_address) : kNullAddress);
|
||||
}
|
||||
|
||||
DCHECK_EQ(output_offset, output_frame->GetLastArgumentSlotOffset());
|
||||
@ -1324,13 +1321,13 @@ void Deoptimizer::DoComputeConstructStubFrame(TranslatedFrame* translated_frame,
|
||||
bailout_id == BailoutId::ConstructStubCreate()
|
||||
? isolate_->heap()->construct_stub_create_deopt_pc_offset()->value()
|
||||
: isolate_->heap()->construct_stub_invoke_deopt_pc_offset()->value();
|
||||
intptr_t pc_value = reinterpret_cast<intptr_t>(start + pc_offset);
|
||||
intptr_t pc_value = static_cast<intptr_t>(start + pc_offset);
|
||||
output_frame->SetPc(pc_value);
|
||||
|
||||
// Update constant pool.
|
||||
if (FLAG_enable_embedded_constant_pool) {
|
||||
intptr_t constant_pool_value =
|
||||
reinterpret_cast<intptr_t>(construct_stub->constant_pool());
|
||||
static_cast<intptr_t>(construct_stub->constant_pool());
|
||||
output_frame->SetConstantPool(constant_pool_value);
|
||||
if (is_topmost) {
|
||||
Register constant_pool_reg =
|
||||
@ -1354,7 +1351,7 @@ void Deoptimizer::DoComputeConstructStubFrame(TranslatedFrame* translated_frame,
|
||||
DCHECK_EQ(LAZY, bailout_type_);
|
||||
Code* continuation = builtins->builtin(Builtins::kNotifyDeoptimized);
|
||||
output_frame->SetContinuation(
|
||||
reinterpret_cast<intptr_t>(continuation->InstructionStart()));
|
||||
static_cast<intptr_t>(continuation->InstructionStart()));
|
||||
}
|
||||
}
|
||||
|
||||
@ -1731,7 +1728,7 @@ void Deoptimizer::DoComputeBuiltinContinuation(
|
||||
"builtin JavaScript context\n");
|
||||
if (context == isolate_->heap()->arguments_marker()) {
|
||||
Address output_address =
|
||||
reinterpret_cast<Address>(output_[frame_index]->GetTop()) +
|
||||
static_cast<Address>(output_[frame_index]->GetTop()) +
|
||||
output_frame_offset;
|
||||
values_to_materialize_.push_back(
|
||||
{output_address, context_register_value.iterator_});
|
||||
@ -1767,7 +1764,7 @@ void Deoptimizer::DoComputeBuiltinContinuation(
|
||||
}
|
||||
if (object == isolate_->heap()->arguments_marker()) {
|
||||
Address output_address =
|
||||
reinterpret_cast<Address>(output_[frame_index]->GetTop()) +
|
||||
static_cast<Address>(output_[frame_index]->GetTop()) +
|
||||
output_frame_offset;
|
||||
values_to_materialize_.push_back(
|
||||
{output_address, register_values[code].iterator_});
|
||||
@ -1820,16 +1817,16 @@ void Deoptimizer::DoComputeBuiltinContinuation(
|
||||
Code* continue_to_builtin = isolate()->builtins()->builtin(
|
||||
TrampolineForBuiltinContinuation(mode, must_handle_result));
|
||||
output_frame->SetPc(
|
||||
reinterpret_cast<intptr_t>(continue_to_builtin->InstructionStart()));
|
||||
static_cast<intptr_t>(continue_to_builtin->InstructionStart()));
|
||||
|
||||
Code* continuation =
|
||||
isolate()->builtins()->builtin(Builtins::kNotifyDeoptimized);
|
||||
output_frame->SetContinuation(
|
||||
reinterpret_cast<intptr_t>(continuation->InstructionStart()));
|
||||
static_cast<intptr_t>(continuation->InstructionStart()));
|
||||
}
|
||||
|
||||
void Deoptimizer::MaterializeHeapObjects() {
|
||||
translated_state_.Prepare(reinterpret_cast<Address>(stack_fp_));
|
||||
translated_state_.Prepare(static_cast<Address>(stack_fp_));
|
||||
if (FLAG_deopt_every_n_times > 0) {
|
||||
// Doing a GC here will find problems with the deoptimized frames.
|
||||
isolate_->heap()->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask,
|
||||
@ -1841,7 +1838,7 @@ void Deoptimizer::MaterializeHeapObjects() {
|
||||
|
||||
if (trace_scope_ != nullptr) {
|
||||
PrintF("Materialization [0x%08" V8PRIxPTR "] <- 0x%08" V8PRIxPTR " ; ",
|
||||
reinterpret_cast<intptr_t>(materialization.output_slot_address_),
|
||||
static_cast<intptr_t>(materialization.output_slot_address_),
|
||||
reinterpret_cast<intptr_t>(*value));
|
||||
value->ShortPrint(trace_scope_->file());
|
||||
PrintF(trace_scope_->file(), "\n");
|
||||
@ -1861,7 +1858,7 @@ void Deoptimizer::MaterializeHeapObjects() {
|
||||
}
|
||||
|
||||
isolate_->materialized_object_store()->Remove(
|
||||
reinterpret_cast<Address>(stack_fp_));
|
||||
static_cast<Address>(stack_fp_));
|
||||
}
|
||||
|
||||
|
||||
@ -1876,9 +1873,8 @@ void Deoptimizer::WriteTranslatedValueToOutput(
|
||||
|
||||
if (value == isolate_->heap()->arguments_marker()) {
|
||||
Address output_address =
|
||||
reinterpret_cast<Address>(output_[frame_index]->GetTop()) +
|
||||
output_offset;
|
||||
if (output_address_for_materialization == nullptr) {
|
||||
static_cast<Address>(output_[frame_index]->GetTop()) + output_offset;
|
||||
if (output_address_for_materialization == kNullAddress) {
|
||||
output_address_for_materialization = output_address;
|
||||
}
|
||||
values_to_materialize_.push_back(
|
||||
@ -1910,11 +1906,10 @@ void Deoptimizer::DebugPrintOutputSlot(intptr_t value, int frame_index,
|
||||
const char* debug_hint_string) {
|
||||
if (trace_scope_ != nullptr) {
|
||||
Address output_address =
|
||||
reinterpret_cast<Address>(output_[frame_index]->GetTop()) +
|
||||
output_offset;
|
||||
static_cast<Address>(output_[frame_index]->GetTop()) + output_offset;
|
||||
PrintF(trace_scope_->file(),
|
||||
" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08" V8PRIxPTR " ; %s",
|
||||
reinterpret_cast<intptr_t>(output_address), output_offset, value,
|
||||
output_address, output_offset, value,
|
||||
debug_hint_string == nullptr ? "" : debug_hint_string);
|
||||
}
|
||||
}
|
||||
|
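Note: every deoptimizer hunk above repeats one idiom: a frame-slot address is the frame top plus an offset. With Address as uintptr_t this is plain unsigned addition, which is always defined, whereas byte* arithmetic past the end of the frame object would be undefined behavior. A minimal sketch, with GetTop() reduced to a plain parameter:

#include <cstdint>

using Address = uintptr_t;

Address FrameSlotAddress(Address frame_top, unsigned output_offset) {
  // Unsigned integer addition: well-defined regardless of what the operands
  // refer to, unlike out-of-bounds pointer arithmetic.
  return frame_top + output_offset;
}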
@ -362,7 +362,7 @@ class TranslatedState {

  std::vector<TranslatedFrame> frames_;
  Isolate* isolate_ = nullptr;
  Address stack_frame_pointer_ = nullptr;
  Address stack_frame_pointer_ = kNullAddress;
  int formal_parameter_count_;

  struct ObjectPosition {
@ -398,7 +398,7 @@ class Deoptimizer : public Malloced {
    static const int kNoDeoptId = -1;
  };

  static DeoptInfo GetDeoptInfo(Code* code, byte* from);
  static DeoptInfo GetDeoptInfo(Code* code, Address from);

  static int ComputeSourcePositionFromBytecodeArray(SharedFunctionInfo* shared,
                                                    BailoutId node_id);
@ -557,7 +557,7 @@ class Deoptimizer : public Malloced {
  void WriteTranslatedValueToOutput(
      TranslatedFrame::iterator* iterator, int* input_index, int frame_index,
      unsigned output_offset, const char* debug_hint_string = nullptr,
      Address output_address_for_materialization = nullptr);
      Address output_address_for_materialization = kNullAddress);
  void WriteValueToOutput(Object* value, int input_index, int frame_index,
                          unsigned output_offset,
                          const char* debug_hint_string);
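Note: nullptr no longer converts to the integral Address, so sentinels and default arguments switch to kNullAddress, as in the header change above. A minimal sketch of the idiom (the fallback body is illustrative only):

#include <cstdint>

using Address = uintptr_t;
constexpr Address kNullAddress = 0;

// kNullAddress plays the role nullptr used to play for default arguments.
void WriteValue(unsigned offset, Address materialization_slot = kNullAddress) {
  if (materialization_slot == kNullAddress) {
    // Caller passed no explicit slot; derive one from the offset.
    materialization_slot = static_cast<Address>(offset);
  }
  // ... write through materialization_slot ...
}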
@ -41,14 +41,16 @@ class V8NameConverter: public disasm::NameConverter {
const char* V8NameConverter::NameOfAddress(byte* pc) const {
  if (code_ != nullptr) {
    Isolate* isolate = code_->GetIsolate();
    const char* name = isolate->builtins()->Lookup(pc);
    const char* name =
        isolate->builtins()->Lookup(reinterpret_cast<Address>(pc));

    if (name != nullptr) {
      SNPrintF(v8_buffer_, "%p (%s)", static_cast<void*>(pc), name);
      return v8_buffer_.start();
    }

    int offs = static_cast<int>(pc - code_->raw_instruction_start());
    int offs = static_cast<int>(reinterpret_cast<Address>(pc) -
                                code_->raw_instruction_start());
    // print as code offset, if it seems reasonable
    if (0 <= offs && offs < code_->raw_instruction_size()) {
      SNPrintF(v8_buffer_, "%p <+0x%x>", static_cast<void*>(pc), offs);
@ -56,7 +58,8 @@ const char* V8NameConverter::NameOfAddress(byte* pc) const {
    }

    wasm::WasmCode* wasm_code =
        isolate->wasm_engine()->code_manager()->LookupCode(pc);
        isolate->wasm_engine()->code_manager()->LookupCode(
            reinterpret_cast<Address>(pc));
    if (wasm_code != nullptr) {
      SNPrintF(v8_buffer_, "%p (%s)", static_cast<void*>(pc),
               GetWasmCodeKindAsString(wasm_code->kind()));
@ -203,7 +206,8 @@ static int DecodeIt(Isolate* isolate, std::ostream* os,
                 *reinterpret_cast<int32_t*>(pc), num_const);
        constants = num_const;
        pc += 4;
      } else if (it != nullptr && !it->done() && it->rinfo()->pc() == pc &&
      } else if (it != nullptr && !it->done() &&
                 it->rinfo()->pc() == reinterpret_cast<Address>(pc) &&
                 it->rinfo()->rmode() == RelocInfo::INTERNAL_REFERENCE) {
        // raw pointer embedded in code stream, e.g., jump table
        byte* ptr = *reinterpret_cast<byte**>(pc);
@ -219,11 +223,11 @@ static int DecodeIt(Isolate* isolate, std::ostream* os,

    // Collect RelocInfo for this instruction (prev_pc .. pc-1)
    std::vector<const char*> comments;
    std::vector<byte*> pcs;
    std::vector<Address> pcs;
    std::vector<RelocInfo::Mode> rmodes;
    std::vector<intptr_t> datas;
    if (it != nullptr) {
      while (!it->done() && it->rinfo()->pc() < pc) {
      while (!it->done() && it->rinfo()->pc() < reinterpret_cast<Address>(pc)) {
        if (RelocInfo::IsComment(it->rinfo()->rmode())) {
          // For comments just collect the text.
          comments.push_back(
@ -260,7 +264,7 @@ static int DecodeIt(Isolate* isolate, std::ostream* os,
      // Put together the reloc info
      Code* host = converter.code();
      RelocInfo relocinfo(pcs[i], rmodes[i], datas[i], host);
      relocinfo.set_constant_pool(host ? host->constant_pool() : nullptr);
      relocinfo.set_constant_pool(host ? host->constant_pool() : kNullAddress);

      bool first_reloc_info = (i == 0);
      PrintRelocInfo(&out, isolate, ref_encoder, os, &relocinfo,
@ -271,9 +275,10 @@ static int DecodeIt(Isolate* isolate, std::ostream* os,
    // already, check if we can find some RelocInfo for the target address in
    // the constant pool.
    if (pcs.empty() && converter.code() != nullptr) {
      RelocInfo dummy_rinfo(prev_pc, RelocInfo::NONE, 0, nullptr);
      RelocInfo dummy_rinfo(reinterpret_cast<Address>(prev_pc), RelocInfo::NONE,
                            0, nullptr);
      if (dummy_rinfo.IsInConstantPool()) {
        byte* constant_pool_entry_address =
        Address constant_pool_entry_address =
            dummy_rinfo.constant_pool_entry_address();
        RelocIterator reloc_it(converter.code());
        while (!reloc_it.done()) {

@ -577,7 +577,8 @@ void EhFrameDisassembler::DumpDwarfDirectives(std::ostream& stream,  // NOLINT

void EhFrameDisassembler::DisassembleToStream(std::ostream& stream) {  // NOLINT
  // The encoded CIE size does not include the size field itself.
  const int cie_size = ReadUnalignedUInt32(start_) + kInt32Size;
  const int cie_size =
      ReadUnalignedUInt32(reinterpret_cast<Address>(start_)) + kInt32Size;
  const int fde_offset = cie_size;

  const byte* cie_directives_start =
@ -588,13 +589,15 @@ void EhFrameDisassembler::DisassembleToStream(std::ostream& stream) {  // NOLINT
  stream << reinterpret_cast<const void*>(start_) << " .eh_frame: CIE\n";
  DumpDwarfDirectives(stream, cie_directives_start, cie_directives_end);

  const byte* procedure_offset_address =
      start_ + fde_offset + EhFrameConstants::kProcedureAddressOffsetInFde;
  Address procedure_offset_address =
      reinterpret_cast<Address>(start_) + fde_offset +
      EhFrameConstants::kProcedureAddressOffsetInFde;
  int32_t procedure_offset =
      ReadUnalignedValue<int32_t>(procedure_offset_address);

  const byte* procedure_size_address =
      start_ + fde_offset + EhFrameConstants::kProcedureSizeOffsetInFde;
  Address procedure_size_address = reinterpret_cast<Address>(start_) +
                                   fde_offset +
                                   EhFrameConstants::kProcedureSizeOffsetInFde;
  uint32_t procedure_size = ReadUnalignedUInt32(procedure_size_address);

  const byte* fde_start = start_ + fde_offset;

@ -140,10 +140,14 @@ class V8_EXPORT_PRIVATE EhFrameWriter {
    WriteBytes(reinterpret_cast<const byte*>(&value), sizeof(value));
  }
  void PatchInt32(int base_offset, uint32_t value) {
    DCHECK_EQ(ReadUnalignedUInt32(eh_frame_buffer_.data() + base_offset),
    DCHECK_EQ(
        ReadUnalignedUInt32(reinterpret_cast<Address>(eh_frame_buffer_.data()) +
                            base_offset),
        kInt32Placeholder);
    DCHECK_LT(base_offset + kInt32Size, eh_frame_offset());
    WriteUnalignedUInt32(eh_frame_buffer_.data() + base_offset, value);
    WriteUnalignedUInt32(
        reinterpret_cast<Address>(eh_frame_buffer_.data()) + base_offset,
        value);
  }

  // Write the common information entry, which includes encoding specifiers,
@ -209,7 +213,7 @@ class V8_EXPORT_PRIVATE EhFrameIterator {

  void SkipCie() {
    DCHECK_EQ(next_, start_);
    next_ += ReadUnalignedUInt32(next_) + kInt32Size;
    next_ += ReadUnalignedUInt32(reinterpret_cast<Address>(next_)) + kInt32Size;
  }

  void SkipToFdeDirectives() {
@ -260,7 +264,7 @@ class V8_EXPORT_PRIVATE EhFrameIterator {
  T GetNextValue() {
    T result;
    DCHECK_LE(next_ + sizeof(result), end_);
    result = ReadUnalignedValue<T>(next_);
    result = ReadUnalignedValue<T>(reinterpret_cast<Address>(next_));
    next_ += sizeof(result);
    return result;
  }
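Note: the eh-frame and disassembler hunks funnel byte* buffers through reinterpret_cast<Address> because the unaligned-read helpers now key on the integral type. A sketch of what such a helper plausibly looks like; the real V8 implementation may differ in detail:

#include <cstdint>
#include <cstring>

using Address = uintptr_t;

template <typename T>
T ReadUnalignedValue(Address addr) {
  // memcpy makes the unaligned read well-defined on every target.
  T result;
  std::memcpy(&result, reinterpret_cast<const void*>(addr), sizeof(result));
  return result;
}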
@ -27,8 +27,8 @@ BUILTIN_LIST_C(FORWARD_DECLARE)
void ExternalReferenceTable::Init(Isolate* isolate) {
  int index = 0;

  // nullptr is preserved through serialization/deserialization.
  Add(nullptr, "nullptr", &index);
  // kNullAddress is preserved through serialization/deserialization.
  Add(kNullAddress, "nullptr", &index);
  AddReferences(isolate, &index);
  AddBuiltins(isolate, &index);
  AddRuntimeFunctions(isolate, &index);

@ -65,7 +65,7 @@ class ExternalReferenceTable {
    Address address;
    const char* name;

    ExternalReferenceEntry() : address(nullptr), name(nullptr) {}
    ExternalReferenceEntry() : address(kNullAddress), name(nullptr) {}
    ExternalReferenceEntry(Address address, const char* name)
        : address(address), name(name) {}
  };

@ -215,15 +215,12 @@ void ExternalReference::set_redirector(
}

// static
void* ExternalReference::Redirect(Isolate* isolate, Address address_arg,
Address ExternalReference::Redirect(Isolate* isolate, Address address,
                                    Type type) {
  ExternalReferenceRedirector* redirector =
      reinterpret_cast<ExternalReferenceRedirector*>(
          isolate->external_reference_redirector());
  void* address = reinterpret_cast<void*>(address_arg);
  void* answer =
      (redirector == nullptr) ? address : (*redirector)(address, type);
  return answer;
  return (redirector == nullptr) ? address : (*redirector)(address, type);
}

ExternalReference ExternalReference::stress_deopt_count(Isolate* isolate) {
@ -1018,7 +1015,7 @@ size_t hash_value(ExternalReference reference) {
}

std::ostream& operator<<(std::ostream& os, ExternalReference reference) {
  os << static_cast<const void*>(reference.address());
  os << reinterpret_cast<const void*>(reference.address());
  const Runtime::Function* fn = Runtime::FunctionForEntry(reference.address());
  if (fn) os << "<" << fn->name << ".entry>";
  return os;

@ -250,9 +250,9 @@ class ExternalReference BASE_EMBEDDED {

  static void SetUp();

  typedef void* ExternalReferenceRedirector(void* original, Type type);
  typedef Address ExternalReferenceRedirector(Address original, Type type);

  ExternalReference() : address_(nullptr) {}
  ExternalReference() : address_(kNullAddress) {}
  explicit ExternalReference(const SCTableReference& table_ref);
  explicit ExternalReference(StatsCounter* counter);
  ExternalReference(Address address, Isolate* isolate);
@ -276,7 +276,7 @@ class ExternalReference BASE_EMBEDDED {
  V8_EXPORT_PRIVATE V8_NOINLINE static ExternalReference
  runtime_function_table_address_for_unittests(Isolate* isolate);

  Address address() const { return reinterpret_cast<Address>(address_); }
  Address address() const { return address_; }

  // This lets you register a function that rewrites all external references.
  // Used by the ARM simulator to catch calls to external references.
@ -284,12 +284,14 @@ class ExternalReference BASE_EMBEDDED {
                             ExternalReferenceRedirector* redirector);

 private:
  explicit ExternalReference(void* address) : address_(address) {}
  explicit ExternalReference(void* address)
      : address_(reinterpret_cast<Address>(address)) {}
  explicit ExternalReference(Address address) : address_(address) {}

  static void* Redirect(Isolate* isolate, Address address_arg,
  static Address Redirect(Isolate* isolate, Address address_arg,
                          Type type = ExternalReference::BUILTIN_CALL);

  void* address_;
  Address address_;
};

V8_EXPORT_PRIVATE bool operator==(ExternalReference, ExternalReference);
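Note: the redirector callback now takes and returns Address directly, which removes the void* shim that Redirect used to build. A minimal sketch of the new shape, with names simplified from the real API:

#include <cstdint>

using Address = uintptr_t;
using Redirector = Address (*)(Address original, int type);

Address Redirect(Redirector redirector, Address addr, int type) {
  // Same structure as the rewritten ExternalReference::Redirect: pass the
  // integral address straight through when no redirector is installed.
  return (redirector == nullptr) ? addr : (*redirector)(addr, type);
}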
@ -236,7 +236,7 @@ SafeStackFrameIterator::SafeStackFrameIterator(
    type = ExitFrame::GetStateForFramePointer(Isolate::c_entry_fp(top), &state);
    top_frame_type_ = type;
  } else if (IsValidStackAddress(fp)) {
    DCHECK_NOT_NULL(fp);
    DCHECK_NE(fp, kNullAddress);
    state.fp = fp;
    state.sp = sp;
    state.pc_address = StackFrame::ResolveReturnAddressLocation(
@ -247,7 +247,7 @@ SafeStackFrameIterator::SafeStackFrameIterator(
    // case, set the PC properly and make sure we do not drop the frame.
    if (IsValidStackAddress(sp)) {
      MSAN_MEMORY_IS_INITIALIZED(sp, kPointerSize);
      Address tos = ReadMemoryAt(reinterpret_cast<Address>(sp));
      Address tos = ReadMemoryAt(sp);
      if (IsInterpreterFramePc(isolate, tos, &state)) {
        state.pc_address = reinterpret_cast<Address*>(sp);
        advance_frame = false;
@ -293,7 +293,7 @@ bool SafeStackFrameIterator::IsValidTop(ThreadLocalTop* top) const {
  if (!IsValidExitFrame(c_entry_fp)) return false;
  // There should be at least one JS_ENTRY stack handler.
  Address handler = Isolate::handler(top);
  if (handler == nullptr) return false;
  if (handler == kNullAddress) return false;
  // Check that there are no js frames on top of the native frames.
  return c_entry_fp < handler;
}
@ -359,7 +359,7 @@ bool SafeStackFrameIterator::IsValidExitFrame(Address fp) const {
  StackFrame::State state;
  ExitFrame::FillState(fp, sp, &state);
  MSAN_MEMORY_IS_INITIALIZED(state.pc_address, sizeof(state.pc_address));
  return *state.pc_address != nullptr;
  return *state.pc_address != kNullAddress;
}

@ -435,7 +435,7 @@ void StackFrame::SetReturnAddressLocationResolver(

StackFrame::Type StackFrame::ComputeType(const StackFrameIteratorBase* iterator,
                                         State* state) {
  DCHECK_NOT_NULL(state->fp);
  DCHECK_NE(state->fp, kNullAddress);

  MSAN_MEMORY_IS_INITIALIZED(
      state->fp + CommonFrameConstants::kContextOrFrameTypeOffset,
@ -640,7 +640,7 @@ StackFrame::Type ExitFrame::GetStateForFramePointer(Address fp, State* state) {
  if (fp == 0) return NONE;
  Address sp = ComputeStackPointer(fp);
  FillState(fp, sp, state);
  DCHECK_NOT_NULL(*state->pc_address);
  DCHECK_NE(*state->pc_address, kNullAddress);

  return ComputeFrameType(fp);
}
@ -734,7 +734,7 @@ void StackFrame::Print(StringStream* accumulator, PrintMode mode,
  DisallowHeapAllocation no_gc;
  PrintIndex(accumulator, mode, index);
  accumulator->Add(StringForStackFrameType(type()));
  accumulator->Add(" [pc: %p]\n", pc());
  accumulator->Add(" [pc: %p]\n", reinterpret_cast<void*>(pc()));
}

void BuiltinExitFrame::Print(StringStream* accumulator, PrintMode mode,
@ -838,7 +838,7 @@ void StandardFrame::IterateCompiledFrame(RootVisitor* v) const {
  Code* code = nullptr;
  bool has_tagged_params = false;
  if (wasm_code != nullptr) {
    SafepointTable table(wasm_code->instructions().start(),
    SafepointTable table(wasm_code->instruction_start(),
                         wasm_code->safepoint_table_offset(),
                         wasm_code->stack_slots());
    safepoint_entry = table.FindEntry(inner_pointer);
@ -1777,8 +1777,7 @@ void WasmCompiledFrame::Print(StringStream* accumulator, PrintMode mode,
                                   ->wasm_engine()
                                   ->code_manager()
                                   ->LookupCode(pc())
                                   ->instructions()
                                   .start();
                                   ->instruction_start();
  int pc = static_cast<int>(this->pc() - instruction_start);
  Vector<const uint8_t> raw_func_name =
      shared()->GetRawFunctionName(this->function_index());
@ -1835,7 +1834,7 @@ int WasmCompiledFrame::position() const {
void WasmCompiledFrame::Summarize(std::vector<FrameSummary>* functions) const {
  DCHECK(functions->empty());
  wasm::WasmCode* code = wasm_code();
  int offset = static_cast<int>(pc() - code->instructions().start());
  int offset = static_cast<int>(pc() - code->instruction_start());
  Handle<WasmInstanceObject> instance(
      LookupWasmInstanceObjectFromStandardFrame(this), isolate());
  FrameSummary::WasmCompiledFrameSummary summary(
@ -1846,13 +1845,12 @@ void WasmCompiledFrame::Summarize(std::vector<FrameSummary>* functions) const {
bool WasmCompiledFrame::at_to_number_conversion() const {
  // Check whether our callee is a WASM_TO_JS frame, and this frame is at the
  // ToNumber conversion call.
  Address callee_pc = reinterpret_cast<Address>(this->callee_pc());
  wasm::WasmCode* code =
      callee_pc
          ? isolate()->wasm_engine()->code_manager()->LookupCode(callee_pc)
      callee_pc() != kNullAddress
          ? isolate()->wasm_engine()->code_manager()->LookupCode(callee_pc())
          : nullptr;
  if (!code || code->kind() != wasm::WasmCode::kWasmToJsWrapper) return false;
  int offset = static_cast<int>(callee_pc - code->instructions().start());
  int offset = static_cast<int>(callee_pc() - code->instruction_start());
  int pos = FrameSummary::WasmCompiledFrameSummary::GetWasmSourcePosition(
      code, offset);
  DCHECK(pos == 0 || pos == 1);
@ -1865,9 +1863,8 @@ int WasmCompiledFrame::LookupExceptionHandlerInTable(int* stack_slots) {
  wasm::WasmCode* code =
      isolate()->wasm_engine()->code_manager()->LookupCode(pc());
  if (!code->IsAnonymous() && code->handler_table_offset() > 0) {
    HandlerTable table(code->instructions().start(),
                       code->handler_table_offset());
    int pc_offset = static_cast<int>(pc() - code->instructions().start());
    HandlerTable table(code->instruction_start(), code->handler_table_offset());
    int pc_offset = static_cast<int>(pc() - code->instruction_start());
    *stack_slots = static_cast<int>(code->stack_slots());
    return table.LookupReturn(pc_offset);
  }
@ -1982,7 +1979,6 @@ void JavaScriptFrame::Print(StringStream* accumulator,
    accumulator->Add(" [");
    accumulator->PrintName(script->name());

    Address pc = this->pc();
    if (is_interpreted()) {
      const InterpretedFrame* iframe =
          reinterpret_cast<const InterpretedFrame*>(this);
@ -1994,7 +1990,7 @@ void JavaScriptFrame::Print(StringStream* accumulator,
    } else {
      int function_start_pos = shared->StartPosition();
      int line = script->GetLineNumber(function_start_pos) + 1;
      accumulator->Add(":~%d] [pc=%p]", line, pc);
      accumulator->Add(":~%d] [pc=%p]", line, reinterpret_cast<void*>(pc()));
    }
  }

@ -2164,7 +2160,8 @@ InnerPointerToCodeCache::InnerPointerToCodeCacheEntry*
InnerPointerToCodeCache::GetCacheEntry(Address inner_pointer) {
  isolate_->counters()->pc_to_code()->Increment();
  DCHECK(base::bits::IsPowerOfTwo(kInnerPointerToCodeCacheSize));
  uint32_t hash = ComputeIntegerHash(ObjectAddressForHashing(inner_pointer));
  uint32_t hash = ComputeIntegerHash(
      ObjectAddressForHashing(reinterpret_cast<void*>(inner_pointer)));
  uint32_t index = hash & (kInnerPointerToCodeCacheSize - 1);
  InnerPointerToCodeCacheEntry* entry = cache(index);
  if (entry->inner_pointer == inner_pointer) {

@ -146,8 +146,8 @@ class StackFrame BASE_EMBEDDED {
                                        kHeapObjectTag);

  struct State {
    Address sp = nullptr;
    Address fp = nullptr;
    Address sp = kNullAddress;
    Address fp = kNullAddress;
    Address* pc_address = nullptr;
    Address* callee_pc_address = nullptr;
    Address* constant_pool_address = nullptr;
@ -237,7 +237,7 @@ class StackFrame BASE_EMBEDDED {
  Address sp() const { return state_.sp; }
  Address fp() const { return state_.fp; }
  Address callee_pc() const {
    return state_.callee_pc_address ? *state_.callee_pc_address : nullptr;
    return state_.callee_pc_address ? *state_.callee_pc_address : kNullAddress;
  }
  Address caller_sp() const { return GetCallerStackPointer(); }
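Note: one recurring cost of the integral Address shows up in the frame printers above: %p expects a real pointer, so the value is converted back just for formatting. A minimal sketch:

#include <cstdint>
#include <cstdio>

using Address = uintptr_t;

void PrintPc(Address pc) {
  // The reinterpret_cast is confined to the printing boundary.
  std::printf(" [pc: %p]\n", reinterpret_cast<void*>(pc));
}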
@ -979,11 +979,11 @@ class CodeDescription BASE_EMBEDDED {
  }

  uintptr_t CodeStart() const {
    return reinterpret_cast<uintptr_t>(code_->InstructionStart());
    return static_cast<uintptr_t>(code_->InstructionStart());
  }

  uintptr_t CodeEnd() const {
    return reinterpret_cast<uintptr_t>(code_->InstructionEnd());
    return static_cast<uintptr_t>(code_->InstructionEnd());
  }

  uintptr_t CodeSize() const {
@ -1900,7 +1900,8 @@ static JITCodeEntry* CreateCodeEntry(Address symfile_addr,

  entry->symfile_addr_ = reinterpret_cast<Address>(entry + 1);
  entry->symfile_size_ = symfile_size;
  MemCopy(entry->symfile_addr_, symfile_addr, symfile_size);
  MemCopy(reinterpret_cast<void*>(entry->symfile_addr_),
          reinterpret_cast<void*>(symfile_addr), symfile_size);

  entry->prev_ = entry->next_ = nullptr;

@ -1976,7 +1977,7 @@ static JITCodeEntry* CreateELFObject(CodeDescription* desc, Isolate* isolate) {
  elf.Write(&w);
#endif

  return CreateCodeEntry(w.buffer(), w.position());
  return CreateCodeEntry(reinterpret_cast<Address>(w.buffer()), w.position());
}

@ -2010,8 +2011,7 @@ static CodeMap* GetCodeMap() {

static uint32_t HashCodeAddress(Address addr) {
  static const uintptr_t kGoldenRatio = 2654435761u;
  uintptr_t offset = OffsetFrom(addr);
  return static_cast<uint32_t>((offset >> kCodeAlignmentBits) * kGoldenRatio);
  return static_cast<uint32_t>((addr >> kCodeAlignmentBits) * kGoldenRatio);
}

static base::HashMap* GetLineMap() {
@ -2025,15 +2025,16 @@ static base::HashMap* GetLineMap() {

static void PutLineInfo(Address addr, LineInfo* info) {
  base::HashMap* line_map = GetLineMap();
  base::HashMap::Entry* e =
      line_map->LookupOrInsert(addr, HashCodeAddress(addr));
  base::HashMap::Entry* e = line_map->LookupOrInsert(
      reinterpret_cast<void*>(addr), HashCodeAddress(addr));
  if (e->value != nullptr) delete static_cast<LineInfo*>(e->value);
  e->value = info;
}

static LineInfo* GetLineInfo(Address addr) {
  void* value = GetLineMap()->Remove(addr, HashCodeAddress(addr));
  void* value = GetLineMap()->Remove(reinterpret_cast<void*>(addr),
                                     HashCodeAddress(addr));
  return static_cast<LineInfo*>(value);
}

@ -2121,7 +2122,7 @@ static void AddJITCodeEntry(CodeMap* map, const AddressRange& range,

  SNPrintF(Vector<char>(file_name, kMaxFileNameSize), "/tmp/elfdump%s%d.o",
           (name_hint != nullptr) ? name_hint : "", file_num++);
  WriteBytes(file_name, entry->symfile_addr_,
  WriteBytes(file_name, reinterpret_cast<byte*>(entry->symfile_addr_),
             static_cast<int>(entry->symfile_size_));
}
#endif

@ -50,7 +50,6 @@ namespace v8 {
namespace base {
class Mutex;
class RecursiveMutex;
class VirtualMemory;
}

namespace internal {
@ -127,7 +126,8 @@ class AllStatic {
#define BASE_EMBEDDED

typedef uint8_t byte;
typedef byte* Address;
typedef uintptr_t Address;
static const Address kNullAddress = 0;

// -----------------------------------------------------------------------------
// Constants
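Note: the typedef above is the heart of the CL. Relational comparisons and arithmetic on unrelated byte* values are undefined behavior outside a single array, while the same operations on uintptr_t are fully defined. A minimal sketch of a range check that is UB-clean only in the integral form:

#include <cstddef>
#include <cstdint>

typedef uintptr_t Address;
static const Address kNullAddress = 0;

bool Contains(Address start, size_t size, Address addr) {
  // start and addr may come from unrelated allocations; as integers this
  // comparison is well-defined, as byte* it would not be.
  return start <= addr && addr < start + size;
}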
@ -275,15 +275,18 @@ constexpr int kUC16Size = sizeof(uc16);  // NOLINT
constexpr int kSimd128Size = 16;

// FUNCTION_ADDR(f) gets the address of a C function f.
#define FUNCTION_ADDR(f) \
  (reinterpret_cast<v8::internal::Address>(reinterpret_cast<intptr_t>(f)))

#define FUNCTION_ADDR(f) (reinterpret_cast<v8::internal::Address>(f))

// FUNCTION_CAST<F>(addr) casts an address into a function
// of type F. Used to invoke generated code from within C.
template <typename F>
F FUNCTION_CAST(byte* addr) {
  return reinterpret_cast<F>(reinterpret_cast<Address>(addr));
}

template <typename F>
F FUNCTION_CAST(Address addr) {
  return reinterpret_cast<F>(reinterpret_cast<intptr_t>(addr));
  return reinterpret_cast<F>(addr);
}

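Note: FUNCTION_ADDR and FUNCTION_CAST are now a direct integer round-trip. Casting a function pointer through uintptr_t and back is the classic conditionally-supported conversion that works on the platforms V8 targets. A self-contained sketch:

#include <cstdint>

using Address = uintptr_t;

int Add(int a, int b) { return a + b; }

int main() {
  Address addr = reinterpret_cast<Address>(&Add);        // FUNCTION_ADDR
  auto* fn = reinterpret_cast<int (*)(int, int)>(addr);  // FUNCTION_CAST
  return fn(2, 3) == 5 ? 0 : 1;
}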
@ -1225,8 +1228,7 @@ inline std::ostream& operator<<(std::ostream& os,
inline uint32_t ObjectHash(Address address) {
  // All objects are at least pointer aligned, so we can remove the trailing
  // zeros.
  return static_cast<uint32_t>(bit_cast<uintptr_t>(address) >>
                               kPointerSizeLog2);
  return static_cast<uint32_t>(address >> kPointerSizeLog2);
}

// Type feedback is encoded in such a way that, we can combine the feedback

@ -25,7 +25,8 @@ HandlerTable::HandlerTable(ByteArray* byte_array)
#ifdef DEBUG
      mode_(kRangeBasedEncoding),
#endif
      raw_encoded_data_(byte_array->GetDataStartAddress()) {
      raw_encoded_data_(
          reinterpret_cast<Address>(byte_array->GetDataStartAddress())) {
}

HandlerTable::HandlerTable(Address instruction_start,

@ -107,7 +107,7 @@ void HandleScope::DeleteExtensions(Isolate* isolate) {
void HandleScope::ZapRange(Object** start, Object** end) {
  DCHECK_LE(end - start, kHandleBlockSize);
  for (Object** p = start; p != end; p++) {
    *reinterpret_cast<Address*>(p) = reinterpret_cast<Address>(kHandleZapValue);
    *reinterpret_cast<Address*>(p) = static_cast<Address>(kHandleZapValue);
  }
}
#endif

@ -144,7 +144,7 @@ class Handle final : public HandleBase {
  // Provide function object for location hashing.
  struct hash : public std::unary_function<Handle<T>, size_t> {
    V8_INLINE size_t operator()(Handle<T> const& handle) const {
      return base::hash<void*>()(handle.address());
      return base::hash<Address>()(handle.address());
    }
  };

@ -165,7 +165,7 @@ void CodeStatistics::CollectCommentStatistics(Isolate* isolate,
      // Search for end of nested comment or a new nested comment
      const char* const comment_txt =
          reinterpret_cast<const char*>(it->rinfo()->data());
      const byte* prev_pc = it->rinfo()->pc();
      Address prev_pc = it->rinfo()->pc();
      int flat_delta = 0;
      it->next();
      while (true) {
@ -199,7 +199,7 @@ void CodeStatistics::CollectCodeCommentStatistics(HeapObject* obj,
  Code* code = Code::cast(obj);
  RelocIterator it(code);
  int delta = 0;
  const byte* prev_pc = code->raw_instruction_start();
  Address prev_pc = code->raw_instruction_start();
  while (!it.done()) {
    if (it.rinfo()->rmode() == RelocInfo::COMMENT) {
      delta += static_cast<int>(it.rinfo()->pc() - prev_pc);

@ -380,8 +380,8 @@ Handle<FeedbackMetadata> Factory::NewFeedbackMetadata(int slot_count) {

  // Initialize the data section to 0.
  int data_size = size - FeedbackMetadata::kHeaderSize;
  byte* data_start = data->address() + FeedbackMetadata::kHeaderSize;
  memset(data_start, 0, data_size);
  Address data_start = data->address() + FeedbackMetadata::kHeaderSize;
  memset(reinterpret_cast<byte*>(data_start), 0, data_size);
  // Fields have been zeroed out but not initialized, so this object will not
  // pass object verification at this point.
  return data;
@ -671,14 +671,13 @@ Handle<String> Factory::AllocateTwoByteInternalizedString(
  Map* map = *internalized_string_map();
  int size = SeqTwoByteString::SizeFor(str.length());
  HeapObject* result = AllocateRawWithImmortalMap(size, TENURED, map);
  Handle<String> answer(String::cast(result), isolate());
  Handle<SeqTwoByteString> answer(SeqTwoByteString::cast(result), isolate());
  answer->set_length(str.length());
  answer->set_hash_field(hash_field);
  DCHECK_EQ(size, answer->Size());

  // Fill in the characters.
  MemCopy(answer->address() + SeqTwoByteString::kHeaderSize, str.start(),
          str.length() * kUC16Size);
  MemCopy(answer->GetChars(), str.start(), str.length() * kUC16Size);

  return answer;
}
@ -1501,7 +1500,8 @@ Handle<BytecodeArray> Factory::NewBytecodeArray(
  instance->set_constant_pool(*constant_pool);
  instance->set_handler_table(*empty_byte_array());
  instance->set_source_position_table(*empty_byte_array());
  CopyBytes(instance->GetFirstBytecodeAddress(), raw_bytecodes, length);
  CopyBytes(reinterpret_cast<byte*>(instance->GetFirstBytecodeAddress()),
            raw_bytecodes, length);
  instance->clear_padding();

  return instance;
@ -1539,8 +1539,9 @@ Handle<FixedTypedArrayBase> Factory::NewFixedTypedArray(
      isolate());
  elements->set_base_pointer(*elements, SKIP_WRITE_BARRIER);
  elements->set_external_pointer(
      reinterpret_cast<void*>(
          ExternalReference::fixed_typed_array_base_data_offset(isolate())
              .address(),
              .address()),
      SKIP_WRITE_BARRIER);
  elements->set_length(static_cast<int>(length));
  if (initialize) memset(elements->DataPtr(), 0, elements->DataSize());
@ -2269,7 +2270,7 @@ Handle<PreParsedScopeData> Factory::NewPreParsedScopeData() {
}

Handle<JSObject> Factory::NewExternal(void* value) {
  Handle<Foreign> foreign = NewForeign(static_cast<Address>(value));
  Handle<Foreign> foreign = NewForeign(reinterpret_cast<Address>(value));
  Handle<JSObject> external = NewJSObjectFromMap(external_map());
  external->SetEmbedderField(0, *foreign);
  return external;
@ -2344,7 +2345,7 @@ Handle<Code> Factory::NewCode(

  result->set_map_after_allocation(*code_map(), SKIP_WRITE_BARRIER);
  Handle<Code> code(Code::cast(result), isolate());
  DCHECK(IsAligned(bit_cast<intptr_t>(code->address()), kCodeAlignment));
  DCHECK(IsAligned(code->address(), kCodeAlignment));
  DCHECK(!heap->memory_allocator()->code_range()->valid() ||
         heap->memory_allocator()->code_range()->contains(code->address()) ||
         object_size <= heap->code_space()->AreaSize());
@ -2380,7 +2381,7 @@ Handle<Code> Factory::NewCode(
#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) code->ObjectVerify();
#endif
  DCHECK(IsAligned(bit_cast<intptr_t>(code->address()), kCodeAlignment));
  DCHECK(IsAligned(code->address(), kCodeAlignment));
  DCHECK(!isolate()->heap()->memory_allocator()->code_range()->valid() ||
         isolate()->heap()->memory_allocator()->code_range()->contains(
             code->address()) ||
@ -2397,7 +2398,7 @@ Handle<Code> Factory::NewCodeForDeserialization(uint32_t size) {
  heap->UnprotectAndRegisterMemoryChunk(result);
  heap->ZapCodeObject(result->address(), size);
  result->set_map_after_allocation(*code_map(), SKIP_WRITE_BARRIER);
  DCHECK(IsAligned(bit_cast<intptr_t>(result->address()), kCodeAlignment));
  DCHECK(IsAligned(result->address(), kCodeAlignment));
  DCHECK(!heap->memory_allocator()->code_range()->valid() ||
         heap->memory_allocator()->code_range()->contains(result->address()) ||
         static_cast<int>(size) <= heap->code_space()->AreaSize());
@ -2461,7 +2462,7 @@ Handle<Code> Factory::CopyCode(Handle<Code> code) {
#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) new_code->ObjectVerify();
#endif
  DCHECK(IsAligned(bit_cast<intptr_t>(new_code->address()), kCodeAlignment));
  DCHECK(IsAligned(new_code->address(), kCodeAlignment));
  DCHECK(
      !heap->memory_allocator()->code_range()->valid() ||
      heap->memory_allocator()->code_range()->contains(new_code->address()) ||
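Note: the alignment DCHECKs above drop bit_cast because IsAligned can mask the integral Address directly. A minimal sketch of the check:

#include <cstdint>

using Address = uintptr_t;

constexpr bool IsAligned(Address value, Address alignment) {
  // Valid for power-of-two alignments: the low bits must all be zero.
  return (value & (alignment - 1)) == 0;
}

static_assert(IsAligned(0x10000, 32), "example: 32-byte aligned");
static_assert(!IsAligned(0x10001, 32), "example: misaligned by one byte");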
@ -247,7 +247,7 @@ Heap::Heap()
  set_encountered_weak_collections(Smi::kZero);
  // Put a dummy entry in the remembered pages so we can find the list the
  // minidump even if there are no real unmapped pages.
  RememberUnmappedPage(nullptr, false);
  RememberUnmappedPage(kNullAddress, false);
}

size_t Heap::MaxReserved() {
@ -2733,7 +2733,7 @@ HeapObject* Heap::CreateFillerObjectAt(Address addr, int size,
                                 SKIP_WRITE_BARRIER);
    if (clear_memory_mode == ClearFreedMemoryMode::kClearFreedMemory) {
      Memory::Address_at(addr + kPointerSize) =
          reinterpret_cast<Address>(kClearedFreeMemoryValue);
          static_cast<Address>(kClearedFreeMemoryValue);
    }
  } else {
    DCHECK_GT(size, 2 * kPointerSize);
@ -3809,8 +3809,7 @@ void Heap::ZapFromSpace() {
       PageRange(new_space_->FromSpaceStart(), new_space_->FromSpaceEnd())) {
    for (Address cursor = page->area_start(), limit = page->area_end();
         cursor < limit; cursor += kPointerSize) {
      Memory::Address_at(cursor) =
          reinterpret_cast<Address>(kFromSpaceZapValue);
      Memory::Address_at(cursor) = static_cast<Address>(kFromSpaceZapValue);
    }
  }
}
@ -5437,15 +5436,13 @@ void Heap::ExternalStringTable::TearDown() {

void Heap::RememberUnmappedPage(Address page, bool compacted) {
  uintptr_t p = reinterpret_cast<uintptr_t>(page);
  // Tag the page pointer to make it findable in the dump file.
  if (compacted) {
    p ^= 0xC1EAD & (Page::kPageSize - 1);  // Cleared.
    page ^= 0xC1EAD & (Page::kPageSize - 1);  // Cleared.
  } else {
    p ^= 0x1D1ED & (Page::kPageSize - 1);  // I died.
    page ^= 0x1D1ED & (Page::kPageSize - 1);  // I died.
  }
  remembered_unmapped_pages_[remembered_unmapped_pages_index_] =
      reinterpret_cast<Address>(p);
  remembered_unmapped_pages_[remembered_unmapped_pages_index_] = page;
  remembered_unmapped_pages_index_++;
  remembered_unmapped_pages_index_ %= kRememberedUnmappedPages;
}

@ -37,7 +37,7 @@ void IncrementalMarking::Observer::Step(int bytes_allocated, Address addr,
      heap->isolate(),
      RuntimeCallCounterId::kGC_Custom_IncrementalMarkingObserver);
  incremental_marking_.AdvanceIncrementalMarkingOnAllocation();
  if (incremental_marking_.black_allocation() && addr != nullptr) {
  if (incremental_marking_.black_allocation() && addr != kNullAddress) {
    // AdvanceIncrementalMarkingOnAllocation can start black allocation.
    // Ensure that the new object is marked black.
    HeapObject* object = HeapObject::FromAddress(addr);

@ -35,8 +35,8 @@ class LocalAllocator {
    // sits right next to new space allocation top.
    const LinearAllocationArea info = new_space_lab_.Close();
    const Address top = new_space_->top();
    if (info.limit() != nullptr && info.limit() == top) {
      DCHECK_NOT_NULL(info.top());
    if (info.limit() != kNullAddress && info.limit() == top) {
      DCHECK_NE(info.top(), kNullAddress);
      *new_space_->allocation_top_address() = info.top();
    }
  }

@ -1239,7 +1239,7 @@ class EvacuateVisitorBase : public HeapObjectVisitor {
      base->ExecuteMigrationObservers(dest, src, dst, size);
    }
    base::Relaxed_Store(reinterpret_cast<base::AtomicWord*>(src_addr),
                        reinterpret_cast<base::AtomicWord>(dst_addr));
                        static_cast<base::AtomicWord>(dst_addr));
  }

  EvacuateVisitorBase(Heap* heap, LocalAllocator* local_allocator,
@ -1284,8 +1284,7 @@ class EvacuateVisitorBase : public HeapObjectVisitor {
    if (FLAG_stress_compaction) {
      const uintptr_t mask = static_cast<uintptr_t>(FLAG_random_seed) &
                             Page::kPageAlignmentMask & ~kPointerAlignmentMask;
      if ((reinterpret_cast<uintptr_t>(object->address()) &
           Page::kPageAlignmentMask) == mask) {
      if ((object->address() & Page::kPageAlignmentMask) == mask) {
        Page* page = Page::FromAddress(object->address());
        if (page->IsFlagSet(Page::COMPACTION_WAS_ABORTED_FOR_TESTING)) {
          page->ClearFlag(Page::COMPACTION_WAS_ABORTED_FOR_TESTING);
@ -3740,7 +3739,7 @@ void MinorMarkCompactCollector::MakeIterable(
  // remove here.
  MarkCompactCollector* full_collector = heap()->mark_compact_collector();
  Address free_start = p->area_start();
  DCHECK_EQ(0, reinterpret_cast<intptr_t>(free_start) % (32 * kPointerSize));
  DCHECK_EQ(0, free_start % (32 * kPointerSize));

  for (auto object_and_size :
       LiveObjectRange<kGreyObjects>(p, marking_state()->bitmap(p))) {
@ -3754,7 +3753,7 @@ void MinorMarkCompactCollector::MakeIterable(
                                         p->AddressToMarkbitIndex(free_start),
                                         p->AddressToMarkbitIndex(free_end));
      if (free_space_mode == ZAP_FREE_SPACE) {
        memset(free_start, 0xCC, size);
        memset(reinterpret_cast<void*>(free_start), 0xCC, size);
      }
      p->heap()->CreateFillerObjectAt(free_start, static_cast<int>(size),
                                      ClearRecordedSlots::kNo);
@ -3771,7 +3770,7 @@ void MinorMarkCompactCollector::MakeIterable(
                                       p->AddressToMarkbitIndex(free_start),
                                       p->AddressToMarkbitIndex(p->area_end()));
    if (free_space_mode == ZAP_FREE_SPACE) {
      memset(free_start, 0xCC, size);
      memset(reinterpret_cast<void*>(free_start), 0xCC, size);
    }
    p->heap()->CreateFillerObjectAt(free_start, static_cast<int>(size),
                                    ClearRecordedSlots::kNo);

@ -196,7 +196,7 @@ class RememberedSet : public AllStatic {
    if (slot_set == nullptr) {
      slot_set = page->AllocateTypedSlotSet<type>();
    }
    if (host_addr == nullptr) {
    if (host_addr == kNullAddress) {
      host_addr = page->address();
    }
    uintptr_t offset = slot_addr - page->address();

@ -173,8 +173,9 @@ AllocationResult Heap::AllocateEmptyFixedTypedArray(
  FixedTypedArrayBase* elements = FixedTypedArrayBase::cast(object);
  elements->set_base_pointer(elements, SKIP_WRITE_BARRIER);
  elements->set_external_pointer(
      reinterpret_cast<void*>(
          ExternalReference::fixed_typed_array_base_data_offset(isolate())
              .address(),
              .address()),
      SKIP_WRITE_BARRIER);
  elements->set_length(0);
  return elements;

@ -171,7 +171,7 @@ size_t PagedSpace::RelinkFreeListCategories(Page* page) {
}

bool PagedSpace::TryFreeLast(HeapObject* object, int object_size) {
  if (allocation_info_.top() != nullptr) {
  if (allocation_info_.top() != kNullAddress) {
    const Address object_address = object->address();
    if ((allocation_info_.top() - object_size) == object_address) {
      allocation_info_.set_top(object_address);
@ -356,7 +356,7 @@ AllocationResult PagedSpace::AllocateRaw(int size_in_bytes,
      SupportsInlineAllocation()) {
    // Generated code decreased the top() pointer to do folded allocations.
    // The top_on_previous_step_ can be one byte beyond the current page.
    DCHECK_NOT_NULL(top());
    DCHECK_NE(top(), kNullAddress);
    DCHECK_EQ(Page::FromAllocationAreaAddress(top()),
              Page::FromAllocationAreaAddress(top_on_previous_step_ - 1));
    top_on_previous_step_ = top();
@ -396,7 +396,8 @@ AllocationResult NewSpace::AllocateRawAligned(int size_in_bytes,
  int filler_size = Heap::GetFillToAlign(top, alignment);
  int aligned_size_in_bytes = size_in_bytes + filler_size;

  if (allocation_info_.limit() - top < aligned_size_in_bytes) {
  if (allocation_info_.limit() - top <
      static_cast<uintptr_t>(aligned_size_in_bytes)) {
    // See if we can create room.
    if (!EnsureAllocation(size_in_bytes, alignment)) {
      return AllocationResult::Retry();
@ -471,7 +472,8 @@ size_t LargeObjectSpace::Available() {


LocalAllocationBuffer LocalAllocationBuffer::InvalidBuffer() {
  return LocalAllocationBuffer(nullptr, LinearAllocationArea(nullptr, nullptr));
  return LocalAllocationBuffer(
      nullptr, LinearAllocationArea(kNullAddress, kNullAddress));
}


@ -491,7 +493,7 @@ LocalAllocationBuffer LocalAllocationBuffer::FromResult(Heap* heap,
bool LocalAllocationBuffer::TryMerge(LocalAllocationBuffer* other) {
  if (allocation_info_.top() == other->allocation_info_.limit()) {
    allocation_info_.set_top(other->allocation_info_.top());
    other->allocation_info_.Reset(nullptr, nullptr);
    other->allocation_info_.Reset(kNullAddress, kNullAddress);
    return true;
  }
  return false;
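Note: one subtlety in the NewSpace::AllocateRawAligned hunk above: with Address unsigned, limit() - top is a uintptr_t, so the signed byte count must be cast before the comparison; mixing the two would either warn or silently convert a negative count into a huge unsigned value. A minimal sketch of the guarded form:

#include <cstdint>

using Address = uintptr_t;

bool FitsInAllocationArea(Address top, Address limit, int needed_bytes) {
  // needed_bytes is non-negative by contract; the cast keeps the comparison
  // unsigned-vs-unsigned.
  return limit - top >= static_cast<uintptr_t>(needed_bytes);
}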
@ -31,15 +31,15 @@ namespace internal {
|
||||
// HeapObjectIterator
|
||||
|
||||
HeapObjectIterator::HeapObjectIterator(PagedSpace* space)
|
||||
: cur_addr_(nullptr),
|
||||
cur_end_(nullptr),
|
||||
: cur_addr_(kNullAddress),
|
||||
cur_end_(kNullAddress),
|
||||
space_(space),
|
||||
page_range_(space->anchor()->next_page(), space->anchor()),
|
||||
current_page_(page_range_.begin()) {}
|
||||
|
||||
HeapObjectIterator::HeapObjectIterator(Page* page)
|
||||
: cur_addr_(nullptr),
|
||||
cur_end_(nullptr),
|
||||
: cur_addr_(kNullAddress),
|
||||
cur_end_(kNullAddress),
|
||||
space_(reinterpret_cast<PagedSpace*>(page->owner())),
|
||||
page_range_(page),
|
||||
current_page_(page_range_.begin()) {
|
||||
@ -133,7 +133,7 @@ bool CodeRange::SetUp(size_t requested) {
|
||||
|
||||
// We are sure that we have mapped a block of requested addresses.
|
||||
DCHECK_GE(reservation.size(), requested);
|
||||
Address base = reinterpret_cast<Address>(reservation.address());
|
||||
Address base = reservation.address();
|
||||
|
||||
// On some platforms, specifically Win64, we need to reserve some pages at
|
||||
// the beginning of an executable space.
|
||||
@ -149,7 +149,9 @@ bool CodeRange::SetUp(size_t requested) {
|
||||
allocation_list_.emplace_back(aligned_base, size);
|
||||
current_allocation_block_index_ = 0;
|
||||
|
||||
LOG(isolate_, NewEvent("CodeRange", reservation.address(), requested));
|
||||
LOG(isolate_,
|
||||
NewEvent("CodeRange", reinterpret_cast<void*>(reservation.address()),
|
||||
requested));
|
||||
virtual_memory_.TakeControl(&reservation);
|
||||
return true;
|
||||
}
|
||||
@ -212,7 +214,7 @@ Address CodeRange::AllocateRawMemory(const size_t requested_size,
|
||||
FreeBlock current;
|
||||
if (!ReserveBlock(requested_size, ¤t)) {
|
||||
*allocated = 0;
|
||||
return nullptr;
|
||||
return kNullAddress;
|
||||
}
|
||||
*allocated = current.size;
|
||||
DCHECK(IsAddressAligned(current.start, MemoryChunk::kAlignment));
|
||||
@ -220,7 +222,7 @@ Address CodeRange::AllocateRawMemory(const size_t requested_size,
|
||||
&virtual_memory_, current.start, commit_size, *allocated)) {
|
||||
*allocated = 0;
|
||||
ReleaseBlock(¤t);
|
||||
return nullptr;
|
||||
return kNullAddress;
|
||||
}
|
||||
return current.start;
|
||||
}
|
||||
@ -284,8 +286,8 @@ MemoryAllocator::MemoryAllocator(Isolate* isolate)
|
||||
capacity_(0),
|
||||
size_(0),
|
||||
size_executable_(0),
|
||||
lowest_ever_allocated_(reinterpret_cast<void*>(-1)),
|
||||
highest_ever_allocated_(reinterpret_cast<void*>(0)),
|
||||
lowest_ever_allocated_(static_cast<Address>(-1ll)),
|
||||
highest_ever_allocated_(kNullAddress),
|
||||
unmapper_(isolate->heap(), this) {}
|
||||
|
||||
bool MemoryAllocator::SetUp(size_t capacity, size_t code_range_size) {
|
||||
@ -458,7 +460,7 @@ void MemoryAllocator::FreeMemory(VirtualMemory* reservation,
// TODO(gc) make code_range part of memory allocator?
// Code which is part of the code-range does not have its own VirtualMemory.
DCHECK(code_range() == nullptr ||
!code_range()->contains(static_cast<Address>(reservation->address())));
!code_range()->contains(reservation->address()));
DCHECK(executable == NOT_EXECUTABLE || !code_range()->valid() ||
reservation->size() <= Page::kPageSize);

@ -469,13 +471,12 @@ void MemoryAllocator::FreeMemory(VirtualMemory* reservation,
void MemoryAllocator::FreeMemory(Address base, size_t size,
Executability executable) {
// TODO(gc) make code_range part of memory allocator?
if (code_range() != nullptr &&
code_range()->contains(static_cast<Address>(base))) {
if (code_range() != nullptr && code_range()->contains(base)) {
DCHECK(executable == EXECUTABLE);
code_range()->FreeRawMemory(base, size);
} else {
DCHECK(executable == NOT_EXECUTABLE || !code_range()->valid());
CHECK(FreePages(base, size));
CHECK(FreePages(reinterpret_cast<void*>(base), size));
}
}

@ -483,10 +484,11 @@ Address MemoryAllocator::ReserveAlignedMemory(size_t size, size_t alignment,
void* hint,
VirtualMemory* controller) {
VirtualMemory reservation;
if (!AlignedAllocVirtualMemory(size, alignment, hint, &reservation))
return nullptr;
if (!AlignedAllocVirtualMemory(size, alignment, hint, &reservation)) {
return kNullAddress;
}

Address result = static_cast<Address>(reservation.address());
Address result = reservation.address();
size_.Increment(reservation.size());
controller->TakeControl(&reservation);
return result;
@ -499,28 +501,28 @@ Address MemoryAllocator::AllocateAlignedMemory(
VirtualMemory reservation;
Address base =
ReserveAlignedMemory(reserve_size, alignment, hint, &reservation);
if (base == nullptr) return nullptr;
if (base == kNullAddress) return kNullAddress;

if (executable == EXECUTABLE) {
if (!CommitExecutableMemory(&reservation, base, commit_size,
reserve_size)) {
base = nullptr;
base = kNullAddress;
}
} else {
if (reservation.SetPermissions(base, commit_size,
PageAllocator::kReadWrite)) {
UpdateAllocatedSpaceLimits(base, base + commit_size);
} else {
base = nullptr;
base = kNullAddress;
}
}

if (base == nullptr) {
if (base == kNullAddress) {
// Failed to commit the body. Free the mapping and any partially committed
// regions inside it.
reservation.Free();
size_.Decrement(reserve_size);
return nullptr;
return kNullAddress;
}

controller->TakeControl(&reservation);
@ -796,10 +798,10 @@ MemoryChunk* MemoryAllocator::AllocateChunk(size_t reserve_area_size,

size_t chunk_size;
Heap* heap = isolate_->heap();
Address base = nullptr;
Address base = kNullAddress;
VirtualMemory reservation;
Address area_start = nullptr;
Address area_end = nullptr;
Address area_start = kNullAddress;
Address area_end = kNullAddress;
void* address_hint =
AlignedAddress(heap->GetRandomMmapAddr(), MemoryChunk::kAlignment);

@ -851,9 +853,8 @@ MemoryChunk* MemoryAllocator::AllocateChunk(size_t reserve_area_size,
#endif
base =
code_range()->AllocateRawMemory(chunk_size, commit_size, &chunk_size);
DCHECK(
IsAligned(reinterpret_cast<intptr_t>(base), MemoryChunk::kAlignment));
if (base == nullptr) return nullptr;
DCHECK(IsAligned(base, MemoryChunk::kAlignment));
if (base == kNullAddress) return nullptr;
size_.Increment(chunk_size);
// Update executable memory size.
size_executable_.Increment(chunk_size);
@ -861,7 +862,7 @@ MemoryChunk* MemoryAllocator::AllocateChunk(size_t reserve_area_size,
base = AllocateAlignedMemory(chunk_size, commit_size,
MemoryChunk::kAlignment, executable,
address_hint, &reservation);
if (base == nullptr) return nullptr;
if (base == kNullAddress) return nullptr;
// Update executable memory size.
size_executable_.Increment(reservation.size());
}
@ -883,7 +884,7 @@ MemoryChunk* MemoryAllocator::AllocateChunk(size_t reserve_area_size,
AllocateAlignedMemory(chunk_size, commit_size, MemoryChunk::kAlignment,
executable, address_hint, &reservation);

if (base == nullptr) return nullptr;
if (base == kNullAddress) return nullptr;

if (Heap::ShouldZapGarbage()) {
ZapBlock(base, Page::kObjectStartOffset + commit_area_size);
@ -898,16 +899,16 @@ MemoryChunk* MemoryAllocator::AllocateChunk(size_t reserve_area_size,
isolate_->counters()->memory_allocated()->Increment(
static_cast<int>(chunk_size));

LOG(isolate_, NewEvent("MemoryChunk", base, chunk_size));
LOG(isolate_,
NewEvent("MemoryChunk", reinterpret_cast<void*>(base), chunk_size));

// We cannot use the last chunk in the address space because we would
// overflow when comparing top and limit if this chunk is used for a
// linear allocation area.
if ((reinterpret_cast<uintptr_t>(base) + chunk_size) == 0u) {
if ((base + chunk_size) == 0u) {
CHECK(!last_chunk_.IsReserved());
last_chunk_.TakeControl(&reservation);
UncommitBlock(reinterpret_cast<Address>(last_chunk_.address()),
last_chunk_.size());
UncommitBlock(last_chunk_.address(), last_chunk_.size());
size_.Decrement(chunk_size);
if (executable == EXECUTABLE) {
size_executable_.Decrement(chunk_size);
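The wraparound check above is worth a note: with base as uintptr_t, base + chunk_size is well-defined modular arithmetic, so a chunk ending exactly at the top of the address space wraps to zero. With a pointer type the same computation would be undefined behavior, which is exactly the motivation for this change. A standalone sketch (not V8 code):

#include <cstdint>
using Address = uintptr_t;
// True iff [base, base + size) is the final chunk of the address space:
// unsigned overflow wraps, so the one-past-the-end value becomes 0.
bool IsLastChunkOfAddressSpace(Address base, size_t size) {
  return base + size == 0u;
}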
@ -1004,7 +1005,7 @@ void Page::CreateBlackArea(Address start, Address end) {
heap()->incremental_marking()->marking_state();
marking_state->bitmap(this)->SetRange(AddressToMarkbitIndex(start),
AddressToMarkbitIndex(end));
marking_state->IncrementLiveBytes(this, static_cast<int>(end - start));
marking_state->IncrementLiveBytes(this, static_cast<intptr_t>(end - start));
}

void Page::DestroyBlackArea(Address start, Address end) {
@ -1016,7 +1017,7 @@ void Page::DestroyBlackArea(Address start, Address end) {
heap()->incremental_marking()->marking_state();
marking_state->bitmap(this)->ClearRange(AddressToMarkbitIndex(start),
AddressToMarkbitIndex(end));
marking_state->IncrementLiveBytes(this, -static_cast<int>(end - start));
marking_state->IncrementLiveBytes(this, -static_cast<intptr_t>(end - start));
}

void MemoryAllocator::PartialFreeMemory(MemoryChunk* chunk, Address start_free,
@ -1029,8 +1030,7 @@ void MemoryAllocator::PartialFreeMemory(MemoryChunk* chunk, Address start_free,
if (chunk->IsFlagSet(MemoryChunk::IS_EXECUTABLE)) {
// Add guard page at the end.
size_t page_size = GetCommitPageSize();
DCHECK_EQ(0, reinterpret_cast<uintptr_t>(chunk->area_end_) %
static_cast<uintptr_t>(page_size));
DCHECK_EQ(0, chunk->area_end_ % static_cast<Address>(page_size));
DCHECK_EQ(chunk->address() + chunk->size(),
chunk->area_end() + CodePageGuardSize());
reservation->SetPermissions(chunk->area_end_, page_size,
@ -1164,7 +1164,7 @@ MemoryChunk* MemoryAllocator::AllocatePagePooled(SpaceType* owner) {
const Address start = reinterpret_cast<Address>(chunk);
const Address area_start = start + MemoryChunk::kObjectStartOffset;
const Address area_end = start + size;
if (!CommitBlock(reinterpret_cast<Address>(chunk), size, NOT_EXECUTABLE)) {
if (!CommitBlock(start, size, NOT_EXECUTABLE)) {
return nullptr;
}
VirtualMemory reservation(start, size);
@ -1196,7 +1196,7 @@ bool MemoryAllocator::UncommitBlock(Address start, size_t size) {

void MemoryAllocator::ZapBlock(Address start, size_t size) {
for (size_t s = 0; s + kPointerSize <= size; s += kPointerSize) {
Memory::Address_at(start + s) = reinterpret_cast<Address>(kZapValue);
Memory::Address_at(start + s) = static_cast<Address>(kZapValue);
}
}

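ZapBlock relies on an accessor that turns the integral Address back into a pointer only at the moment of the store. A simplified sketch of that pattern (hypothetical, modeled on V8's Memory helper rather than copied from it):

#include <cstdint>
using Address = uintptr_t;
// Reads or writes a pointer-sized slot at an integral address. The
// reinterpret_cast happens only at the access site, so the intermediate
// arithmetic (start + s above) stays in well-defined integer land.
inline Address& Address_at(Address addr) {
  return *reinterpret_cast<Address*>(addr);
}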
@ -1530,8 +1530,8 @@ void PagedSpace::MergeCompactionSpace(CompactionSpace* other) {
other->FreeLinearAllocationArea();

// The linear allocation area of {other} should be destroyed now.
DCHECK_NULL(other->top());
DCHECK_NULL(other->limit());
DCHECK_EQ(kNullAddress, other->top());
DCHECK_EQ(kNullAddress, other->limit());

// Move over pages.
for (auto it = other->begin(); it != other->end();) {
@ -1690,7 +1690,7 @@ void PagedSpace::ResetFreeListStatistics() {

void PagedSpace::SetLinearAllocationArea(Address top, Address limit) {
SetTopAndLimit(top, limit);
if (top != nullptr && top != limit &&
if (top != kNullAddress && top != limit &&
heap()->incremental_marking()->black_allocation()) {
Page::FromAllocationAreaAddress(top)->CreateBlackArea(top, limit);
}
@ -1746,7 +1746,7 @@ void PagedSpace::MarkLinearAllocationAreaBlack() {
DCHECK(heap()->incremental_marking()->black_allocation());
Address current_top = top();
Address current_limit = limit();
if (current_top != nullptr && current_top != current_limit) {
if (current_top != kNullAddress && current_top != current_limit) {
Page::FromAllocationAreaAddress(current_top)
->CreateBlackArea(current_top, current_limit);
}
@ -1755,7 +1755,7 @@ void PagedSpace::MarkLinearAllocationAreaBlack() {
void PagedSpace::UnmarkLinearAllocationArea() {
Address current_top = top();
Address current_limit = limit();
if (current_top != nullptr && current_top != current_limit) {
if (current_top != kNullAddress && current_top != current_limit) {
Page::FromAllocationAreaAddress(current_top)
->DestroyBlackArea(current_top, current_limit);
}
@ -1766,8 +1766,8 @@ void PagedSpace::FreeLinearAllocationArea() {
// skipped when scanning the heap.
Address current_top = top();
Address current_limit = limit();
if (current_top == nullptr) {
DCHECK_NULL(current_limit);
if (current_top == kNullAddress) {
DCHECK_EQ(kNullAddress, current_limit);
return;
}

@ -1786,8 +1786,8 @@ void PagedSpace::FreeLinearAllocationArea() {
}
}

InlineAllocationStep(current_top, nullptr, nullptr, 0);
SetTopAndLimit(nullptr, nullptr);
InlineAllocationStep(current_top, kNullAddress, kNullAddress, 0);
SetTopAndLimit(kNullAddress, kNullAddress);
DCHECK_GE(current_limit, current_top);

// The code page of the linear allocation area needs to be unprotected
@ -1811,7 +1811,7 @@ void PagedSpace::ReleasePage(Page* page) {

if (Page::FromAllocationAreaAddress(allocation_info_.top()) == page) {
DCHECK(!top_on_previous_step_);
allocation_info_.Reset(nullptr, nullptr);
allocation_info_.Reset(kNullAddress, kNullAddress);
}

// If page is still in a list, unlink it from that list.
@ -2038,7 +2038,7 @@ bool NewSpace::SetUp(size_t initial_semispace_capacity,


void NewSpace::TearDown() {
allocation_info_.Reset(nullptr, nullptr);
allocation_info_.Reset(kNullAddress, kNullAddress);

to_space_.TearDown();
from_space_.TearDown();
@ -2144,10 +2144,10 @@ LinearAllocationArea LocalAllocationBuffer::Close() {
static_cast<int>(allocation_info_.limit() - allocation_info_.top()),
ClearRecordedSlots::kNo);
const LinearAllocationArea old_info = allocation_info_;
allocation_info_ = LinearAllocationArea(nullptr, nullptr);
allocation_info_ = LinearAllocationArea(kNullAddress, kNullAddress);
return old_info;
}
return LinearAllocationArea(nullptr, nullptr);
return LinearAllocationArea(kNullAddress, kNullAddress);
}

LocalAllocationBuffer::LocalAllocationBuffer(
@ -2177,8 +2177,8 @@ LocalAllocationBuffer& LocalAllocationBuffer::operator=(
// This is needed since we (a) cannot yet use move-semantics, and (b) want
// to make the use of the class easy by it as value and (c) implicitly call
// {Close} upon copy.
const_cast<LocalAllocationBuffer&>(other)
.allocation_info_.Reset(nullptr, nullptr);
const_cast<LocalAllocationBuffer&>(other).allocation_info_.Reset(
kNullAddress, kNullAddress);
return *this;
}

@ -2197,7 +2197,7 @@ void NewSpace::UpdateLinearAllocationArea() {

void NewSpace::ResetLinearAllocationArea() {
// Do a step to account for memory allocated so far before resetting.
InlineAllocationStep(top(), top(), nullptr, 0);
InlineAllocationStep(top(), top(), kNullAddress, 0);
to_space_.Reset();
UpdateLinearAllocationArea();
// Clear all mark-bits in the to-space.
@ -2227,7 +2227,7 @@ bool NewSpace::AddFreshPage() {
DCHECK(!Page::IsAtObjectStart(top));

// Do a step to account for memory allocated on previous page.
InlineAllocationStep(top, top, nullptr, 0);
InlineAllocationStep(top, top, kNullAddress, 0);

if (!to_space_.AdvancePage()) {
// No more pages left to advance.
@ -2293,12 +2293,12 @@ void SpaceWithLinearArea::StartNextInlineAllocationStep() {
top_on_previous_step_ = top();
UpdateInlineAllocationLimit(0);
} else {
DCHECK_NULL(top_on_previous_step_);
DCHECK_EQ(kNullAddress, top_on_previous_step_);
}
}

void SpaceWithLinearArea::AddAllocationObserver(AllocationObserver* observer) {
InlineAllocationStep(top(), top(), nullptr, 0);
InlineAllocationStep(top(), top(), kNullAddress, 0);
Space::AddAllocationObserver(observer);
DCHECK_IMPLIES(top_on_previous_step_, AllocationObserversActive());
}
@ -2306,22 +2306,22 @@ void SpaceWithLinearArea::AddAllocationObserver(AllocationObserver* observer) {
void SpaceWithLinearArea::RemoveAllocationObserver(
AllocationObserver* observer) {
Address top_for_next_step =
allocation_observers_.size() == 1 ? nullptr : top();
InlineAllocationStep(top(), top_for_next_step, nullptr, 0);
allocation_observers_.size() == 1 ? kNullAddress : top();
InlineAllocationStep(top(), top_for_next_step, kNullAddress, 0);
Space::RemoveAllocationObserver(observer);
DCHECK_IMPLIES(top_on_previous_step_, AllocationObserversActive());
}

void SpaceWithLinearArea::PauseAllocationObservers() {
// Do a step to account for memory allocated so far.
InlineAllocationStep(top(), nullptr, nullptr, 0);
InlineAllocationStep(top(), kNullAddress, kNullAddress, 0);
Space::PauseAllocationObservers();
DCHECK_NULL(top_on_previous_step_);
DCHECK_EQ(kNullAddress, top_on_previous_step_);
UpdateInlineAllocationLimit(0);
}

void SpaceWithLinearArea::ResumeAllocationObservers() {
DCHECK_NULL(top_on_previous_step_);
DCHECK_EQ(kNullAddress, top_on_previous_step_);
Space::ResumeAllocationObservers();
StartNextInlineAllocationStep();
}
@ -2338,7 +2338,7 @@ void SpaceWithLinearArea::InlineAllocationStep(Address top,
if (top_on_previous_step_) {
if (top < top_on_previous_step_) {
// Generated code decreased the top pointer to do folded allocations.
DCHECK_NOT_NULL(top);
DCHECK_NE(top, kNullAddress);
DCHECK_EQ(Page::FromAllocationAreaAddress(top),
Page::FromAllocationAreaAddress(top_on_previous_step_));
top_on_previous_step_ = top;
@ -2447,7 +2447,7 @@ bool SemiSpace::Commit() {
}
Reset();
AccountCommitted(current_capacity_);
if (age_mark_ == nullptr) {
if (age_mark_ == kNullAddress) {
age_mark_ = first_page()->area_start();
}
committed_ = true;
@ -3241,7 +3241,9 @@ void LargeObjectSpace::TearDown() {
while (first_page_ != nullptr) {
LargePage* page = first_page_;
first_page_ = first_page_->next_page();
LOG(heap()->isolate(), DeleteEvent("LargeObjectChunk", page->address()));
LOG(heap()->isolate(),
DeleteEvent("LargeObjectChunk",
reinterpret_cast<void*>(page->address())));
heap()->memory_allocator()->Free<MemoryAllocator::kFull>(page);
}
SetUp();
@ -3321,7 +3323,7 @@ LargePage* LargeObjectSpace::FindPageThreadSafe(Address a) {

LargePage* LargeObjectSpace::FindPage(Address a) {
const Address key = MemoryChunk::FromAddress(a)->address();
auto it = chunk_map_.find(reinterpret_cast<Address>(key));
auto it = chunk_map_.find(key);
if (it != chunk_map_.end()) {
LargePage* page = it->second;
if (page->Contains(a)) {
@ -3365,8 +3367,7 @@ void LargeObjectSpace::RemoveChunkMapEntries(LargePage* page) {

void LargeObjectSpace::RemoveChunkMapEntries(LargePage* page,
Address free_start) {
for (Address current = reinterpret_cast<Address>(::RoundUp(
reinterpret_cast<uintptr_t>(free_start), MemoryChunk::kPageSize));
for (Address current = ::RoundUp(free_start, MemoryChunk::kPageSize);
current < reinterpret_cast<Address>(page) + page->size();
current += MemoryChunk::kPageSize) {
chunk_map_.erase(current);
@ -3515,7 +3516,7 @@ void LargeObjectSpace::Print() {

void Page::Print() {
// Make a best-effort to print the objects in the page.
PrintF("Page@%p in %s\n", static_cast<void*>(this->address()),
PrintF("Page@%p in %s\n", reinterpret_cast<void*>(this->address()),
AllocationSpaceName(this->owner()->identity()));
printf(" --------------------------------------\n");
HeapObjectIterator objects(this);

@ -413,7 +413,7 @@ class MemoryChunk {
static inline MemoryChunk* FromAnyPointerAddress(Heap* heap, Address addr);

static inline void UpdateHighWaterMark(Address mark) {
if (mark == nullptr) return;
if (mark == kNullAddress) return;
// Need to subtract one from the mark because when a chunk is full the
// top points to the next address after the chunk, which effectively belongs
// to another chunk. See the comment to Page::FromTopOrLimit.
@ -769,8 +769,7 @@ class Page : public MemoryChunk {
}

static bool IsAtObjectStart(Address addr) {
return (reinterpret_cast<intptr_t>(addr) & kPageAlignmentMask) ==
kObjectStartOffset;
return (addr & kPageAlignmentMask) == kObjectStartOffset;
}

static Page* ConvertNewToOld(Page* old_page);
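IsAtObjectStart illustrates how the integral Address simplifies mask arithmetic. A hedged sketch with hypothetical constants (V8's real values differ per configuration):

#include <cstdint>
using Address = uintptr_t;
constexpr int kPageSizeBits = 19;  // hypothetical: 512 KiB pages
constexpr Address kPageAlignmentMask = (Address{1} << kPageSizeBits) - 1;
// addr & kPageAlignmentMask is the offset of addr within its page, so
// this asks: does addr sit at exactly the given offset into a page?
constexpr bool IsAtOffset(Address addr, Address offset) {
  return (addr & kPageAlignmentMask) == offset;
}
static_assert(IsAtOffset((Address{1} << kPageSizeBits) + 0x40, 0x40), "");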
@ -1033,7 +1032,7 @@ class CodeRange {
bool valid() { return virtual_memory_.IsReserved(); }
Address start() {
DCHECK(valid());
return static_cast<Address>(virtual_memory_.address());
return virtual_memory_.address();
}
size_t size() {
DCHECK(valid());
@ -1041,7 +1040,7 @@ class CodeRange {
}
bool contains(Address address) {
if (!valid()) return false;
Address start = static_cast<Address>(virtual_memory_.address());
Address start = virtual_memory_.address();
return start <= address && address < start + virtual_memory_.size();
}

@ -1065,7 +1064,7 @@ class CodeRange {
DCHECK(size >= static_cast<size_t>(Page::kPageSize));
}
FreeBlock(void* start_arg, size_t size_arg)
: start(static_cast<Address>(start_arg)), size(size_arg) {
: start(reinterpret_cast<Address>(start_arg)), size(size_arg) {
DCHECK(IsAddressAligned(start, MemoryChunk::kAlignment));
DCHECK(size >= static_cast<size_t>(Page::kPageSize));
}
@ -1114,7 +1113,7 @@ class SkipList {

void Clear() {
for (int idx = 0; idx < kSize; idx++) {
starts_[idx] = reinterpret_cast<Address>(-1);
starts_[idx] = static_cast<Address>(-1);
}
}

@ -1334,7 +1333,7 @@ class V8_EXPORT_PRIVATE MemoryAllocator {

// Returns an indication of whether a pointer is in a space that has
// been allocated by this MemoryAllocator.
V8_INLINE bool IsOutsideAllocatedSpace(const void* address) {
V8_INLINE bool IsOutsideAllocatedSpace(Address address) {
return address < lowest_ever_allocated_.Value() ||
address >= highest_ever_allocated_.Value();
}
@ -1364,8 +1363,8 @@ class V8_EXPORT_PRIVATE MemoryAllocator {
size_t bytes_to_free, Address new_area_end);

// Commit a contiguous block of memory from the initial chunk. Assumes that
// the address is not nullptr, the size is greater than zero, and that the
// block is contained in the initial chunk. Returns true if it succeeded
// the address is not kNullAddress, the size is greater than zero, and that
// the block is contained in the initial chunk. Returns true if it succeeded
// and false otherwise.
bool CommitBlock(Address start, size_t size, Executability executable);

@ -1376,7 +1375,7 @@ class V8_EXPORT_PRIVATE MemoryAllocator {
}

// Uncommit a contiguous block of memory [start..(start+size)[.
// start is not nullptr, the size is greater than zero, and the
// start is not kNullAddress, the size is greater than zero, and the
// block is contained in the initial chunk. Returns true if it succeeded
// and false otherwise.
bool UncommitBlock(Address start, size_t size);
@ -1413,11 +1412,11 @@ class V8_EXPORT_PRIVATE MemoryAllocator {
Page* InitializePagesInChunk(int chunk_id, int pages_in_chunk,
PagedSpace* owner);

void UpdateAllocatedSpaceLimits(void* low, void* high) {
void UpdateAllocatedSpaceLimits(Address low, Address high) {
// The use of atomic primitives does not guarantee correctness (wrt.
// desired semantics) by default. The loop here ensures that we update the
// values only if they did not change in between.
void* ptr = nullptr;
Address ptr = kNullAddress;
do {
ptr = lowest_ever_allocated_.Value();
} while ((low < ptr) && !lowest_ever_allocated_.TrySetValue(ptr, low));
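The retry loop above implements an atomic minimum. A standalone sketch of the same idea with std::atomic (V8 uses its own base::AtomicValue wrapper, so this is an analogy, not the actual implementation):

#include <atomic>
#include <cstdint>
using Address = uintptr_t;
// Lower `lowest` to `low` unless another thread already stored something
// smaller; compare_exchange_weak reloads `observed` on failure, so the
// loop ends once observed <= low or our store wins.
void UpdateLowWaterMark(std::atomic<Address>& lowest, Address low) {
  Address observed = lowest.load(std::memory_order_relaxed);
  while (low < observed &&
         !lowest.compare_exchange_weak(observed, low,
                                       std::memory_order_relaxed)) {
  }
}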
@ -1454,8 +1453,8 @@ class V8_EXPORT_PRIVATE MemoryAllocator {
// conservative, i.e. not all addresses in 'allocated' space are allocated
// to our heap. The range is [lowest, highest[, inclusive on the low end
// and exclusive on the high end.
base::AtomicValue<void*> lowest_ever_allocated_;
base::AtomicValue<void*> highest_ever_allocated_;
base::AtomicValue<Address> lowest_ever_allocated_;
base::AtomicValue<Address> highest_ever_allocated_;

VirtualMemory last_chunk_;
Unmapper unmapper_;
@ -1574,7 +1573,7 @@ class V8_EXPORT_PRIVATE HeapObjectIterator : public ObjectIterator {
// space.
class LinearAllocationArea {
public:
LinearAllocationArea() : top_(nullptr), limit_(nullptr) {}
LinearAllocationArea() : top_(kNullAddress), limit_(kNullAddress) {}
LinearAllocationArea(Address top, Address limit) : top_(top), limit_(limit) {}

void Reset(Address top, Address limit) {
@ -1583,14 +1582,12 @@ class LinearAllocationArea {
}

INLINE(void set_top(Address top)) {
SLOW_DCHECK(top == nullptr ||
(reinterpret_cast<intptr_t>(top) & kHeapObjectTagMask) == 0);
SLOW_DCHECK(top == kNullAddress || (top & kHeapObjectTagMask) == 0);
top_ = top;
}

INLINE(Address top()) const {
SLOW_DCHECK(top_ == nullptr ||
(reinterpret_cast<intptr_t>(top_) & kHeapObjectTagMask) == 0);
SLOW_DCHECK(top_ == kNullAddress || (top_ & kHeapObjectTagMask) == 0);
return top_;
}

@ -1952,7 +1949,7 @@ class LocalAllocationBuffer {
V8_WARN_UNUSED_RESULT inline AllocationResult AllocateRawAligned(
int size_in_bytes, AllocationAlignment alignment);

inline bool IsValid() { return allocation_info_.top() != nullptr; }
inline bool IsValid() { return allocation_info_.top() != kNullAddress; }

// Try to merge LABs, which is only possible when they are adjacent in memory.
// Returns true if the merge was successful, false otherwise.
@ -1974,7 +1971,7 @@ class SpaceWithLinearArea : public Space {
public:
SpaceWithLinearArea(Heap* heap, AllocationSpace id, Executability executable)
: Space(heap, id, executable), top_on_previous_step_(0) {
allocation_info_.Reset(nullptr, nullptr);
allocation_info_.Reset(kNullAddress, kNullAddress);
}

virtual bool SupportsInlineAllocation() = 0;
@ -2354,7 +2351,7 @@ class SemiSpace : public Space {
current_capacity_(0),
maximum_capacity_(0),
minimum_capacity_(0),
age_mark_(nullptr),
age_mark_(kNullAddress),
committed_(false),
id_(semispace),
anchor_(this),
@ -2618,8 +2615,8 @@ class NewSpace : public SpaceWithLinearArea {

size_t AllocatedSinceLastGC() {
const Address age_mark = to_space_.age_mark();
DCHECK_NOT_NULL(age_mark);
DCHECK_NOT_NULL(top());
DCHECK_NE(age_mark, kNullAddress);
DCHECK_NE(top(), kNullAddress);
Page* const age_mark_page = Page::FromAllocationAreaAddress(age_mark);
Page* const last_page = Page::FromAllocationAreaAddress(top());
Page* current_page = age_mark_page;

@ -38,15 +38,13 @@ void StoreBuffer::SetUp() {
&reservation)) {
heap_->FatalProcessOutOfMemory("StoreBuffer::SetUp");
}
uintptr_t start_as_int = reinterpret_cast<uintptr_t>(reservation.address());
start_[0] =
reinterpret_cast<Address*>(::RoundUp(start_as_int, kStoreBufferSize));
Address start = reservation.address();
start_[0] = reinterpret_cast<Address*>(::RoundUp(start, kStoreBufferSize));
limit_[0] = start_[0] + (kStoreBufferSize / kPointerSize);
start_[1] = limit_[0];
limit_[1] = start_[1] + (kStoreBufferSize / kPointerSize);

Address* vm_limit = reinterpret_cast<Address*>(
reinterpret_cast<char*>(reservation.address()) + reservation.size());
Address* vm_limit = reinterpret_cast<Address*>(start + reservation.size());

USE(vm_limit);
for (int i = 0; i < kStoreBuffers; i++) {
@ -54,7 +52,7 @@ void StoreBuffer::SetUp() {
DCHECK(reinterpret_cast<Address>(limit_[i]) >= reservation.address());
DCHECK(start_[i] <= vm_limit);
DCHECK(limit_[i] <= vm_limit);
DCHECK_EQ(0, reinterpret_cast<uintptr_t>(limit_[i]) & kStoreBufferMask);
DCHECK_EQ(0, reinterpret_cast<Address>(limit_[i]) & kStoreBufferMask);
}

if (!reservation.SetPermissions(reinterpret_cast<Address>(start_[0]),
@ -104,7 +102,7 @@ void StoreBuffer::MoveEntriesToRememberedSet(int index) {
if (!lazy_top_[index]) return;
DCHECK_GE(index, 0);
DCHECK_LT(index, kStoreBuffers);
Address last_inserted_addr = nullptr;
Address last_inserted_addr = kNullAddress;

// We are taking the chunk map mutex here because the page lookup of addr
// below may require us to check if addr is part of a large page.
@ -114,7 +112,7 @@ void StoreBuffer::MoveEntriesToRememberedSet(int index) {
Address addr = *current;
MemoryChunk* chunk = MemoryChunk::FromAnyPointerAddress(heap_, addr);
if (IsDeletionAddress(addr)) {
last_inserted_addr = nullptr;
last_inserted_addr = kNullAddress;
current++;
Address end = *current;
DCHECK(!IsDeletionAddress(end));

@ -50,23 +50,21 @@ class StoreBuffer {
void MoveAllEntriesToRememberedSet();

inline bool IsDeletionAddress(Address address) const {
return reinterpret_cast<intptr_t>(address) & kDeletionTag;
return address & kDeletionTag;
}

inline Address MarkDeletionAddress(Address address) {
return reinterpret_cast<Address>(reinterpret_cast<intptr_t>(address) |
kDeletionTag);
return address | kDeletionTag;
}

inline Address UnmarkDeletionAddress(Address address) {
return reinterpret_cast<Address>(reinterpret_cast<intptr_t>(address) &
~kDeletionTag);
return address & ~kDeletionTag;
}

// If we only want to delete a single slot, end should be set to null which
// will be written into the second field. When processing the store buffer
// the more efficient Remove method will be called in this case.
void DeleteEntry(Address start, Address end = nullptr) {
void DeleteEntry(Address start, Address end = kNullAddress) {
// Deletions coming from the GC are directly deleted from the remembered
// set. Deletions coming from the runtime are added to the store buffer
// to allow concurrent processing.
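The store buffer's deletion tag shows the payoff most directly: tagging an address becomes plain bit arithmetic on an unsigned integer instead of a pair of reinterpret_casts. A standalone sketch (assuming, as the code above implies, that the tag occupies low bits that aligned slot addresses never use; the tag value here is hypothetical):

#include <cstdint>
using Address = uintptr_t;
constexpr Address kDeletionTag = 1;  // hypothetical value for illustration
constexpr Address MarkDeletion(Address a) { return a | kDeletionTag; }
constexpr Address UnmarkDeletion(Address a) { return a & ~kDeletionTag; }
constexpr bool IsDeletion(Address a) { return (a & kDeletionTag) != 0; }
// Round-trip for aligned addresses: tagging then untagging is lossless.
static_assert(UnmarkDeletion(MarkDeletion(0x1000)) == 0x1000, "");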
@ -251,7 +251,7 @@ int Sweeper::RawSweep(Page* p, FreeListRebuildingMode free_list_mode,
ArrayBufferTracker::FreeDead(p, marking_state_);

Address free_start = p->area_start();
DCHECK_EQ(0, reinterpret_cast<intptr_t>(free_start) % (32 * kPointerSize));
DCHECK_EQ(0, free_start % (32 * kPointerSize));

// If we use the skip list for code space pages, we have to lock the skip
// list because it could be accessed concurrently by the runtime or the
@ -281,7 +281,7 @@ int Sweeper::RawSweep(Page* p, FreeListRebuildingMode free_list_mode,
CHECK_GT(free_end, free_start);
size_t size = static_cast<size_t>(free_end - free_start);
if (free_space_mode == ZAP_FREE_SPACE) {
memset(free_start, 0xCC, size);
memset(reinterpret_cast<void*>(free_start), 0xCC, size);
}
if (free_list_mode == REBUILD_FREE_LIST) {
freed_bytes = reinterpret_cast<PagedSpace*>(space)->Free(
@ -321,7 +321,7 @@ int Sweeper::RawSweep(Page* p, FreeListRebuildingMode free_list_mode,
CHECK_GT(p->area_end(), free_start);
size_t size = static_cast<size_t>(p->area_end() - free_start);
if (free_space_mode == ZAP_FREE_SPACE) {
memset(free_start, 0xCC, size);
memset(reinterpret_cast<void*>(free_start), 0xCC, size);
}
if (free_list_mode == REBUILD_FREE_LIST) {
freed_bytes = reinterpret_cast<PagedSpace*>(space)->Free(

@ -74,7 +74,7 @@ Address RelocInfo::target_address_address() {
DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_) || IsWasmCall(rmode_) ||
IsEmbeddedObject(rmode_) || IsExternalReference(rmode_) ||
IsOffHeapTarget(rmode_));
return reinterpret_cast<Address>(pc_);
return pc_;
}


@ -135,7 +135,7 @@ Address RelocInfo::target_internal_reference() {

Address RelocInfo::target_internal_reference_address() {
DCHECK(rmode_ == INTERNAL_REFERENCE);
return reinterpret_cast<Address>(pc_);
return pc_;
}

void RelocInfo::set_wasm_code_table_entry(Address target,
@ -149,7 +149,7 @@ void RelocInfo::set_wasm_code_table_entry(Address target,

Address RelocInfo::target_runtime_entry(Assembler* origin) {
DCHECK(IsRuntimeEntry(rmode_));
return reinterpret_cast<Address>(*reinterpret_cast<int32_t*>(pc_));
return static_cast<Address>(*reinterpret_cast<int32_t*>(pc_));
}

void RelocInfo::set_target_runtime_entry(Address target,
@ -169,7 +169,7 @@ Address RelocInfo::target_off_heap_target() {
void RelocInfo::WipeOut() {
if (IsEmbeddedObject(rmode_) || IsExternalReference(rmode_) ||
IsInternalReference(rmode_)) {
Memory::Address_at(pc_) = nullptr;
Memory::Address_at(pc_) = kNullAddress;
} else if (IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_)) {
// Effectively write zero into the relocation.
Assembler::set_target_address_at(pc_, constant_pool_,
@ -210,8 +210,7 @@ void Assembler::emit_q(uint64_t x) {
}

void Assembler::emit(Handle<HeapObject> handle) {
emit(reinterpret_cast<intptr_t>(handle.address()),
RelocInfo::EMBEDDED_OBJECT);
emit(handle.address(), RelocInfo::EMBEDDED_OBJECT);
}

void Assembler::emit(uint32_t x, RelocInfo::Mode rmode) {
@ -222,7 +221,7 @@ void Assembler::emit(uint32_t x, RelocInfo::Mode rmode) {
}

void Assembler::emit(Handle<Code> code, RelocInfo::Mode rmode) {
emit(reinterpret_cast<intptr_t>(code.address()), rmode);
emit(code.address(), rmode);
}


@ -273,10 +272,9 @@ Address Assembler::target_address_at(Address pc, Address constant_pool) {
void Assembler::set_target_address_at(Address pc, Address constant_pool,
Address target,
ICacheFlushMode icache_flush_mode) {
int32_t* p = reinterpret_cast<int32_t*>(pc);
*p = target - (pc + sizeof(int32_t));
*reinterpret_cast<int32_t*>(pc) = target - (pc + sizeof(int32_t));
if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
Assembler::FlushICache(p, sizeof(int32_t));
Assembler::FlushICache(pc, sizeof(int32_t));
}
}

@ -287,7 +285,7 @@ Address Assembler::target_address_from_return_address(Address pc) {
void Assembler::deserialization_set_special_target_at(
Address instruction_payload, Code* code, Address target) {
set_target_address_at(instruction_payload,
code ? code->constant_pool() : nullptr, target);
code ? code->constant_pool() : kNullAddress, target);
}

Displacement Assembler::disp_at(Label* L) {
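set_target_address_at above is the ia32 rel32 patch: the 4-byte operand of a call/jmp holds the target relative to the end of the displacement field. A hedged standalone sketch of the same computation (memcpy replaces the raw int32_t store purely to keep the example self-contained and alias-safe; it is not what the patch does):

#include <cstdint>
#include <cstring>
using Address = uintptr_t;
void PatchRel32(Address pc, Address target) {
  // The displacement is relative to the instruction that follows the
  // 4-byte operand, hence the pc + sizeof(int32_t) bias.
  int32_t disp = static_cast<int32_t>(target - (pc + sizeof(int32_t)));
  std::memcpy(reinterpret_cast<void*>(pc), &disp, sizeof(disp));
}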
@ -318,7 +318,7 @@ void Assembler::AllocateAndInstallRequestedHeapObjects(Isolate* isolate) {
object = request.code_stub()->GetCode();
break;
}
Address pc = buffer_ + request.offset();
Address pc = reinterpret_cast<Address>(buffer_) + request.offset();
Memory::Object_Handle_at(pc) = object;
}
}
@ -395,7 +395,7 @@ void Assembler::Align(int m) {


bool Assembler::IsNop(Address addr) {
Address a = addr;
byte* a = reinterpret_cast<byte*>(addr);
while (*a == 0x66) a++;
if (*a == 0x90) return true;
if (a[0] == 0xF && a[1] == 0x1F) return true;
@ -654,7 +654,7 @@ void Assembler::mov(Operand dst, Address src, RelocInfo::Mode rmode) {
EnsureSpace ensure_space(this);
EMIT(0xC7);
emit_operand(eax, dst);
emit(reinterpret_cast<uint32_t>(src), rmode);
emit(src, rmode);
}

void Assembler::mov(Operand dst, Handle<HeapObject> handle) {
@ -1595,22 +1595,21 @@ void Assembler::call(Label* L) {
}
}


void Assembler::call(byte* entry, RelocInfo::Mode rmode) {
void Assembler::call(Address entry, RelocInfo::Mode rmode) {
EnsureSpace ensure_space(this);
DCHECK(!RelocInfo::IsCodeTarget(rmode));
EMIT(0xE8);
if (RelocInfo::IsRuntimeEntry(rmode)) {
emit(reinterpret_cast<uint32_t>(entry), rmode);
emit(entry, rmode);
} else {
emit(entry - (pc_ + sizeof(int32_t)), rmode);
emit(entry - (reinterpret_cast<Address>(pc_) + sizeof(int32_t)), rmode);
}
}

void Assembler::wasm_call(Address entry, RelocInfo::Mode rmode) {
EnsureSpace ensure_space(this);
EMIT(0xE8);
emit(reinterpret_cast<intptr_t>(entry), rmode);
emit(entry, rmode);
}

int Assembler::CallSize(Operand adr) {
@ -1680,14 +1679,14 @@ void Assembler::jmp(Label* L, Label::Distance distance) {
}
}

void Assembler::jmp(byte* entry, RelocInfo::Mode rmode) {
void Assembler::jmp(Address entry, RelocInfo::Mode rmode) {
EnsureSpace ensure_space(this);
DCHECK(!RelocInfo::IsCodeTarget(rmode));
EMIT(0xE9);
if (RelocInfo::IsRuntimeEntry(rmode)) {
emit(reinterpret_cast<uint32_t>(entry), rmode);
emit(entry, rmode);
} else {
emit(entry - (pc_ + sizeof(int32_t)), rmode);
emit(entry - (reinterpret_cast<Address>(pc_) + sizeof(int32_t)), rmode);
}
}

@ -3304,7 +3303,7 @@ void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) {
!serializer_enabled() && !emit_debug_code()) {
return;
}
RelocInfo rinfo(pc_, rmode, data, nullptr);
RelocInfo rinfo(reinterpret_cast<Address>(pc_), rmode, data, nullptr);
reloc_info_writer.Write(&rinfo);
}

@ -245,7 +245,7 @@ class Immediate BASE_EMBEDDED {
: Immediate(reinterpret_cast<intptr_t>(value)) {}
inline explicit Immediate(Address addr,
RelocInfo::Mode rmode = RelocInfo::NONE)
: Immediate(reinterpret_cast<int32_t>(addr), rmode) {}
: Immediate(static_cast<int32_t>(addr), rmode) {}

static Immediate EmbeddedNumber(double number); // Smi or HeapNumber.
static Immediate EmbeddedCode(CodeStub* code);
@ -363,15 +363,13 @@ class Operand {
}

static Operand StaticVariable(const ExternalReference& ext) {
return Operand(reinterpret_cast<int32_t>(ext.address()),
RelocInfo::EXTERNAL_REFERENCE);
return Operand(ext.address(), RelocInfo::EXTERNAL_REFERENCE);
}

static Operand StaticArray(Register index,
ScaleFactor scale,
const ExternalReference& arr) {
return Operand(index, scale, reinterpret_cast<int32_t>(arr.address()),
RelocInfo::EXTERNAL_REFERENCE);
return Operand(index, scale, arr.address(), RelocInfo::EXTERNAL_REFERENCE);
}

static Operand ForRegisterPlusImmediate(Register base, Immediate imm) {
@ -850,7 +848,7 @@ class Assembler : public AssemblerBase {

// Calls
void call(Label* L);
void call(byte* entry, RelocInfo::Mode rmode);
void call(Address entry, RelocInfo::Mode rmode);
int CallSize(Operand adr);
void call(Register reg) { call(Operand(reg)); }
void call(Operand adr);
@ -862,7 +860,7 @@ class Assembler : public AssemblerBase {
// Jumps
// unconditional jump to L
void jmp(Label* L, Label::Distance distance = Label::kFar);
void jmp(byte* entry, RelocInfo::Mode rmode);
void jmp(Address entry, RelocInfo::Mode rmode);
void jmp(Register reg) { jmp(Operand(reg)); }
void jmp(Operand adr);
void jmp(Handle<Code> code, RelocInfo::Mode rmode);
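The Immediate(static_cast<int32_t>(addr)) conversion above is only value-preserving because on the ia32 target an Address is 32 bits wide. A sketch of the invariant being relied on (the guard macro is shown for illustration and is an assumption, not part of this patch):

#include <cstdint>
using Address = uintptr_t;
#if V8_HOST_ARCH_32_BIT  // hypothetical guard for illustration
// On a 32-bit target uintptr_t and int32_t have the same width, so the
// static_cast merely reinterprets the same 32 bits as a signed immediate.
static_assert(sizeof(Address) == sizeof(int32_t),
              "ia32 immediates assume 32-bit addresses");
#endif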
@ -26,7 +26,7 @@ Address IC::constant_pool() const {
if (FLAG_enable_embedded_constant_pool) {
return raw_constant_pool();
} else {
return nullptr;
return kNullAddress;
}
}

@ -35,7 +35,7 @@ Address IC::raw_constant_pool() const {
if (FLAG_enable_embedded_constant_pool) {
return *constant_pool_address_;
} else {
return nullptr;
return kNullAddress;
}
}

@ -889,7 +889,7 @@ Handle<Object> LoadIC::ComputeHandler(LookupIterator* lookup) {

Handle<AccessorInfo> info = Handle<AccessorInfo>::cast(accessors);

if (v8::ToCData<Address>(info->getter()) == nullptr ||
if (v8::ToCData<Address>(info->getter()) == kNullAddress ||
!AccessorInfo::IsCompatibleReceiverMap(isolate(), info, map) ||
!holder->HasFastProperties() ||
(info->is_sloppy() && !receiver->IsJSReceiver())) {
@ -1502,8 +1502,8 @@ Handle<Object> StoreIC::ComputeHandler(LookupIterator* lookup) {
Handle<Object> accessors = lookup->GetAccessors();
if (accessors->IsAccessorInfo()) {
Handle<AccessorInfo> info = Handle<AccessorInfo>::cast(accessors);
if (v8::ToCData<Address>(info->setter()) == nullptr) {
set_slow_stub_reason("setter == nullptr");
if (v8::ToCData<Address>(info->setter()) == kNullAddress) {
set_slow_stub_reason("setter == kNullAddress");
TRACE_HANDLER_STATS(isolate(), StoreIC_SlowStub);
return slow_stub();
}

@ -37,7 +37,8 @@ String16 calculateHash(const String16& str) {
size_t sizeInBytes = sizeof(UChar) * str.length();
data = reinterpret_cast<const uint32_t*>(str.characters16());
for (size_t i = 0; i < sizeInBytes / 4; ++i) {
uint32_t d = v8::internal::ReadUnalignedUInt32(data + i);
uint32_t d = v8::internal::ReadUnalignedUInt32(
reinterpret_cast<v8::internal::Address>(data + i));
#if V8_TARGET_LITTLE_ENDIAN
uint32_t v = d;
#else

@ -14,7 +14,7 @@ namespace internal {
// static
bool InstructionStream::PcIsOffHeap(Isolate* isolate, Address pc) {
#ifdef V8_EMBEDDED_BUILTINS
const uint8_t* start = isolate->embedded_blob();
const Address start = reinterpret_cast<Address>(isolate->embedded_blob());
return start <= pc && pc < start + isolate->embedded_blob_size();
#else
return false;
@ -31,8 +31,8 @@ Code* InstructionStream::TryLookupCode(Isolate* isolate, Address address) {
int l = 0, r = Builtins::builtin_count;
while (l < r) {
const int mid = (l + r) / 2;
const uint8_t* start = d.InstructionStartOfBuiltin(mid);
const uint8_t* end = start + d.InstructionSizeOfBuiltin(mid);
Address start = d.InstructionStartOfBuiltin(mid);
Address end = start + d.InstructionSizeOfBuiltin(mid);

if (address < start) {
r = mid;
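PcIsOffHeap and TryLookupCode both test membership in a half-open [start, start + size) range. With integral addresses these comparisons are plain unsigned arithmetic, whereas relational comparison between unrelated pointers is left unspecified by the C++ standard. A standalone sketch:

#include <cstdint>
using Address = uintptr_t;
// Half-open range membership; well-defined for any two uintptr_t values.
constexpr bool InRange(Address pc, Address start, size_t size) {
  return start <= pc && pc < start + size;
}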
@ -68,7 +68,7 @@ uint32_t BytecodeArrayAccessor::GetUnsignedOperand(
DCHECK_EQ(operand_type,
Bytecodes::GetOperandType(current_bytecode(), operand_index));
DCHECK(Bytecodes::IsUnsignedOperandType(operand_type));
const uint8_t* operand_start =
Address operand_start =
bytecode_array()->GetFirstBytecodeAddress() + bytecode_offset_ +
current_prefix_offset() +
Bytecodes::GetOperandOffset(current_bytecode(), operand_index,
@ -84,7 +84,7 @@ int32_t BytecodeArrayAccessor::GetSignedOperand(
DCHECK_EQ(operand_type,
Bytecodes::GetOperandType(current_bytecode(), operand_index));
DCHECK(!Bytecodes::IsUnsignedOperandType(operand_type));
const uint8_t* operand_start =
Address operand_start =
bytecode_array()->GetFirstBytecodeAddress() + bytecode_offset_ +
current_prefix_offset() +
Bytecodes::GetOperandOffset(current_bytecode(), operand_index,
@ -134,7 +134,7 @@ FeedbackSlot BytecodeArrayAccessor::GetSlotOperand(int operand_index) const {
Register BytecodeArrayAccessor::GetRegisterOperand(int operand_index) const {
OperandType operand_type =
Bytecodes::GetOperandType(current_bytecode(), operand_index);
const uint8_t* operand_start =
Address operand_start =
bytecode_array()->GetFirstBytecodeAddress() + bytecode_offset_ +
current_prefix_offset() +
Bytecodes::GetOperandOffset(current_bytecode(), operand_index,
@ -237,8 +237,9 @@ bool BytecodeArrayAccessor::OffsetWithinBytecode(int offset) const {
}

std::ostream& BytecodeArrayAccessor::PrintTo(std::ostream& os) const {
return BytecodeDecoder::Decode(
os, bytecode_array()->GetFirstBytecodeAddress() + bytecode_offset_,
const uint8_t* bytecode_addr = reinterpret_cast<const uint8_t*>(
bytecode_array()->GetFirstBytecodeAddress() + bytecode_offset_);
return BytecodeDecoder::Decode(os, bytecode_addr,
bytecode_array()->parameter_count());
}

@ -304,7 +304,8 @@ void BytecodeArrayWriter::PatchJumpWith16BitOperand(size_t jump_location,
// The jump fits within the range of an Imm16 operand, so cancel
// the reservation and jump directly.
constant_array_builder()->DiscardReservedEntry(OperandSize::kShort);
WriteUnalignedUInt16(operand_bytes, static_cast<uint16_t>(delta));
WriteUnalignedUInt16(reinterpret_cast<Address>(operand_bytes),
static_cast<uint16_t>(delta));
} else {
// The jump does not fit within the range of an Imm16 operand, so
// commit reservation putting the offset into the constant pool,
@ -313,7 +314,8 @@ void BytecodeArrayWriter::PatchJumpWith16BitOperand(size_t jump_location,
OperandSize::kShort, Smi::FromInt(delta));
jump_bytecode = GetJumpWithConstantOperand(jump_bytecode);
bytecodes()->at(jump_location) = Bytecodes::ToByte(jump_bytecode);
WriteUnalignedUInt16(operand_bytes, static_cast<uint16_t>(entry));
WriteUnalignedUInt16(reinterpret_cast<Address>(operand_bytes),
static_cast<uint16_t>(entry));
}
DCHECK(bytecodes()->at(operand_location) == k8BitJumpPlaceholder &&
bytecodes()->at(operand_location + 1) == k8BitJumpPlaceholder);
@ -327,7 +329,8 @@ void BytecodeArrayWriter::PatchJumpWith32BitOperand(size_t jump_location,
Bytecodes::FromByte(bytecodes()->at(jump_location))));
constant_array_builder()->DiscardReservedEntry(OperandSize::kQuad);
uint8_t operand_bytes[4];
WriteUnalignedUInt32(operand_bytes, static_cast<uint32_t>(delta));
WriteUnalignedUInt32(reinterpret_cast<Address>(operand_bytes),
static_cast<uint32_t>(delta));
size_t operand_location = jump_location + 1;
DCHECK(bytecodes()->at(operand_location) == k8BitJumpPlaceholder &&
bytecodes()->at(operand_location + 1) == k8BitJumpPlaceholder &&

@ -15,7 +15,7 @@ namespace internal {
namespace interpreter {

// static
Register BytecodeDecoder::DecodeRegisterOperand(const uint8_t* operand_start,
Register BytecodeDecoder::DecodeRegisterOperand(Address operand_start,
OperandType operand_type,
OperandScale operand_scale) {
DCHECK(Bytecodes::IsRegisterOperandType(operand_type));
@ -26,7 +26,7 @@ Register BytecodeDecoder::DecodeRegisterOperand(const uint8_t* operand_start,

// static
RegisterList BytecodeDecoder::DecodeRegisterListOperand(
const uint8_t* operand_start, uint32_t count, OperandType operand_type,
Address operand_start, uint32_t count, OperandType operand_type,
OperandScale operand_scale) {
Register first_reg =
DecodeRegisterOperand(operand_start, operand_type, operand_scale);
@ -34,13 +34,13 @@ RegisterList BytecodeDecoder::DecodeRegisterListOperand(
}

// static
int32_t BytecodeDecoder::DecodeSignedOperand(const uint8_t* operand_start,
int32_t BytecodeDecoder::DecodeSignedOperand(Address operand_start,
OperandType operand_type,
OperandScale operand_scale) {
DCHECK(!Bytecodes::IsUnsignedOperandType(operand_type));
switch (Bytecodes::SizeOfOperand(operand_type, operand_scale)) {
case OperandSize::kByte:
return static_cast<int8_t>(*operand_start);
return *reinterpret_cast<const int8_t*>(operand_start);
case OperandSize::kShort:
return static_cast<int16_t>(ReadUnalignedUInt16(operand_start));
case OperandSize::kQuad:
@ -52,13 +52,13 @@ int32_t BytecodeDecoder::DecodeSignedOperand(const uint8_t* operand_start,
}

// static
uint32_t BytecodeDecoder::DecodeUnsignedOperand(const uint8_t* operand_start,
uint32_t BytecodeDecoder::DecodeUnsignedOperand(Address operand_start,
OperandType operand_type,
OperandScale operand_scale) {
DCHECK(Bytecodes::IsUnsignedOperandType(operand_type));
switch (Bytecodes::SizeOfOperand(operand_type, operand_scale)) {
case OperandSize::kByte:
return *operand_start;
return *reinterpret_cast<const uint8_t*>(operand_start);
case OperandSize::kShort:
return ReadUnalignedUInt16(operand_start);
case OperandSize::kQuad:
@ -139,8 +139,8 @@ std::ostream& BytecodeDecoder::Decode(std::ostream& os,
OperandType op_type = Bytecodes::GetOperandType(bytecode, i);
int operand_offset =
Bytecodes::GetOperandOffset(bytecode, i, operand_scale);
const uint8_t* operand_start =
&bytecode_start[prefix_offset + operand_offset];
Address operand_start = reinterpret_cast<Address>(
&bytecode_start[prefix_offset + operand_offset]);
switch (op_type) {
case interpreter::OperandType::kIdx:
case interpreter::OperandType::kUImm:
@ -201,8 +201,8 @@ std::ostream& BytecodeDecoder::Decode(std::ostream& os,
OperandType::kRegCount);
int reg_count_offset =
Bytecodes::GetOperandOffset(bytecode, i + 1, operand_scale);
const uint8_t* reg_count_operand =
&bytecode_start[prefix_offset + reg_count_offset];
Address reg_count_operand = reinterpret_cast<Address>(
&bytecode_start[prefix_offset + reg_count_offset]);
uint32_t count = DecodeUnsignedOperand(
reg_count_operand, OperandType::kRegCount, operand_scale);
RegisterList reg_list = DecodeRegisterListOperand(
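The decoder's operand reads now go through ReadUnalignedUInt16/32 taking an integral Address. A hedged sketch of what such a helper can look like (a simplification; V8's actual implementation may differ):

#include <cstdint>
#include <cstring>
using Address = uintptr_t;
inline uint16_t ReadUnalignedUInt16(Address p) {
  uint16_t result;
  // memcpy from the raw address: defined behavior at any alignment.
  std::memcpy(&result, reinterpret_cast<const void*>(p), sizeof(result));
  return result;
}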
@ -18,23 +18,23 @@ namespace interpreter {
class V8_EXPORT_PRIVATE BytecodeDecoder final {
public:
// Decodes a register operand in a byte array.
static Register DecodeRegisterOperand(const uint8_t* operand_start,
static Register DecodeRegisterOperand(Address operand_start,
OperandType operand_type,
OperandScale operand_scale);

// Decodes a register list operand in a byte array.
static RegisterList DecodeRegisterListOperand(const uint8_t* operand_start,
static RegisterList DecodeRegisterListOperand(Address operand_start,
uint32_t count,
OperandType operand_type,
OperandScale operand_scale);

// Decodes a signed operand in a byte array.
static int32_t DecodeSignedOperand(const uint8_t* operand_start,
static int32_t DecodeSignedOperand(Address operand_start,
OperandType operand_type,
OperandScale operand_scale);

// Decodes an unsigned operand in a byte array.
static uint32_t DecodeUnsignedOperand(const uint8_t* operand_start,
static uint32_t DecodeUnsignedOperand(Address operand_start,
OperandType operand_type,
OperandScale operand_scale);


@ -115,7 +115,7 @@ size_t Interpreter::GetDispatchTableIndex(Bytecode bytecode,
void Interpreter::IterateDispatchTable(RootVisitor* v) {
for (int i = 0; i < kDispatchTableSize; i++) {
Address code_entry = dispatch_table_[i];
Object* code = code_entry == nullptr
Object* code = code_entry == kNullAddress
? nullptr
: Code::GetCodeFromTargetAddress(code_entry);
Object* old_code = code;
@ -230,7 +230,7 @@ UnoptimizedCompilationJob* Interpreter::NewCompilationJob(
}

bool Interpreter::IsDispatchTableInitialized() const {
return dispatch_table_[0] != nullptr;
return dispatch_table_[0] != kNullAddress;
}

const char* Interpreter::LookupNameOfBytecodeHandler(Code* code) {

@ -51,7 +51,7 @@ void SetupInterpreter::InstallBytecodeHandlers(Interpreter* interpreter) {
size_t illegal_index = Interpreter::GetDispatchTableIndex(
Bytecode::kIllegal, OperandScale::kSingle);
for (size_t index = 0; index < Interpreter::kDispatchTableSize; ++index) {
if (dispatch_table[index] == nullptr) {
if (dispatch_table[index] == kNullAddress) {
dispatch_table[index] = dispatch_table[illegal_index];
}
}
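Throughout the interpreter, kNullAddress now plays the "empty slot" role that nullptr used to. Because kNullAddress is zero, a zero-initialized dispatch table starts out empty with no extra work. A minimal sketch of the convention (assumes kNullAddress == 0, consistent with the rest of this change):

#include <cstdint>
using Address = uintptr_t;
constexpr Address kNullAddress = 0;
Address dispatch_table[256] = {};  // zero-initialized => all kNullAddress
bool IsInitialized() { return dispatch_table[0] != kNullAddress; }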
@ -139,7 +139,7 @@ void ThreadLocalTop::InitializeInternal() {
#ifdef USE_SIMULATOR
simulator_ = nullptr;
#endif
js_entry_sp_ = nullptr;
js_entry_sp_ = kNullAddress;
external_callback_scope_ = nullptr;
current_vm_state_ = EXTERNAL;
try_catch_handler_ = nullptr;
@ -704,7 +704,7 @@ Address Isolate::GetAbstractPC(int* line, int* column) {
if (it.done()) {
*line = -1;
*column = -1;
return nullptr;
return kNullAddress;
}
JavaScriptFrame* frame = it.frame();
DCHECK(!frame->is_builtin());
@ -1341,7 +1341,7 @@ Object* Isolate::UnwindAndFindHandler() {
set_wasm_caught_exception(exception);
wasm::WasmCode* wasm_code =
wasm_engine()->code_manager()->LookupCode(frame->pc());
return FoundHandler(nullptr, wasm_code->instructions().start(), offset,
return FoundHandler(nullptr, wasm_code->instruction_start(), offset,
wasm_code->constant_pool(), return_sp, frame->fp());
}

@ -1552,8 +1552,10 @@ Isolate::CatchType Isolate::PredictExceptionCatcher() {
Address entry_handler = frame->top_handler()->next()->address();
// The exception has been externally caught if and only if there is an
// external handler which is on top of the top-most JS_ENTRY handler.
if (external_handler != nullptr && !try_catch_handler()->is_verbose_) {
if (entry_handler == nullptr || entry_handler > external_handler) {
if (external_handler != kNullAddress &&
!try_catch_handler()->is_verbose_) {
if (entry_handler == kNullAddress ||
entry_handler > external_handler) {
return CAUGHT_BY_EXTERNAL;
}
}
@ -1818,12 +1820,12 @@ bool Isolate::IsJavaScriptHandlerOnTop(Object* exception) {

// Get the top-most JS_ENTRY handler, cannot be on top if it doesn't exist.
Address entry_handler = Isolate::handler(thread_local_top());
if (entry_handler == nullptr) return false;
if (entry_handler == kNullAddress) return false;

// Get the address of the external handler so we can compare the address to
// determine which one is closer to the top of the stack.
Address external_handler = thread_local_top()->try_catch_handler_address();
if (external_handler == nullptr) return true;
if (external_handler == kNullAddress) return true;

// The exception has been externally caught if and only if there is an
// external handler which is on top of the top-most JS_ENTRY handler.
@ -1841,14 +1843,14 @@ bool Isolate::IsExternalHandlerOnTop(Object* exception) {
// Get the address of the external handler so we can compare the address to
// determine which one is closer to the top of the stack.
Address external_handler = thread_local_top()->try_catch_handler_address();
if (external_handler == nullptr) return false;
if (external_handler == kNullAddress) return false;

// For uncatchable exceptions, the external handler is always on top.
if (!is_catchable_by_javascript(exception)) return true;

// Get the top-most JS_ENTRY handler, cannot be on top if it doesn't exist.
Address entry_handler = Isolate::handler(thread_local_top());
if (entry_handler == nullptr) return true;
if (entry_handler == kNullAddress) return true;

// The exception has been externally caught if and only if there is an
// external handler which is on top of the top-most JS_ENTRY handler.
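These handler checks order stack addresses with < and > to decide which handler sits closer to the top of the stack. This is exactly the case the commit message calls out: relational comparison of pointers into different objects is not defined by C++, while comparison of uintptr_t values is ordinary integer arithmetic. A one-function sketch of the distinction:

#include <cstdint>
using Address = uintptr_t;
// Well-defined for any two Address values, unlike `p1 < p2` on pointers
// that do not point into the same array or object.
inline bool Precedes(Address a, Address b) { return a < b; }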
@ -1920,25 +1922,25 @@ void Isolate::ReportPendingMessagesFromJavaScript() {
|
||||
|
||||
// Get the top-most JS_ENTRY handler, cannot be on top if it doesn't exist.
|
||||
Address entry_handler = Isolate::handler(thread_local_top());
|
||||
DCHECK_NOT_NULL(entry_handler);
|
||||
DCHECK_NE(entry_handler, kNullAddress);
|
||||
entry_handler =
|
||||
reinterpret_cast<StackHandler*>(entry_handler)->next()->address();
|
||||
|
||||
// Get the address of the external handler so we can compare the address to
|
||||
// determine which one is closer to the top of the stack.
|
||||
Address external_handler = thread_local_top()->try_catch_handler_address();
|
||||
if (external_handler == nullptr) return true;
|
||||
if (external_handler == kNullAddress) return true;
|
||||
|
||||
return (entry_handler < external_handler);
|
||||
};
|
||||
|
||||
auto IsHandledExternally = [=]() {
|
||||
Address external_handler = thread_local_top()->try_catch_handler_address();
|
||||
if (external_handler == nullptr) return false;
|
||||
if (external_handler == kNullAddress) return false;
|
||||
|
||||
// Get the top-most JS_ENTRY handler, cannot be on top if it doesn't exist.
|
||||
Address entry_handler = Isolate::handler(thread_local_top());
|
||||
DCHECK_NOT_NULL(entry_handler);
|
||||
DCHECK_NE(entry_handler, kNullAddress);
|
||||
entry_handler =
|
||||
reinterpret_cast<StackHandler*>(entry_handler)->next()->address();
|
||||
return (entry_handler > external_handler);
|
||||
@ -2013,7 +2015,7 @@ bool Isolate::OptionalRescheduleException(bool is_bottom_call) {
// If the exception is externally caught, clear it if there are no
// JavaScript frames on the way to the C++ frame that has the
// external handler.
DCHECK_NOT_NULL(thread_local_top()->try_catch_handler_address());
DCHECK_NE(thread_local_top()->try_catch_handler_address(), kNullAddress);
Address external_handler_address =
thread_local_top()->try_catch_handler_address();
JavaScriptFrameIterator it(this);

@ -2903,9 +2905,9 @@ void CreateOffHeapTrampolines(Isolate* isolate) {
for (int i = 0; i < Builtins::builtin_count; i++) {
if (!Builtins::IsIsolateIndependent(i)) continue;

const uint8_t* instruction_start = d.InstructionStartOfBuiltin(i);
Address instruction_start = d.InstructionStartOfBuiltin(i);
Handle<Code> trampoline = isolate->factory()->NewOffHeapTrampolineFor(
builtins->builtin_handle(i), const_cast<Address>(instruction_start));
builtins->builtin_handle(i), instruction_start);

// Note that references to the old, on-heap code objects may still exist on
// the heap. This is fine for the sake of serialization, as serialization

@ -4078,7 +4080,7 @@ void Isolate::SetIdle(bool is_idle) {
if (!is_profiling()) return;
StateTag state = current_vm_state();
DCHECK(state == EXTERNAL || state == IDLE);
if (js_entry_sp() != nullptr) return;
if (js_entry_sp() != kNullAddress) return;
if (is_idle) {
set_current_vm_state(IDLE);
} else if (state == IDLE) {

@ -4091,7 +4093,7 @@ bool StackLimitCheck::JsHasOverflowed(uintptr_t gap) const {
#ifdef USE_SIMULATOR
// The simulator uses a separate JS stack.
Address jssp_address = Simulator::current(isolate_)->get_sp();
uintptr_t jssp = reinterpret_cast<uintptr_t>(jssp_address);
uintptr_t jssp = static_cast<uintptr_t>(jssp_address);
if (jssp - gap < stack_guard->real_jslimit()) return true;
#endif  // USE_SIMULATOR
return GetCurrentStackPosition() - gap < stack_guard->real_climit();
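get_sp() now returns an integer, so only a static_cast is needed. The guard itself is unsigned arithmetic on uintptr_t, which wraps modulo 2^N instead of invoking the undefined behavior that out-of-bounds pointer arithmetic would. A compact sketch of the check (parameter names stand in for the members above):

#include <cstdint>

// Mirrors the real_jslimit() test: true when fewer than `gap` bytes
// remain between the JS stack pointer and the guard limit.
bool JsStackOverflowed(uintptr_t jssp, uintptr_t gap, uintptr_t limit) {
  return jssp - gap < limit;
}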
@ -23,7 +23,8 @@ Handle<LayoutDescriptor> LayoutDescriptor::New(Isolate* isolate, int length) {
int backing_store_length = GetSlowModeBackingStoreLength(length);
Handle<LayoutDescriptor> result = Handle<LayoutDescriptor>::cast(
isolate->factory()->NewByteArray(backing_store_length, TENURED));
memset(result->GetDataStartAddress(), 0, result->DataSize());
memset(reinterpret_cast<void*>(result->GetDataStartAddress()), 0,
result->DataSize());
return result;
}
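This hunk shows the recurring pattern of the patch: the integer Address is converted back to a pointer exactly at the boundary of a C API that wants one. A minimal sketch of the idiom (ZeroRegion is a hypothetical helper, not V8's):

#include <cstddef>
#include <cstdint>
#include <cstring>
using Address = uintptr_t;

// Cast to void* only at the libc boundary; everything upstream
// stays in integer space.
inline void ZeroRegion(Address start, std::size_t size) {
  std::memset(reinterpret_cast<void*>(start), 0, size);
}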
src/log.cc
@ -309,14 +309,13 @@ void PerfBasicLogger::LogRecordedBuffer(AbstractCode* code, SharedFunctionInfo*,
return;
}

WriteLogRecordedBuffer(reinterpret_cast<uintptr_t>(code->InstructionStart()),
WriteLogRecordedBuffer(static_cast<uintptr_t>(code->InstructionStart()),
code->InstructionSize(), name, length);
}

void PerfBasicLogger::LogRecordedBuffer(const wasm::WasmCode* code,
const char* name, int length) {
WriteLogRecordedBuffer(
reinterpret_cast<uintptr_t>(code->instructions().start()),
WriteLogRecordedBuffer(static_cast<uintptr_t>(code->instruction_start()),
code->instructions().length(), name, length);
}

@ -438,11 +437,11 @@ void LowLevelLogger::LogRecordedBuffer(const wasm::WasmCode* code,
const char* name, int length) {
CodeCreateStruct event;
event.name_size = length;
event.code_address = code->instructions().start();
event.code_address = code->instruction_start();
event.code_size = code->instructions().length();
LogWriteStruct(event);
LogWriteBytes(name, length);
LogWriteBytes(reinterpret_cast<const char*>(code->instructions().start()),
LogWriteBytes(reinterpret_cast<const char*>(code->instruction_start()),
code->instructions().length());
}

@ -503,7 +502,7 @@ void JitLogger::LogRecordedBuffer(AbstractCode* code,
JitCodeEvent event;
memset(&event, 0, sizeof(event));
event.type = JitCodeEvent::CODE_ADDED;
event.code_start = code->InstructionStart();
event.code_start = reinterpret_cast<void*>(code->InstructionStart());
event.code_type =
code->IsCode() ? JitCodeEvent::JIT_CODE : JitCodeEvent::BYTE_CODE;
event.code_len = code->InstructionSize();

@ -537,14 +536,14 @@ void JitLogger::CodeMoveEvent(AbstractCode* from, Address to) {
event.type = JitCodeEvent::CODE_MOVED;
event.code_type =
from->IsCode() ? JitCodeEvent::JIT_CODE : JitCodeEvent::BYTE_CODE;
event.code_start = from->InstructionStart();
event.code_start = reinterpret_cast<void*>(from->InstructionStart());
event.code_len = from->InstructionSize();

// Calculate the header size.
const size_t header_size = from->InstructionStart() - from->address();

// Calculate the new start address of the instructions.
event.new_code_start = to + header_size;
event.new_code_start = reinterpret_cast<void*>(to + header_size);

code_event_handler_(&event);
}
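CodeMoveEvent exercises both halves of the change: to + header_size is integer arithmetic on uintptr_t, with no pointer-past-the-object concerns, and the result is cast back to void* only because the public JitCodeEvent struct stores pointers. A sketch under those assumptions:

#include <cstdint>
using Address = uintptr_t;

// Recompute where instructions land after a code object moves.
// header_size is the distance between two addresses within the same
// object; with integer addresses both the subtraction and the
// re-addition are plain unsigned arithmetic.
void* NewInstructionStart(Address old_start, Address old_object_base,
                          Address new_object_base) {
  const uintptr_t header_size = old_start - old_object_base;
  return reinterpret_cast<void*>(new_object_base + header_size);
}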
@ -580,7 +579,7 @@ void JitLogger::EndCodePosInfoEvent(Address start_address,
JitCodeEvent event;
memset(&event, 0, sizeof(event));
event.type = JitCodeEvent::CODE_END_LINE_INFO_RECORDING;
event.code_start = start_address;
event.code_start = reinterpret_cast<void*>(start_address);
event.user_data = jit_handler_data;

code_event_handler_(&event);

@ -1070,7 +1069,8 @@ void AppendCodeCreateHeader(Log::MessageBuilder& msg,
void AppendCodeCreateHeader(Log::MessageBuilder& msg,
CodeEventListener::LogEventsAndTags tag,
AbstractCode* code, base::ElapsedTimer* timer) {
AppendCodeCreateHeader(msg, tag, code->kind(), code->InstructionStart(),
AppendCodeCreateHeader(msg, tag, code->kind(),
reinterpret_cast<uint8_t*>(code->InstructionStart()),
code->InstructionSize(), timer);
}

@ -1202,7 +1202,7 @@ void Logger::CodeCreateEvent(CodeEventListener::LogEventsAndTags tag,
// <fns> is the function table encoded as a sequence of strings
// S<shared-function-info-address>
msg << "code-source-info" << kNext
<< static_cast<void*>(code->InstructionStart()) << kNext << script_id
<< reinterpret_cast<void*>(code->InstructionStart()) << kNext << script_id
<< kNext << shared->StartPosition() << kNext << shared->EndPosition()
<< kNext;

@ -1250,7 +1250,8 @@ void Logger::CodeCreateEvent(CodeEventListener::LogEventsAndTags tag,
msg << std::hex;
for (int i = 0; i <= maxInlinedId; i++) {
msg << "S"
<< static_cast<void*>(deopt_data->GetInlinedFunction(i)->address());
<< reinterpret_cast<void*>(
deopt_data->GetInlinedFunction(i)->address());
}
msg << std::dec;
}
@ -69,8 +69,7 @@ int32_t Operand::immediate() const {
void RelocInfo::apply(intptr_t delta) {
if (IsInternalReference(rmode_) || IsInternalReferenceEncoded(rmode_)) {
// Absolute code pointer inside code object moves with the code object.
byte* p = reinterpret_cast<byte*>(pc_);
Assembler::RelocateInternalReference(rmode_, p, delta);
Assembler::RelocateInternalReference(rmode_, pc_, delta);
}
}

@ -102,12 +101,11 @@ Address RelocInfo::target_address_address() {
// On R6 we don't move to the end of the instructions to be patched, but one
// instruction before, because if these instructions are at the end of the
// code object it can cause errors in the deserializer.
return reinterpret_cast<Address>(
pc_ +
(Assembler::kInstructionsFor32BitConstant - 1) * Assembler::kInstrSize);
return pc_ + (Assembler::kInstructionsFor32BitConstant - 1) *
Assembler::kInstrSize;
} else {
return reinterpret_cast<Address>(
pc_ + Assembler::kInstructionsFor32BitConstant * Assembler::kInstrSize);
return pc_ +
Assembler::kInstructionsFor32BitConstant * Assembler::kInstrSize;
}
}

@ -131,11 +129,11 @@ void Assembler::deserialization_set_special_target_at(
// On R6 the address location is shifted by one instruction
set_target_address_at(
instruction_payload - (kInstructionsFor32BitConstant - 1) * kInstrSize,
code ? code->constant_pool() : nullptr, target);
code ? code->constant_pool() : kNullAddress, target);
} else {
set_target_address_at(
instruction_payload - kInstructionsFor32BitConstant * kInstrSize,
code ? code->constant_pool() : nullptr, target);
code ? code->constant_pool() : kNullAddress, target);
}
}

@ -147,7 +145,7 @@ void Assembler::set_target_internal_reference_encoded_at(Address pc,
DCHECK(Assembler::IsOri(instr2) || Assembler::IsJicOrJialc(instr2));
instr1 &= ~kImm16Mask;
instr2 &= ~kImm16Mask;
int32_t imm = reinterpret_cast<int32_t>(target);
int32_t imm = static_cast<int32_t>(target);
DCHECK_EQ(imm & 3, 0);
if (Assembler::IsJicOrJialc(instr2)) {
// Encoded internal references are lui/jic load of 32-bit absolute address.

@ -232,19 +230,19 @@ Address RelocInfo::target_internal_reference() {
DCHECK(Assembler::IsLui(instr1));
DCHECK(Assembler::IsOri(instr2) || Assembler::IsJicOrJialc(instr2));
if (Assembler::IsJicOrJialc(instr2)) {
return reinterpret_cast<Address>(
return static_cast<Address>(
Assembler::CreateTargetAddress(instr1, instr2));
}
int32_t imm = (instr1 & static_cast<int32_t>(kImm16Mask)) << kLuiShift;
imm |= (instr2 & static_cast<int32_t>(kImm16Mask));
return reinterpret_cast<Address>(imm);
return static_cast<Address>(imm);
}
}

Address RelocInfo::target_internal_reference_address() {
DCHECK(rmode_ == INTERNAL_REFERENCE || rmode_ == INTERNAL_REFERENCE_ENCODED);
return reinterpret_cast<Address>(pc_);
return pc_;
}

void RelocInfo::set_wasm_code_table_entry(Address target,

@ -277,11 +275,11 @@ void RelocInfo::WipeOut() {
IsRuntimeEntry(rmode_) || IsExternalReference(rmode_) ||
IsInternalReference(rmode_) || IsInternalReferenceEncoded(rmode_));
if (IsInternalReference(rmode_)) {
Memory::Address_at(pc_) = nullptr;
Memory::Address_at(pc_) = kNullAddress;
} else if (IsInternalReferenceEncoded(rmode_)) {
Assembler::set_target_internal_reference_encoded_at(pc_, nullptr);
Assembler::set_target_internal_reference_encoded_at(pc_, kNullAddress);
} else {
Assembler::set_target_address_at(pc_, constant_pool_, nullptr);
Assembler::set_target_address_at(pc_, constant_pool_, kNullAddress);
}
}
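WipeOut now writes kNullAddress through Memory::Address_at. Presumably that accessor simply treats the integer address as the location of an Address-sized slot; a sketch of the assumed shape (V8's actual Memory helper may be worded differently):

#include <cstdint>
using Address = uintptr_t;

// Assumed shape: reinterpret the integer as a pointer to an Address
// slot and return an lvalue, so callers can write through it.
inline Address& Address_at(Address addr) {
  return *reinterpret_cast<Address*>(addr);
}
// Usage mirroring the hunk: Address_at(pc) = kNullAddress;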
@ -203,7 +203,7 @@ Address RelocInfo::embedded_address() const {
}

uint32_t RelocInfo::embedded_size() const {
return reinterpret_cast<uint32_t>(
return static_cast<uint32_t>(
Assembler::target_address_at(pc_, constant_pool_));
}

@ -214,7 +214,7 @@ void RelocInfo::set_embedded_address(Address address,

void RelocInfo::set_embedded_size(uint32_t size, ICacheFlushMode flush_mode) {
Assembler::set_target_address_at(pc_, constant_pool_,
reinterpret_cast<Address>(size), flush_mode);
static_cast<Address>(size), flush_mode);
}

void RelocInfo::set_js_to_wasm_address(Address address,

@ -234,7 +234,7 @@ Address RelocInfo::js_to_wasm_address() const {

Operand::Operand(Handle<HeapObject> handle)
: rm_(no_reg), rmode_(RelocInfo::EMBEDDED_OBJECT) {
value_.immediate = reinterpret_cast<intptr_t>(handle.address());
value_.immediate = static_cast<intptr_t>(handle.address());
}

Operand Operand::EmbeddedNumber(double value) {

@ -276,7 +276,7 @@ void Assembler::AllocateAndInstallRequestedHeapObjects(Isolate* isolate) {
object = request.code_stub()->GetCode();
break;
}
Address pc = buffer_ + request.offset();
Address pc = reinterpret_cast<Address>(buffer_) + request.offset();
set_target_value_at(pc, reinterpret_cast<uint32_t>(object.location()));
}
}

@ -3602,7 +3602,7 @@ MSA_BIT_LIST(MSA_BIT)
#undef MSA_BIT_FORMAT
#undef MSA_BIT_LIST

int Assembler::RelocateInternalReference(RelocInfo::Mode rmode, byte* pc,
int Assembler::RelocateInternalReference(RelocInfo::Mode rmode, Address pc,
intptr_t pc_delta) {
Instr instr = instr_at(pc);

@ -3698,8 +3698,7 @@ void Assembler::GrowBuffer() {
RelocInfo::Mode rmode = it.rinfo()->rmode();
if (rmode == RelocInfo::INTERNAL_REFERENCE_ENCODED ||
rmode == RelocInfo::INTERNAL_REFERENCE) {
byte* p = reinterpret_cast<byte*>(it.rinfo()->pc());
RelocateInternalReference(rmode, p, pc_delta);
RelocateInternalReference(rmode, it.rinfo()->pc(), pc_delta);
}
}
DCHECK(!overflow());

@ -3741,7 +3740,7 @@ void Assembler::dd(Label* label) {

void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) {
// We do not try to reuse pool constants.
RelocInfo rinfo(pc_, rmode, data, nullptr);
RelocInfo rinfo(reinterpret_cast<Address>(pc_), rmode, data, nullptr);
if (!RelocInfo::IsNone(rinfo.rmode())) {
// Don't record external references unless the heap will be serialized.
if (rmode == RelocInfo::EXTERNAL_REFERENCE &&

@ -3857,11 +3856,11 @@ Address Assembler::target_address_at(Address pc) {
if (IsLui(instr1)) {
if (IsOri(instr2)) {
// Assemble the 32 bit value.
return reinterpret_cast<Address>((GetImmediate16(instr1) << kLuiShift) |
return static_cast<Address>((GetImmediate16(instr1) << kLuiShift) |
GetImmediate16(instr2));
} else if (IsJicOrJialc(instr2)) {
// Assemble the 32 bit value.
return reinterpret_cast<Address>(CreateTargetAddress(instr1, instr2));
return static_cast<Address>(CreateTargetAddress(instr1, instr2));
}
}

@ -394,7 +394,7 @@ class Operand BASE_EMBEDDED {
}
INLINE(explicit Operand(const ExternalReference& f))
: rm_(no_reg), rmode_(RelocInfo::EXTERNAL_REFERENCE) {
value_.immediate = reinterpret_cast<int32_t>(f.address());
value_.immediate = static_cast<int32_t>(f.address());
}
INLINE(explicit Operand(const char* s));
INLINE(explicit Operand(Object** opp));

@ -569,8 +569,7 @@ class Assembler : public AssemblerBase {
INLINE(static void set_target_address_at)
(Address pc, Address target,
ICacheFlushMode icache_flush_mode = FLUSH_ICACHE_IF_NEEDED) {
set_target_value_at(pc, reinterpret_cast<uint32_t>(target),
icache_flush_mode);
set_target_value_at(pc, static_cast<uint32_t>(target), icache_flush_mode);
}
// On MIPS there is no Constant Pool so we skip that parameter.
INLINE(static Address target_address_at(Address pc, Address constant_pool)) {

@ -1717,7 +1716,7 @@ class Assembler : public AssemblerBase {
void RecordDeoptReason(DeoptimizeReason reason, SourcePosition position,
int id);

static int RelocateInternalReference(RelocInfo::Mode rmode, byte* pc,
static int RelocateInternalReference(RelocInfo::Mode rmode, Address pc,
intptr_t pc_delta);

// Writes a single byte or word of data in the code stream. Used for

@ -1741,8 +1740,8 @@ class Assembler : public AssemblerBase {
inline int available_space() const { return reloc_info_writer.pos() - pc_; }

// Read/patch instructions.
static Instr instr_at(byte* pc) { return *reinterpret_cast<Instr*>(pc); }
static void instr_at_put(byte* pc, Instr instr) {
static Instr instr_at(Address pc) { return *reinterpret_cast<Instr*>(pc); }
static void instr_at_put(Address pc, Instr instr) {
*reinterpret_cast<Instr*>(pc) = instr;
}
Instr instr_at(int pos) { return *reinterpret_cast<Instr*>(buffer_ + pos); }
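instr_at and instr_at_put show how instruction patching reads once pc is an integer: the reinterpret_cast moves inside the accessor, and a pointer exists only for the duration of the dereference. A self-contained sketch of the same pattern (Instr is a 32-bit word on MIPS):

#include <cstdint>
using Address = uintptr_t;
using Instr = uint32_t;

// Read/write one instruction word at an integer address.
inline Instr InstrAt(Address pc) { return *reinterpret_cast<Instr*>(pc); }
inline void InstrAtPut(Address pc, Instr instr) {
  *reinterpret_cast<Instr*>(pc) = instr;
}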
@ -3665,14 +3665,14 @@ void TurboAssembler::Jump(intptr_t target, RelocInfo::Mode rmode,
void TurboAssembler::Jump(Address target, RelocInfo::Mode rmode, Condition cond,
Register rs, const Operand& rt, BranchDelaySlot bd) {
DCHECK(!RelocInfo::IsCodeTarget(rmode));
Jump(reinterpret_cast<intptr_t>(target), rmode, cond, rs, rt, bd);
Jump(static_cast<intptr_t>(target), rmode, cond, rs, rt, bd);
}

void TurboAssembler::Jump(Handle<Code> code, RelocInfo::Mode rmode,
Condition cond, Register rs, const Operand& rt,
BranchDelaySlot bd) {
DCHECK(RelocInfo::IsCodeTarget(rmode));
Jump(reinterpret_cast<intptr_t>(code.address()), rmode, cond, rs, rt, bd);
Jump(static_cast<intptr_t>(code.address()), rmode, cond, rs, rt, bd);
}

int TurboAssembler::CallSize(Register target, int16_t offset, Condition cond,

@ -3795,7 +3795,7 @@ void TurboAssembler::Call(Address target, RelocInfo::Mode rmode, Condition cond,
BlockTrampolinePoolScope block_trampoline_pool(this);
Label start;
bind(&start);
int32_t target_int = reinterpret_cast<int32_t>(target);
int32_t target_int = static_cast<int32_t>(target);
if (IsMipsArchVariant(kMips32r6) && bd == PROTECT && cond == cc_always) {
uint32_t lui_offset, jialc_offset;
UnpackTargetAddressUnsigned(target_int, lui_offset, jialc_offset);

@ -4556,8 +4556,7 @@ void MacroAssembler::JumpToExternalReference(const ExternalReference& builtin,
}

void MacroAssembler::JumpToInstructionStream(Address entry) {
li(kOffHeapTrampolineRegister,
Operand(reinterpret_cast<int32_t>(entry), RelocInfo::OFF_HEAP_TARGET));
li(kOffHeapTrampolineRegister, Operand(entry, RelocInfo::OFF_HEAP_TARGET));
Jump(kOffHeapTrampolineRegister);
}

@ -928,7 +928,6 @@ Simulator* Simulator::current(Isolate* isolate) {
v8::internal::Isolate::PerIsolateThreadData* isolate_data =
isolate->FindOrAllocatePerThreadDataForThisThread();
DCHECK_NOT_NULL(isolate_data);
DCHECK_NOT_NULL(isolate_data);

Simulator* sim = isolate_data->simulator();
if (sim == nullptr) {

@ -2301,17 +2300,18 @@ void Simulator::SoftwareInterrupt() {
case ExternalReference::BUILTIN_FP_FP_CALL:
case ExternalReference::BUILTIN_COMPARE_CALL:
PrintF("Call to host function at %p with args %f, %f",
static_cast<void*>(FUNCTION_ADDR(generic_target)), dval0,
dval1);
reinterpret_cast<void*>(FUNCTION_ADDR(generic_target)),
dval0, dval1);
break;
case ExternalReference::BUILTIN_FP_CALL:
PrintF("Call to host function at %p with arg %f",
static_cast<void*>(FUNCTION_ADDR(generic_target)), dval0);
reinterpret_cast<void*>(FUNCTION_ADDR(generic_target)),
dval0);
break;
case ExternalReference::BUILTIN_FP_INT_CALL:
PrintF("Call to host function at %p with args %f, %d",
static_cast<void*>(FUNCTION_ADDR(generic_target)), dval0,
ival);
reinterpret_cast<void*>(FUNCTION_ADDR(generic_target)),
dval0, ival);
break;
default:
UNREACHABLE();

@ -2411,8 +2411,8 @@ void Simulator::SoftwareInterrupt() {
PrintF(
"Call to host function at %p "
"args %08x, %08x, %08x, %08x, %08x, %08x, %08x, %08x, %08x\n",
static_cast<void*>(FUNCTION_ADDR(target)), arg0, arg1, arg2, arg3,
arg4, arg5, arg6, arg7, arg8);
reinterpret_cast<void*>(FUNCTION_ADDR(target)), arg0, arg1, arg2,
arg3, arg4, arg5, arg6, arg7, arg8);
}
int64_t result =
target(arg0, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8);

@ -6974,13 +6974,12 @@ void Simulator::Execute() {
}
}

void Simulator::CallInternal(byte* entry) {
void Simulator::CallInternal(Address entry) {
// Adjust JS-based stack limit to C-based stack limit.
isolate_->stack_guard()->AdjustStackLimitForSimulator();

// Prepare to execute the code at entry.
set_register(pc, reinterpret_cast<int32_t>(entry));
set_register(pc, static_cast<int32_t>(entry));
// Put down marker for end of simulation. The simulator will stop simulation
// when the PC reaches this value. By saving the "end simulation" value into
// the LR the simulation stops when returning to this call point.

@ -7044,7 +7043,7 @@ void Simulator::CallInternal(byte* entry) {
set_register(fp, fp_val);
}

intptr_t Simulator::CallImpl(byte* entry, int argument_count,
intptr_t Simulator::CallImpl(Address entry, int argument_count,
const intptr_t* arguments) {
// Set up arguments.

@ -7078,8 +7077,7 @@ intptr_t Simulator::CallImpl(byte* entry, int argument_count,
return get_register(v0);
}

double Simulator::CallFP(byte* entry, double d0, double d1) {
double Simulator::CallFP(Address entry, double d0, double d1) {
if (!IsMipsSoftFloatABI) {
set_fpu_register_double(f12, d0);
set_fpu_register_double(f14, d1);

@ -226,9 +226,7 @@ class Simulator : public SimulatorBase {
void set_pc(int32_t value);
int32_t get_pc() const;

Address get_sp() const {
return reinterpret_cast<Address>(static_cast<intptr_t>(get_register(sp)));
}
Address get_sp() const { return static_cast<Address>(get_register(sp)); }

// Accessor to the internal simulator stack area.
uintptr_t StackLimit(uintptr_t c_limit) const;

@ -237,12 +235,12 @@ class Simulator : public SimulatorBase {
void Execute();

template <typename Return, typename... Args>
Return Call(byte* entry, Args... args) {
Return Call(Address entry, Args... args) {
return VariadicCall<Return>(this, &Simulator::CallImpl, entry, args...);
}
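The simulator's variadic Call and CallImpl now take an Address entry point rather than byte*. A hedged usage sketch in comment form (the surrounding setup is assumed, not from this patch; entry would come from a real generated-code object):

// Hypothetical call site:
//   v8::internal::Simulator* sim = ...;
//   v8::internal::Address entry = ...;  // start address of generated code
//   int32_t result = sim->Call<int32_t>(entry, arg0, arg1);
// CallInternal then installs the entry point in the simulated pc register
// via static_cast<int32_t>(entry) on mips32, as the hunk above shows.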
// Alternative: call a 2-argument double function.
double CallFP(byte* entry, double d0, double d1);
double CallFP(Address entry, double d0, double d1);

// Push an address onto the JS stack.
uintptr_t PushAddress(uintptr_t address);

@ -280,7 +278,7 @@ class Simulator : public SimulatorBase {
Unpredictable = 0xbadbeaf
};

V8_EXPORT_PRIVATE intptr_t CallImpl(byte* entry, int argument_count,
V8_EXPORT_PRIVATE intptr_t CallImpl(Address entry, int argument_count,
const intptr_t* arguments);

// Unsupported instructions use Format to print an error and stop execution.

@ -511,7 +509,7 @@ class Simulator : public SimulatorBase {
void GetFpArgs(double* x, double* y, int32_t* z);
void SetFpResult(const double& result);

void CallInternal(byte* entry);
void CallInternal(Address entry);

// Architecture state.
// Registers.
@ -68,8 +68,7 @@ int64_t Operand::immediate() const {
void RelocInfo::apply(intptr_t delta) {
if (IsInternalReference(rmode_) || IsInternalReferenceEncoded(rmode_)) {
// Absolute code pointer inside code object moves with the code object.
byte* p = reinterpret_cast<byte*>(pc_);
Assembler::RelocateInternalReference(rmode_, p, delta);
Assembler::RelocateInternalReference(rmode_, pc_, delta);
}
}

@ -97,10 +96,7 @@ Address RelocInfo::target_address_address() {
// place, ready to be patched with the target. After jump optimization,
// that is the address of the instruction that follows J/JAL/JR/JALR
// instruction.
// return reinterpret_cast<Address>(
// pc_ + Assembler::kInstructionsFor32BitConstant * Assembler::kInstrSize);
return reinterpret_cast<Address>(
pc_ + Assembler::kInstructionsFor64BitConstant * Assembler::kInstrSize);
return pc_ + Assembler::kInstructionsFor64BitConstant * Assembler::kInstrSize;
}

@ -121,7 +117,7 @@ void Assembler::deserialization_set_special_target_at(
Address instruction_payload, Code* code, Address target) {
set_target_address_at(
instruction_payload - kInstructionsFor64BitConstant * kInstrSize,
code ? code->constant_pool() : nullptr, target);
code ? code->constant_pool() : kNullAddress, target);
}

void Assembler::set_target_internal_reference_encoded_at(Address pc,

@ -129,8 +125,7 @@ void Assembler::set_target_internal_reference_encoded_at(Address pc,
// Encoded internal references are j/jal instructions.
Instr instr = Assembler::instr_at(pc + 0 * Assembler::kInstrSize);

uint64_t imm28 =
(reinterpret_cast<uint64_t>(target) & static_cast<uint64_t>(kImm28Mask));
uint64_t imm28 = target & static_cast<uint64_t>(kImm28Mask);

instr &= ~kImm26Mask;
uint64_t imm26 = imm28 >> 2;

@ -201,16 +196,15 @@ Address RelocInfo::target_internal_reference() {
Instr instr = Assembler::instr_at(pc_ + 0 * Assembler::kInstrSize);
instr &= kImm26Mask;
uint64_t imm28 = instr << 2;
uint64_t segment =
(reinterpret_cast<uint64_t>(pc_) & ~static_cast<uint64_t>(kImm28Mask));
return reinterpret_cast<Address>(segment | imm28);
uint64_t segment = pc_ & ~static_cast<uint64_t>(kImm28Mask);
return static_cast<Address>(segment | imm28);
}
}

Address RelocInfo::target_internal_reference_address() {
DCHECK(rmode_ == INTERNAL_REFERENCE || rmode_ == INTERNAL_REFERENCE_ENCODED);
return reinterpret_cast<Address>(pc_);
return pc_;
}

void RelocInfo::set_wasm_code_table_entry(Address target,

@ -243,11 +237,11 @@ void RelocInfo::WipeOut() {
IsRuntimeEntry(rmode_) || IsExternalReference(rmode_) ||
IsInternalReference(rmode_) || IsInternalReferenceEncoded(rmode_));
if (IsInternalReference(rmode_)) {
Memory::Address_at(pc_) = nullptr;
Memory::Address_at(pc_) = kNullAddress;
} else if (IsInternalReferenceEncoded(rmode_)) {
Assembler::set_target_internal_reference_encoded_at(pc_, nullptr);
Assembler::set_target_internal_reference_encoded_at(pc_, kNullAddress);
} else {
Assembler::set_target_address_at(pc_, constant_pool_, nullptr);
Assembler::set_target_address_at(pc_, constant_pool_, kNullAddress);
}
}
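On mips64, encoded internal references keep the high bits of pc and splice in a 28-bit target, so with integer addresses the segment | imm28 recombination is straight bit arithmetic. A sketch of the idea (the kImm28Mask value of 2^28 - 1 is my assumption from the MIPS J-format):

#include <cstdint>
using Address = uintptr_t;

// Recombine a 26-bit jump index with the current 256 MB segment of pc.
inline Address JumpTarget(Address pc, uint64_t imm26) {
  const uint64_t kImm28Mask = (uint64_t{1} << 28) - 1;  // assumed value
  const uint64_t imm28 = imm26 << 2;
  const uint64_t segment = pc & ~kImm28Mask;
  return static_cast<Address>(segment | imm28);
}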
@ -181,8 +181,8 @@ Address RelocInfo::embedded_address() const {
}

uint32_t RelocInfo::embedded_size() const {
return static_cast<uint32_t>(reinterpret_cast<intptr_t>(
(Assembler::target_address_at(pc_, constant_pool_))));
return static_cast<uint32_t>(
(Assembler::target_address_at(pc_, constant_pool_)));
}

void RelocInfo::set_embedded_address(Address address,

@ -192,7 +192,7 @@ void RelocInfo::set_embedded_address(Address address,

void RelocInfo::set_embedded_size(uint32_t size, ICacheFlushMode flush_mode) {
Assembler::set_target_address_at(pc_, constant_pool_,
reinterpret_cast<Address>(size), flush_mode);
static_cast<Address>(size), flush_mode);
}

void RelocInfo::set_js_to_wasm_address(Address address,

@ -212,7 +212,7 @@ Address RelocInfo::js_to_wasm_address() const {

Operand::Operand(Handle<HeapObject> handle)
: rm_(no_reg), rmode_(RelocInfo::EMBEDDED_OBJECT) {
value_.immediate = reinterpret_cast<intptr_t>(handle.address());
value_.immediate = static_cast<intptr_t>(handle.address());
}

Operand Operand::EmbeddedNumber(double value) {

@ -255,7 +255,7 @@ void Assembler::AllocateAndInstallRequestedHeapObjects(Isolate* isolate) {
object = request.code_stub()->GetCode();
break;
}
Address pc = buffer_ + request.offset();
Address pc = reinterpret_cast<Address>(buffer_) + request.offset();
set_target_value_at(pc, reinterpret_cast<uint64_t>(object.location()));
}
}

@ -3932,7 +3932,7 @@ MSA_BIT_LIST(MSA_BIT)
#undef MSA_BIT_FORMAT
#undef MSA_BIT_LIST

int Assembler::RelocateInternalReference(RelocInfo::Mode rmode, byte* pc,
int Assembler::RelocateInternalReference(RelocInfo::Mode rmode, Address pc,
intptr_t pc_delta) {
if (RelocInfo::IsInternalReference(rmode)) {
int64_t* p = reinterpret_cast<int64_t*>(pc);

@ -4049,8 +4049,7 @@ void Assembler::GrowBuffer() {
for (RelocIterator it(desc); !it.done(); it.next()) {
RelocInfo::Mode rmode = it.rinfo()->rmode();
if (rmode == RelocInfo::INTERNAL_REFERENCE) {
byte* p = reinterpret_cast<byte*>(it.rinfo()->pc());
RelocateInternalReference(rmode, p, pc_delta);
RelocateInternalReference(rmode, it.rinfo()->pc(), pc_delta);
}
}
DCHECK(!overflow());

@ -4092,7 +4091,7 @@ void Assembler::dd(Label* label) {

void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) {
// We do not try to reuse pool constants.
RelocInfo rinfo(pc_, rmode, data, nullptr);
RelocInfo rinfo(reinterpret_cast<Address>(pc_), rmode, data, nullptr);
if (!RelocInfo::IsNone(rinfo.rmode())) {
// Don't record external references unless the heap will be serialized.
if (rmode == RelocInfo::EXTERNAL_REFERENCE &&

@ -4188,7 +4187,7 @@ Address Assembler::target_address_at(Address pc) {

// Sign extend to get canonical address.
addr = (addr << 16) >> 16;
return reinterpret_cast<Address>(addr);
return static_cast<Address>(addr);
}
// We should never get here, force a bad address if we do.
UNREACHABLE();

@ -401,7 +401,7 @@ class Operand BASE_EMBEDDED {
}
INLINE(explicit Operand(const ExternalReference& f))
: rm_(no_reg), rmode_(RelocInfo::EXTERNAL_REFERENCE) {
value_.immediate = reinterpret_cast<int64_t>(f.address());
value_.immediate = static_cast<int64_t>(f.address());
}
INLINE(explicit Operand(const char* s));
INLINE(explicit Operand(Object** opp));

@ -577,8 +577,7 @@ class Assembler : public AssemblerBase {
INLINE(static void set_target_address_at(
Address pc, Address target,
ICacheFlushMode icache_flush_mode = FLUSH_ICACHE_IF_NEEDED)) {
set_target_value_at(pc, reinterpret_cast<uint64_t>(target),
icache_flush_mode);
set_target_value_at(pc, target, icache_flush_mode);
}
// On MIPS there is no Constant Pool so we skip that parameter.
INLINE(static Address target_address_at(Address pc, Address constant_pool)) {

@ -1789,7 +1788,7 @@ class Assembler : public AssemblerBase {
void RecordDeoptReason(DeoptimizeReason reason, SourcePosition position,
int id);

static int RelocateInternalReference(RelocInfo::Mode rmode, byte* pc,
static int RelocateInternalReference(RelocInfo::Mode rmode, Address pc,
intptr_t pc_delta);

// Writes a single byte or word of data in the code stream. Used for

@ -1815,8 +1814,8 @@ class Assembler : public AssemblerBase {
}

// Read/patch instructions.
static Instr instr_at(byte* pc) { return *reinterpret_cast<Instr*>(pc); }
static void instr_at_put(byte* pc, Instr instr) {
static Instr instr_at(Address pc) { return *reinterpret_cast<Instr*>(pc); }
static void instr_at_put(Address pc, Instr instr) {
*reinterpret_cast<Instr*>(pc) = instr;
}
Instr instr_at(int pos) { return *reinterpret_cast<Instr*>(buffer_ + pos); }
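target_address_at reassembles a 48-bit value and sign-extends it to a canonical mips64 address; with the integer typedef the result only needs a static_cast at the end. A sketch of the canonicalization step, mirroring the shift pair in the hunk above:

#include <cstdint>
using Address = uintptr_t;

// Replicate bit 47 into the upper 16 bits, as the hardware expects.
inline Address Canonicalize(int64_t addr) {
  addr = (addr << 16) >> 16;  // arithmetic shift pair sign-extends
  return static_cast<Address>(addr);
}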
@ -4104,14 +4104,14 @@ void TurboAssembler::Jump(intptr_t target, RelocInfo::Mode rmode,
void TurboAssembler::Jump(Address target, RelocInfo::Mode rmode, Condition cond,
Register rs, const Operand& rt, BranchDelaySlot bd) {
DCHECK(!RelocInfo::IsCodeTarget(rmode));
Jump(reinterpret_cast<intptr_t>(target), rmode, cond, rs, rt, bd);
Jump(static_cast<intptr_t>(target), rmode, cond, rs, rt, bd);
}

void TurboAssembler::Jump(Handle<Code> code, RelocInfo::Mode rmode,
Condition cond, Register rs, const Operand& rt,
BranchDelaySlot bd) {
DCHECK(RelocInfo::IsCodeTarget(rmode));
Jump(reinterpret_cast<intptr_t>(code.address()), rmode, cond, rs, rt, bd);
Jump(static_cast<intptr_t>(code.address()), rmode, cond, rs, rt, bd);
}

int TurboAssembler::CallSize(Register target, Condition cond, Register rs,

@ -4178,8 +4178,7 @@ void TurboAssembler::Call(Address target, RelocInfo::Mode rmode, Condition cond,
BlockTrampolinePoolScope block_trampoline_pool(this);
Label start;
bind(&start);
int64_t target_int = reinterpret_cast<int64_t>(target);
li(t9, Operand(target_int, rmode), ADDRESS_LOAD);
li(t9, Operand(static_cast<int64_t>(target), rmode), ADDRESS_LOAD);
Call(t9, cond, rs, rt, bd);
DCHECK_EQ(CallSize(target, rmode, cond, rs, rt, bd),
SizeOfCodeGeneratedSince(&start));

@ -4883,8 +4882,7 @@ void MacroAssembler::JumpToExternalReference(const ExternalReference& builtin,
}

void MacroAssembler::JumpToInstructionStream(Address entry) {
li(kOffHeapTrampolineRegister,
Operand(reinterpret_cast<uint64_t>(entry), RelocInfo::OFF_HEAP_TARGET));
li(kOffHeapTrampolineRegister, Operand(entry, RelocInfo::OFF_HEAP_TARGET));
Jump(kOffHeapTrampolineRegister);
}
@ -861,7 +861,6 @@ Simulator* Simulator::current(Isolate* isolate) {
v8::internal::Isolate::PerIsolateThreadData* isolate_data =
isolate->FindOrAllocatePerThreadDataForThisThread();
DCHECK_NOT_NULL(isolate_data);
DCHECK_NOT_NULL(isolate_data);

Simulator* sim = isolate_data->simulator();
if (sim == nullptr) {

@ -2238,17 +2237,18 @@ void Simulator::SoftwareInterrupt() {
case ExternalReference::BUILTIN_FP_FP_CALL:
case ExternalReference::BUILTIN_COMPARE_CALL:
PrintF("Call to host function at %p with args %f, %f",
static_cast<void*>(FUNCTION_ADDR(generic_target)), dval0,
dval1);
reinterpret_cast<void*>(FUNCTION_ADDR(generic_target)),
dval0, dval1);
break;
case ExternalReference::BUILTIN_FP_CALL:
PrintF("Call to host function at %p with arg %f",
static_cast<void*>(FUNCTION_ADDR(generic_target)), dval0);
reinterpret_cast<void*>(FUNCTION_ADDR(generic_target)),
dval0);
break;
case ExternalReference::BUILTIN_FP_INT_CALL:
PrintF("Call to host function at %p with args %f, %d",
static_cast<void*>(FUNCTION_ADDR(generic_target)), dval0,
ival);
reinterpret_cast<void*>(FUNCTION_ADDR(generic_target)),
dval0, ival);
break;
default:
UNREACHABLE();

@ -2353,8 +2353,8 @@ void Simulator::SoftwareInterrupt() {
"args %08" PRIx64 " , %08" PRIx64 " , %08" PRIx64 " , %08" PRIx64
" , %08" PRIx64 " , %08" PRIx64 " , %08" PRIx64 " , %08" PRIx64
" , %08" PRIx64 " \n",
static_cast<void*>(FUNCTION_ADDR(target)), arg0, arg1, arg2, arg3,
arg4, arg5, arg6, arg7, arg8);
reinterpret_cast<void*>(FUNCTION_ADDR(target)), arg0, arg1, arg2,
arg3, arg4, arg5, arg6, arg7, arg8);
}
ObjectPair result =
target(arg0, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8);

@ -7327,13 +7327,12 @@ void Simulator::Execute() {
}
}

void Simulator::CallInternal(byte* entry) {
void Simulator::CallInternal(Address entry) {
// Adjust JS-based stack limit to C-based stack limit.
isolate_->stack_guard()->AdjustStackLimitForSimulator();

// Prepare to execute the code at entry.
set_register(pc, reinterpret_cast<int64_t>(entry));
set_register(pc, static_cast<int64_t>(entry));
// Put down marker for end of simulation. The simulator will stop simulation
// when the PC reaches this value. By saving the "end simulation" value into
// the LR the simulation stops when returning to this call point.

@ -7397,7 +7396,7 @@ void Simulator::CallInternal(byte* entry) {
set_register(fp, fp_val);
}

intptr_t Simulator::CallImpl(byte* entry, int argument_count,
intptr_t Simulator::CallImpl(Address entry, int argument_count,
const intptr_t* arguments) {
constexpr int kRegisterPassedArguments = 8;
// Set up arguments.

@ -7441,8 +7440,7 @@ intptr_t Simulator::CallImpl(byte* entry, int argument_count,
return get_register(v0);
}

double Simulator::CallFP(byte* entry, double d0, double d1) {
double Simulator::CallFP(Address entry, double d0, double d1) {
if (!IsMipsSoftFloatABI) {
const FPURegister fparg2 = f13;
set_fpu_register_double(f12, d0);

@ -228,9 +228,7 @@ class Simulator : public SimulatorBase {
void set_pc(int64_t value);
int64_t get_pc() const;

Address get_sp() const {
return reinterpret_cast<Address>(static_cast<intptr_t>(get_register(sp)));
}
Address get_sp() const { return static_cast<Address>(get_register(sp)); }

// Accessor to the internal simulator stack area.
uintptr_t StackLimit(uintptr_t c_limit) const;

@ -239,12 +237,12 @@ class Simulator : public SimulatorBase {
void Execute();

template <typename Return, typename... Args>
Return Call(byte* entry, Args... args) {
Return Call(Address entry, Args... args) {
return VariadicCall<Return>(this, &Simulator::CallImpl, entry, args...);
}

// Alternative: call a 2-argument double function.
double CallFP(byte* entry, double d0, double d1);
double CallFP(Address entry, double d0, double d1);

// Push an address onto the JS stack.
uintptr_t PushAddress(uintptr_t address);

@ -282,7 +280,7 @@ class Simulator : public SimulatorBase {
Unpredictable = 0xbadbeaf
};

V8_EXPORT_PRIVATE intptr_t CallImpl(byte* entry, int argument_count,
V8_EXPORT_PRIVATE intptr_t CallImpl(Address entry, int argument_count,
const intptr_t* arguments);

// Unsupported instructions use Format to print an error and stop execution.

@ -533,7 +531,7 @@ class Simulator : public SimulatorBase {
void GetFpArgs(double* x, double* y, int32_t* z);
void SetFpResult(const double& result);

void CallInternal(byte* entry);
void CallInternal(Address entry);

// Architecture state.
// Registers.
@ -22,9 +22,10 @@

// Marks a memory range as uninitialized, as if it was allocated here.
#define MSAN_ALLOCATED_UNINITIALIZED_MEMORY(p, s) \
__msan_allocated_memory((p), (s))
__msan_allocated_memory(reinterpret_cast<const void*>(p), (s))
// Marks a memory range as initialized.
#define MSAN_MEMORY_IS_INITIALIZED(p, s) __msan_unpoison((p), (s))
#define MSAN_MEMORY_IS_INITIALIZED(p, s) \
__msan_unpoison(reinterpret_cast<const void*>(p), (s))
#else
#define MSAN_ALLOCATED_UNINITIALIZED_MEMORY(p, s)
#define MSAN_MEMORY_IS_INITIALIZED(p, s)