// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_WASM_WASM_CODE_MANAGER_H_
#define V8_WASM_WASM_CODE_MANAGER_H_

#include <functional>
#include <list>
#include <map>
#include <unordered_map>
#include <unordered_set>

#include "src/base/macros.h"
#include "src/handles.h"
#include "src/trap-handler/trap-handler.h"
#include "src/vector.h"
#include "src/wasm/module-compiler.h"

namespace v8 {
namespace internal {

struct CodeDesc;
class Code;

namespace wasm {

class NativeModule;
class WasmCodeManager;
struct WasmModule;

// Convenience macro listing all wasm runtime stubs. Note that the first few
// elements of the list coincide with {compiler::TrapId}, order matters.
#define WASM_RUNTIME_STUB_LIST(V, VTRAP) \
  FOREACH_WASM_TRAPREASON(VTRAP)         \
  V(WasmAllocateHeapNumber)              \
  V(WasmArgumentsAdaptor)                \
  V(WasmCallJavaScript)                  \
  V(WasmGrowMemory)                      \
  V(WasmStackGuard)                      \
  V(WasmToNumber)                        \
  V(DoubleToI)

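// Expansion sketch: the {WasmCode::RuntimeStubId} enum below instantiates
// this list as WASM_RUNTIME_STUB_LIST(DEF_ENUM, DEF_ENUM_TRAP), where
// DEF_ENUM(Name) expands to k##Name and DEF_ENUM_TRAP(Name) to
// kThrowWasm##Name, yielding the trap ids first (matching
// {compiler::TrapId}), followed by kWasmAllocateHeapNumber ... kDoubleToI.
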
struct AddressRange {
  Address start;
  Address end;

  AddressRange(Address s, Address e) : start(s), end(e) {
    DCHECK_LE(start, end);
    DCHECK_IMPLIES(start == kNullAddress, end == kNullAddress);
  }
  AddressRange() : AddressRange(kNullAddress, kNullAddress) {}

  size_t size() const { return static_cast<size_t>(end - start); }
  bool is_empty() const { return start == end; }
  // A range is truthy iff it has a non-null start address.
  operator bool() const { return start != kNullAddress; }
};

// Sorted, disjoint and non-overlapping memory ranges. A range is of the
// form [start, end). So there's no [start, end), [end, other_end),
// because that should have been reduced to [start, other_end).
class V8_EXPORT_PRIVATE DisjointAllocationPool final {
 public:
  DisjointAllocationPool() = default;

  explicit DisjointAllocationPool(AddressRange range) : ranges_({range}) {}

  DisjointAllocationPool(DisjointAllocationPool&& other) = default;
  DisjointAllocationPool& operator=(DisjointAllocationPool&& other) = default;

  // Merge the parameter range into this object while preserving ordering of
  // the ranges. The assumption is that the passed parameter is not
  // intersecting this object - for example, it was obtained from a previous
  // Allocate.
  void Merge(AddressRange);

  // Allocate a contiguous range of size {size}. Return an empty range on
  // failure.
  AddressRange Allocate(size_t size);

  bool IsEmpty() const { return ranges_.empty(); }
  const std::list<AddressRange>& ranges() const { return ranges_; }

 private:
  std::list<AddressRange> ranges_;

  DISALLOW_COPY_AND_ASSIGN(DisjointAllocationPool);
};

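// A minimal usage sketch (hypothetical addresses; not code from this file):
//
//   DisjointAllocationPool pool(AddressRange{0x1000, 0x3000});
//   AddressRange a = pool.Allocate(0x800);  // e.g. [0x1000, 0x1800)
//   AddressRange b = pool.Allocate(0x800);  // e.g. [0x1800, 0x2000)
//   pool.Merge(a);  // [0x1000, 0x1800) becomes available again; adjacent
//                   // free ranges are coalesced to keep the pool reduced.
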
class V8_EXPORT_PRIVATE WasmCode final {
 public:
  enum Kind {
    kFunction,
    kWasmToJsWrapper,
    kLazyStub,
    kRuntimeStub,
    kInterpreterEntry,
    kJumpTable
  };

  // Each runtime stub is identified by an id. This id is used to reference the
  // stub via {RelocInfo::WASM_STUB_CALL} and gets resolved during relocation.
  enum RuntimeStubId {
#define DEF_ENUM(Name) k##Name,
#define DEF_ENUM_TRAP(Name) kThrowWasm##Name,
    WASM_RUNTIME_STUB_LIST(DEF_ENUM, DEF_ENUM_TRAP)
#undef DEF_ENUM_TRAP
#undef DEF_ENUM
    kRuntimeStubCount
  };

  // kOther is used if we have WasmCode that is neither Liftoff- nor
  // TurboFan-compiled, i.e. if its Kind is not kFunction.
  enum Tier : int8_t { kLiftoff, kTurbofan, kOther };

  Vector<byte> instructions() const { return instructions_; }
  Address instruction_start() const {
    return reinterpret_cast<Address>(instructions_.start());
  }
  Vector<const byte> reloc_info() const { return reloc_info_.as_vector(); }
  Vector<const byte> source_positions() const {
    return source_position_table_.as_vector();
  }

  uint32_t index() const { return index_.ToChecked(); }
  // Anonymous functions are functions that don't carry an index.
  bool IsAnonymous() const { return index_.IsNothing(); }
  Kind kind() const { return kind_; }
  NativeModule* native_module() const { return native_module_; }
  Tier tier() const { return tier_; }
  Address constant_pool() const;
  size_t constant_pool_offset() const { return constant_pool_offset_; }
  size_t safepoint_table_offset() const { return safepoint_table_offset_; }
  size_t handler_table_offset() const { return handler_table_offset_; }
  uint32_t stack_slots() const { return stack_slots_; }
  bool is_liftoff() const { return tier_ == kLiftoff; }
  bool contains(Address pc) const {
    return reinterpret_cast<Address>(instructions_.start()) <= pc &&
           pc < reinterpret_cast<Address>(instructions_.end());
  }

  Vector<trap_handler::ProtectedInstructionData> protected_instructions()
      const {
    return protected_instructions_.as_vector();
  }

  void Validate() const;
  void Print(const char* name = nullptr) const;
  void Disassemble(const char* name, std::ostream& os,
                   Address current_pc = kNullAddress) const;

  static bool ShouldBeLogged(Isolate* isolate);
  void LogCode(Isolate* isolate) const;

  ~WasmCode();

  enum FlushICache : bool { kFlushICache = true, kNoFlushICache = false };

 private:
  friend class NativeModule;

  WasmCode(NativeModule* native_module, Maybe<uint32_t> index,
           Vector<byte> instructions, uint32_t stack_slots,
           size_t safepoint_table_offset, size_t handler_table_offset,
           size_t constant_pool_offset,
           OwnedVector<trap_handler::ProtectedInstructionData>
               protected_instructions,
           OwnedVector<const byte> reloc_info,
           OwnedVector<const byte> source_position_table, Kind kind, Tier tier)
      : instructions_(instructions),
        reloc_info_(std::move(reloc_info)),
        source_position_table_(std::move(source_position_table)),
        native_module_(native_module),
        index_(index),
        kind_(kind),
        constant_pool_offset_(constant_pool_offset),
        stack_slots_(stack_slots),
        safepoint_table_offset_(safepoint_table_offset),
        handler_table_offset_(handler_table_offset),
        protected_instructions_(std::move(protected_instructions)),
        tier_(tier) {
    DCHECK_LE(safepoint_table_offset, instructions.size());
    DCHECK_LE(constant_pool_offset, instructions.size());
    DCHECK_LE(handler_table_offset, instructions.size());
  }

  // Code objects that have been registered with the global trap handler within
  // this process will have a {trap_handler_index} associated with them.
  size_t trap_handler_index() const;
  void set_trap_handler_index(size_t);
  bool HasTrapHandlerIndex() const;

  // Register protected instruction information with the trap handler. Sets
  // trap_handler_index.
  void RegisterTrapHandlerData();

  Vector<byte> instructions_;
  OwnedVector<const byte> reloc_info_;
  OwnedVector<const byte> source_position_table_;
  NativeModule* native_module_ = nullptr;
  Maybe<uint32_t> index_;
  Kind kind_;
  size_t constant_pool_offset_ = 0;
  uint32_t stack_slots_ = 0;
  // We care about safepoint data for wasm-to-js functions, since there may be
  // stack/register tagged values for large number conversions.
  size_t safepoint_table_offset_ = 0;
  size_t handler_table_offset_ = 0;
  intptr_t trap_handler_index_ = -1;
  OwnedVector<trap_handler::ProtectedInstructionData> protected_instructions_;
  Tier tier_;

  DISALLOW_COPY_AND_ASSIGN(WasmCode);
};

// Return a textual description of the kind.
const char* GetWasmCodeKindAsString(WasmCode::Kind);

class V8_EXPORT_PRIVATE NativeModule final {
 public:
#if V8_TARGET_ARCH_X64 || V8_TARGET_ARCH_S390X || V8_TARGET_ARCH_ARM64
  static constexpr bool kCanAllocateMoreMemory = false;
#else
  static constexpr bool kCanAllocateMoreMemory = true;
#endif

  // {AddCode} is thread-safe w.r.t. other calls to {AddCode} or {AddCodeCopy},
  // i.e. it can be called concurrently from background threads.
  WasmCode* AddCode(uint32_t index, const CodeDesc& desc, uint32_t stack_slots,
                    size_t safepoint_table_offset, size_t handler_table_offset,
                    OwnedVector<trap_handler::ProtectedInstructionData>
                        protected_instructions,
                    OwnedVector<const byte> source_position_table,
                    WasmCode::Tier tier);

  WasmCode* AddDeserializedCode(
      uint32_t index, Vector<const byte> instructions, uint32_t stack_slots,
      size_t safepoint_table_offset, size_t handler_table_offset,
      size_t constant_pool_offset,
      OwnedVector<trap_handler::ProtectedInstructionData>
          protected_instructions,
      OwnedVector<const byte> reloc_info,
      OwnedVector<const byte> source_position_table, WasmCode::Tier tier);

  // A way to copy over JS-allocated code. This is because we compile
  // certain wrappers using a different pipeline.
  WasmCode* AddCodeCopy(Handle<Code> code, WasmCode::Kind kind, uint32_t index);

  // Add an interpreter entry. For the same reason as {AddCodeCopy}, we
  // currently compile these using a different pipeline and we can't get a
  // CodeDesc here. When adding interpreter wrappers, we do not insert them
  // in the code_table; however, we let them self-identify as the {index}
  // function.
  WasmCode* AddInterpreterEntry(Handle<Code> code, uint32_t index);

  // When starting lazy compilation, provide the WasmLazyCompile builtin by
  // calling SetLazyBuiltin. It will be copied into this NativeModule and the
  // jump table will be populated with that copy.
  void SetLazyBuiltin(Handle<Code> code);

  // Initializes all runtime stubs by copying them over from the JS-allocated
  // heap into this native module. It must be called exactly once per native
  // module before adding other WasmCode so that runtime stub ids can be
  // resolved during relocation.
  void SetRuntimeStubs(Isolate* isolate);

  // Makes the code available to the system (by entering it into the code table
  // and patching the jump table). Callers have to take care not to race with
  // threads executing the old code.
  void PublishCode(WasmCode* code);

  WasmCode* code(uint32_t index) const {
    DCHECK_LT(index, num_functions());
    DCHECK_LE(module_->num_imported_functions, index);
    return code_table_[index - module_->num_imported_functions];
  }

  bool has_code(uint32_t index) const { return code(index) != nullptr; }

  WasmCode* runtime_stub(WasmCode::RuntimeStubId index) const {
    DCHECK_LT(index, WasmCode::kRuntimeStubCount);
    WasmCode* code = runtime_stub_table_[index];
    DCHECK_NOT_NULL(code);
    return code;
  }

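  // Usage sketch (hypothetical caller code): stubs are addressed by their
  // {WasmCode::RuntimeStubId}, e.g.
  //   WasmCode* stub = native_module->runtime_stub(WasmCode::kWasmStackGuard);
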
  Address jump_table_start() const {
    return jump_table_ ? jump_table_->instruction_start() : kNullAddress;
  }

  bool is_jump_table_slot(Address address) const {
    return jump_table_->contains(address);
  }

  uint32_t GetFunctionIndexFromJumpTableSlot(Address slot_address) const;

  // Transition this module from code relying on trap handlers (i.e. without
  // explicit memory bounds checks) to code that does not require trap handlers
  // (i.e. code with explicit bounds checks).
  // This method must only be called if {use_trap_handler()} is true (it will
  // be false afterwards). All code in this {NativeModule} needs to be re-added
  // after calling this method.
  void DisableTrapHandler();

  // Returns the target to call for the given function (returns a jump table
  // slot within {jump_table_}).
  Address GetCallTargetForFunction(uint32_t func_index) const;

  bool SetExecutable(bool executable);

  // For cctests, where we build both WasmModule and the runtime objects
  // on the fly, and bypass the instance builder pipeline.
  void ReserveCodeTableForTesting(uint32_t max_functions);

  void LogWasmCodes(Isolate* isolate);

  CompilationState* compilation_state() { return compilation_state_.get(); }

  uint32_t num_functions() const {
    return module_->num_declared_functions + module_->num_imported_functions;
  }
  uint32_t num_imported_functions() const {
    return module_->num_imported_functions;
  }
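  // Illustration (hypothetical module with 2 imports and 3 declared
  // functions): function indices 0 and 1 are imports and have no entry in
  // {code_table_}; indices 2..4 map to {code_table_}[0..2].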
  Vector<WasmCode*> code_table() const {
    return {code_table_.get(), module_->num_declared_functions};
  }
  bool use_trap_handler() const { return use_trap_handler_; }
  void set_lazy_compile_frozen(bool frozen) { lazy_compile_frozen_ = frozen; }
  bool lazy_compile_frozen() const { return lazy_compile_frozen_; }
  Vector<const byte> wire_bytes() const { return wire_bytes_.as_vector(); }
  void set_wire_bytes(OwnedVector<const byte> wire_bytes) {
    wire_bytes_ = std::move(wire_bytes);
  }
  const WasmModule* module() const { return module_.get(); }
  WasmCodeManager* code_manager() const { return wasm_code_manager_; }

  WasmCode* Lookup(Address) const;

  ~NativeModule();

 private:
  friend class WasmCode;
  friend class WasmCodeManager;
  friend class NativeModuleModificationScope;

  NativeModule(Isolate* isolate, bool can_request_more,
               VirtualMemory* code_space, WasmCodeManager* code_manager,
               std::shared_ptr<const WasmModule> module, const ModuleEnv& env);

  WasmCode* AddAnonymousCode(Handle<Code>, WasmCode::Kind kind);
  Address AllocateForCode(size_t size);

  // Primitive for adding code to the native module. All code added to a native
  // module is owned by that module. Various callers get to decide how the code
  // is obtained (a CodeDesc vs. a point-in-time copy of a Code object), its
  // kind, and whether it carries an index or is anonymous.
  WasmCode* AddOwnedCode(Maybe<uint32_t> index, Vector<const byte> instructions,
                         uint32_t stack_slots, size_t safepoint_table_offset,
                         size_t handler_table_offset,
                         size_t constant_pool_offset,
                         OwnedVector<trap_handler::ProtectedInstructionData>,
                         OwnedVector<const byte> reloc_info,
                         OwnedVector<const byte> source_position_table,
                         WasmCode::Kind, WasmCode::Tier);

  WasmCode* CreateEmptyJumpTable(uint32_t num_wasm_functions);

  void PatchJumpTable(uint32_t func_index, Address target,
                      WasmCode::FlushICache);

  void set_code(uint32_t index, WasmCode* code) {
    DCHECK_LT(index, num_functions());
    DCHECK_LE(module_->num_imported_functions, index);
    DCHECK_EQ(code->index(), index);
    code_table_[index - module_->num_imported_functions] = code;
  }

  // TODO(clemensh): Make this a unique_ptr (requires refactoring
  // AsyncCompileJob).
  std::shared_ptr<const WasmModule> module_;

  // Holds all allocated code objects. The vector is maintained in ascending
  // order of each code object's instruction start address to allow lookups.
  std::vector<std::unique_ptr<WasmCode>> owned_code_;

  std::unique_ptr<WasmCode* []> code_table_;

  OwnedVector<const byte> wire_bytes_;

  WasmCode* runtime_stub_table_[WasmCode::kRuntimeStubCount] = {nullptr};

  // Jump table used to easily redirect wasm function calls.
  WasmCode* jump_table_ = nullptr;

  // The compilation state keeps track of compilation tasks for this module.
  // Note that its destructor blocks until all tasks are finished/aborted and
  // hence needs to be destructed first when this native module dies.
  std::unique_ptr<CompilationState, CompilationStateDeleter> compilation_state_;

  // This mutex protects concurrent calls to {AddCode} and {AddCodeCopy}.
  mutable base::Mutex allocation_mutex_;

  DisjointAllocationPool free_code_space_;
  DisjointAllocationPool allocated_code_space_;
  std::list<VirtualMemory> owned_code_space_;

2017-11-20 21:34:04 +00:00
|
|
|
WasmCodeManager* wasm_code_manager_;
|
2018-07-24 15:58:31 +00:00
|
|
|
std::atomic<size_t> committed_code_space_{0};
|
2018-04-30 14:47:44 +00:00
|
|
|
int modification_scope_depth_ = 0;
|
2017-11-20 21:34:04 +00:00
|
|
|
bool can_request_more_memory_;
|
2018-06-11 15:16:26 +00:00
|
|
|
bool use_trap_handler_ = false;
|
2017-12-04 16:41:22 +00:00
|
|
|
bool is_executable_ = false;
|
2018-04-30 14:47:44 +00:00
|
|
|
bool lazy_compile_frozen_ = false;
|
2018-03-21 13:00:04 +00:00
|
|
|
|
|
|
|
DISALLOW_COPY_AND_ASSIGN(NativeModule);
|
2017-11-20 21:34:04 +00:00
|
|
|
};
|
|
|
|
|
|
|
|
class V8_EXPORT_PRIVATE WasmCodeManager final {
 public:
  explicit WasmCodeManager(size_t max_committed);
  // Create a new NativeModule. The caller is responsible for its
  // lifetime. The native module will be given some memory for code,
  // which will be page-size aligned. The size of the initial memory
  // is determined by a heuristic based on the total size of wasm
  // code. The native module may later request more memory.
  // TODO(titzer): isolate is only required here for CompilationState.
  std::unique_ptr<NativeModule> NewNativeModule(
      Isolate* isolate, size_t memory_estimate, bool can_request_more,
      std::shared_ptr<const WasmModule> module, const ModuleEnv& env);

  NativeModule* LookupNativeModule(Address pc) const;
  WasmCode* LookupCode(Address pc) const;
  WasmCode* GetCodeFromStartAddress(Address pc) const;
  size_t remaining_uncommitted_code_space() const;

  // Add a sample of all module sizes.
  void SampleModuleSizes(Isolate* isolate) const;

  // TODO(v8:7424): For now we sample module sizes in a GC callback. This will
  // bias samples towards apps with high memory pressure. We should switch to
  // using sampling based on regular intervals independent of the GC.
  static void InstallSamplingGCCallback(Isolate* isolate);

  static size_t EstimateNativeModuleSize(const WasmModule* module);

 private:
  friend class NativeModule;

  void TryAllocate(size_t size, VirtualMemory*, void* hint = nullptr);
  bool Commit(Address, size_t);
  // Currently, we uncommit a whole module, so all we need to do is account
  // for the freed memory size. We do that in {FreeNativeModule}.
  // There's no separate Uncommit.

  void FreeNativeModule(NativeModule*);
  void Free(VirtualMemory* mem);
  void AssignRanges(Address start, Address end, NativeModule*);
  bool ShouldForceCriticalMemoryPressureNotification();

  mutable base::Mutex native_modules_mutex_;
  std::map<Address, std::pair<Address, NativeModule*>> lookup_map_;
  std::unordered_set<NativeModule*> native_modules_;
  std::atomic<size_t> remaining_uncommitted_code_space_;

  DISALLOW_COPY_AND_ASSIGN(WasmCodeManager);
};

// Within the scope, the native_module is writable and not executable.
// At the scope's destruction, the native_module is executable and not
// writable.
// The states inside the scope and at the scope's termination are independent
// of the native_module's state when entering the scope.
// We currently mark the entire module's memory W^X:
//  - for AOT, that's as efficient as it can be.
//  - for Lazy, we don't have a heuristic for functions that may need patching,
//    and even if we did, the resulting set of pages may be fragmented.
//    Currently, we try to keep the number of syscalls low.
//  - similar argument for debug time.
class NativeModuleModificationScope final {
 public:
  explicit NativeModuleModificationScope(NativeModule* native_module);
  ~NativeModuleModificationScope();

 private:
  NativeModule* native_module_;
};
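
// A minimal usage sketch (hypothetical caller code):
//
//   void PatchModule(NativeModule* native_module) {
//     NativeModuleModificationScope scope(native_module);
//     // Module code is writable (and not executable) here.
//   }  // Leaving the scope makes the code executable and non-writable again.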

}  // namespace wasm
}  // namespace internal
}  // namespace v8

#endif  // V8_WASM_WASM_CODE_MANAGER_H_