// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_WASM_WASM_CODE_MANAGER_H_
#define V8_WASM_WASM_CODE_MANAGER_H_

#include <functional>
#include <list>
#include <map>
#include <unordered_map>

#include "src/base/macros.h"
#include "src/handles.h"
#include "src/trap-handler/trap-handler.h"
#include "src/vector.h"
#include "src/wasm/module-compiler.h"

namespace v8 {
class Isolate;
namespace internal {

struct CodeDesc;
class Code;
class WasmCompiledModule;

namespace wasm {

using GlobalHandleAddress = Address;
class NativeModule;
struct WasmModule;

struct AddressHasher {
  size_t operator()(const Address& addr) const {
    return std::hash<intptr_t>()(reinterpret_cast<intptr_t>(addr));
  }
};

// Sorted, disjoint memory ranges. A range is of the form [start, end).
// Adjacent ranges are coalesced: there is never a pair [start, end),
// [end, other_end), because it would have been reduced to [start, other_end).
using AddressRange = std::pair<Address, Address>;
class V8_EXPORT_PRIVATE DisjointAllocationPool final {
 public:
  enum ExtractionMode : bool { kAny = false, kContiguous = true };
  DisjointAllocationPool() {}

  explicit DisjointAllocationPool(Address, Address);

  DisjointAllocationPool(DisjointAllocationPool&& other) = default;
  DisjointAllocationPool& operator=(DisjointAllocationPool&& other) = default;

  // Merge the ranges of the parameter into this object. Ordering is
  // preserved. The assumption is that the passed parameter does not
  // intersect this object - for example, it was obtained from a previous
  // Allocate{Pool}.
  void Merge(DisjointAllocationPool&&);

  // Allocate a contiguous range of size {size}. Return an empty pool on
  // failure.
  DisjointAllocationPool Allocate(size_t size) {
    return Extract(size, kContiguous);
  }

  // Allocate a sub-pool of size {size}. Return an empty pool on failure.
  DisjointAllocationPool AllocatePool(size_t size) {
    return Extract(size, kAny);
  }

  bool IsEmpty() const { return ranges_.empty(); }
  const std::list<AddressRange>& ranges() const { return ranges_; }

 private:
  // Extract a total of {size} bytes. By default, the result may consist of
  // more than one range. If kContiguous is passed, the result will be a
  // single range. If the operation fails, this object is unchanged, and the
  // returned pool {IsEmpty()}.
  DisjointAllocationPool Extract(size_t size, ExtractionMode mode);

  std::list<AddressRange> ranges_;

  DISALLOW_COPY_AND_ASSIGN(DisjointAllocationPool);
};
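
// A minimal usage sketch of DisjointAllocationPool (illustrative only; the
// concrete addresses are made up):
//
//   DisjointAllocationPool pool(reinterpret_cast<Address>(0x1000),
//                               reinterpret_cast<Address>(0x3000));
//   // Carve out a contiguous 0x800-byte range: [0x1000, 0x1800).
//   DisjointAllocationPool chunk = pool.Allocate(0x800);
//   DCHECK(!chunk.IsEmpty());
//   // Returning the range re-coalesces it: pool is [0x1000, 0x3000) again.
//   pool.Merge(std::move(chunk));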

using ProtectedInstructions =
    std::vector<trap_handler::ProtectedInstructionData>;

class V8_EXPORT_PRIVATE WasmCode final {
 public:
  enum Kind {
    kFunction,
    kWasmToWasmWrapper,
    kWasmToJsWrapper,
    kLazyStub,
    kInterpreterStub,
    kCopiedStub,
    kTrampoline
  };

  // kOther is used if we have WasmCode that is neither liftoff- nor
  // turbofan-compiled, i.e. if its Kind is not kFunction.
  enum Tier : int32_t { kLiftoff, kTurbofan, kOther };

  Vector<byte> instructions() const { return instructions_; }
  Vector<const byte> reloc_info() const {
    return {reloc_info_.get(), reloc_size_};
  }

  uint32_t index() const { return index_.ToChecked(); }
  // Anonymous functions are functions that don't carry an index, like
  // trampolines.
  bool IsAnonymous() const { return index_.IsNothing(); }
  Kind kind() const { return kind_; }
  NativeModule* native_module() const { return native_module_; }
  Tier tier() const { return tier_; }
  Address constant_pool() const;
  size_t constant_pool_offset() const { return constant_pool_offset_; }
  size_t safepoint_table_offset() const { return safepoint_table_offset_; }
  size_t handler_table_offset() const { return handler_table_offset_; }
  uint32_t stack_slots() const { return stack_slots_; }
  bool is_liftoff() const { return tier_ == kLiftoff; }

  size_t trap_handler_index() const;
  void set_trap_handler_index(size_t);
  bool HasTrapHandlerIndex() const;
  void ResetTrapHandlerIndex();

  const ProtectedInstructions& protected_instructions() const {
    // TODO(mstarzinger): Code that doesn't have trapping instructions should
    // not be required to have this vector; make it possible for it to be
    // null.
    DCHECK_NOT_NULL(protected_instructions_);
    return *protected_instructions_.get();
  }

  void Print(Isolate* isolate) const;
  void Disassemble(const char* name, Isolate* isolate, std::ostream& os) const;

  static bool ShouldBeLogged(Isolate* isolate);
  void LogCode(Isolate* isolate) const;

  ~WasmCode();

  enum FlushICache : bool { kFlushICache = true, kNoFlushICache = false };

 private:
  friend class NativeModule;

  WasmCode(Vector<byte> instructions,
           std::unique_ptr<const byte[]>&& reloc_info, size_t reloc_size,
           NativeModule* native_module, Maybe<uint32_t> index, Kind kind,
           size_t constant_pool_offset, uint32_t stack_slots,
           size_t safepoint_table_offset, size_t handler_table_offset,
           std::shared_ptr<ProtectedInstructions> protected_instructions,
           Tier tier)
      : instructions_(instructions),
        reloc_info_(std::move(reloc_info)),
        reloc_size_(reloc_size),
        native_module_(native_module),
        index_(index),
        kind_(kind),
        constant_pool_offset_(constant_pool_offset),
        stack_slots_(stack_slots),
        safepoint_table_offset_(safepoint_table_offset),
        handler_table_offset_(handler_table_offset),
        protected_instructions_(std::move(protected_instructions)),
        tier_(tier) {
    DCHECK_LE(safepoint_table_offset, instructions.size());
    DCHECK_LE(constant_pool_offset, instructions.size());
    DCHECK_LE(handler_table_offset, instructions.size());
  }

  Vector<byte> instructions_;
  std::unique_ptr<const byte[]> reloc_info_;
  size_t reloc_size_ = 0;
  NativeModule* native_module_ = nullptr;
  Maybe<uint32_t> index_;
  Kind kind_;
  size_t constant_pool_offset_ = 0;
  uint32_t stack_slots_ = 0;
  // We care about safepoint data for wasm-to-js functions, since there may
  // be stack/register tagged values for large number conversions.
  size_t safepoint_table_offset_ = 0;
  size_t handler_table_offset_ = 0;
  intptr_t trap_handler_index_ = -1;
  std::shared_ptr<ProtectedInstructions> protected_instructions_;
  Tier tier_;

  DISALLOW_COPY_AND_ASSIGN(WasmCode);
};

// Return a textual description of the kind.
const char* GetWasmCodeKindAsString(WasmCode::Kind);
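
// Illustrative sketch of how a diagnostic helper might combine the accessors
// above ({DescribeCode} is hypothetical, not part of this header):
//
//   void DescribeCode(const WasmCode* code, std::ostream& os) {
//     os << GetWasmCodeKindAsString(code->kind());
//     if (!code->IsAnonymous()) os << " (function #" << code->index() << ")";
//     os << ", " << code->instructions().size() << " bytes"
//        << (code->is_liftoff() ? ", liftoff-compiled" : "") << "\n";
//   }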

class WasmCodeManager;

// Note that we currently need to add code on the main thread, because we may
// trigger a GC if we believe there's a chance the GC would clear up native
// modules. Otherwise, the code is ready for concurrency; we just need to be
// careful about this GC consideration. See WouldGCHelp and
// WasmCodeManager::Commit.
class V8_EXPORT_PRIVATE NativeModule final {
 public:
  // Helper class to selectively clone and patch code from a
  // {source_native_module} into a {cloning_native_module}.
  class CloneCodeHelper {
   public:
    explicit CloneCodeHelper(NativeModule* source_native_module,
                             NativeModule* cloning_native_module);

    void SelectForCloning(int32_t code_index);

    void CloneAndPatchCode(bool patch_stub_to_stub_calls);

   private:
    void PatchStubToStubCalls();

    NativeModule* source_native_module_;
    NativeModule* cloning_native_module_;
    std::vector<uint32_t> selection_;
    std::unordered_map<Address, Address, AddressHasher> reverse_lookup_;
  };
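
  // Sketch of the intended CloneCodeHelper flow (the function indices are
  // made up for illustration):
  //
  //   NativeModule::CloneCodeHelper helper(source_module, target_module);
  //   helper.SelectForCloning(0);
  //   helper.SelectForCloning(1);
  //   helper.CloneAndPatchCode(true);  // true: also patch stub-to-stub calls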

  std::unique_ptr<NativeModule> Clone();

  WasmCode* AddCode(const CodeDesc& desc, uint32_t frame_count, uint32_t index,
                    size_t safepoint_table_offset, size_t handler_table_offset,
                    std::unique_ptr<ProtectedInstructions>,
                    WasmCode::Tier tier);

  // A way to copy over JS-allocated code. This is because we compile
  // certain wrappers using a different pipeline.
  WasmCode* AddCodeCopy(Handle<Code> code, WasmCode::Kind kind, uint32_t index);

  // Add an interpreter wrapper. For the same reason as AddCodeCopy, we
  // currently compile these using a different pipeline and we can't get a
  // CodeDesc here. When adding interpreter wrappers, we do not insert them in
  // the code_table; however, we let them self-identify as the {index}
  // function.
  WasmCode* AddInterpreterWrapper(Handle<Code> code, uint32_t index);

  // When starting lazy compilation, provide the WasmLazyCompile builtin by
  // calling SetLazyBuiltin. It will initialize the code table with it. Copies
  // of it might be cloned later when creating entries for exported functions
  // and indirect callable functions, so that they may be identified by the
  // runtime.
  void SetLazyBuiltin(Handle<Code> code);

  // FunctionCount is WasmModule::functions.size().
  uint32_t FunctionCount() const;
  WasmCode* GetCode(uint32_t index) const;

  // We special-case lazy cloning because we currently rely on making copies
  // of the lazy builtin, to be able to identify, in the runtime, which
  // function the lazy builtin is a placeholder of. If we used trampolines,
  // we would call the runtime function from a common pc. We could then
  // figure out who the caller was if the trampolines called rather than
  // jumped to the common builtin. The logic for seeking through frames would
  // change, though.
  // TODO(mtrofin): perhaps we can do exactly that - either before or after
  // this change.
  WasmCode* CloneLazyBuiltinInto(const WasmCode* code, uint32_t index,
                                 WasmCode::FlushICache);
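
  // Illustrative sketch of the lazy-compilation setup described above
  // ({lazy_compile_builtin} and the function index 5 are made-up
  // placeholders, not part of this interface):
  //
  //   native_module->SetLazyBuiltin(lazy_compile_builtin);  // fills table
  //   // Give exported function 5 its own identifiable copy:
  //   WasmCode* lazy = native_module->GetCode(5);
  //   native_module->CloneLazyBuiltinInto(lazy, 5, WasmCode::kFlushICache);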

  bool SetExecutable(bool executable);

  // For cctests, where we build both WasmModule and the runtime objects
  // on the fly, and bypass the instance builder pipeline.
  void ResizeCodeTableForTest(size_t);

  CompilationState* compilation_state() { return compilation_state_.get(); }

  // TODO(mstarzinger): needed until we sort out source positions, which are
  // still on the GC-heap.
  WasmCompiledModule* compiled_module() const;
  void SetCompiledModule(Handle<WasmCompiledModule>);

  uint32_t num_imported_functions() const { return num_imported_functions_; }

  size_t committed_memory() const { return committed_memory_; }
  const size_t instance_id = 0;
  ~NativeModule();

 private:
  friend class WasmCodeManager;
  friend class NativeModuleSerializer;
  friend class NativeModuleDeserializer;
  friend class NativeModuleModificationScope;

  static base::AtomicNumber<size_t> next_id_;
  NativeModule(uint32_t num_functions, uint32_t num_imports,
               bool can_request_more, VirtualMemory* vmem,
               WasmCodeManager* code_manager);

  WasmCode* AddAnonymousCode(Handle<Code>, WasmCode::Kind kind);
  Address AllocateForCode(size_t size);

  // Primitive for adding code to the native module. All code added to a
  // native module is owned by that module. Various callers get to decide on
  // how the code is obtained (CodeDesc vs, as a point in time, Code*), the
  // kind, whether it has an index or is anonymous, etc.
  WasmCode* AddOwnedCode(Vector<const byte> orig_instructions,
                         std::unique_ptr<const byte[]> reloc_info,
                         size_t reloc_size, Maybe<uint32_t> index,
                         WasmCode::Kind kind, size_t constant_pool_offset,
                         uint32_t stack_slots, size_t safepoint_table_offset,
                         size_t handler_table_offset,
                         std::shared_ptr<ProtectedInstructions>,
                         WasmCode::Tier, WasmCode::FlushICache);
  WasmCode* CloneCode(const WasmCode*, WasmCode::FlushICache);
  void CloneTrampolinesAndStubs(const NativeModule* other,
                                WasmCode::FlushICache);
  WasmCode* Lookup(Address);
  Address GetLocalAddressFor(Handle<Code>);
  Address CreateTrampolineTo(Handle<Code>);

  // Holds all allocated code objects. Maintained in ascending order of the
  // code's instruction start address to allow lookups.
  std::vector<std::unique_ptr<WasmCode>> owned_code_;

  std::vector<WasmCode*> code_table_;
  uint32_t num_imported_functions_;

  // Maps from instruction start of an immovable code object to instruction
  // start of the trampoline.
  std::unordered_map<Address, Address, AddressHasher> trampolines_;

  // Maps from stub key to wasm code (containing a copy of that stub).
  std::unordered_map<uint32_t, WasmCode*> stubs_;

  std::unique_ptr<CompilationState, CompilationStateDeleter> compilation_state_;

  DisjointAllocationPool free_memory_;
  DisjointAllocationPool allocated_memory_;
  std::list<VirtualMemory> owned_memory_;
  WasmCodeManager* wasm_code_manager_;
  base::Mutex allocation_mutex_;
  Handle<WasmCompiledModule> compiled_module_;
  size_t committed_memory_ = 0;
  bool can_request_more_memory_;
  bool is_executable_ = false;
  int modification_scope_depth_ = 0;

  DISALLOW_COPY_AND_ASSIGN(NativeModule);
};

class V8_EXPORT_PRIVATE WasmCodeManager final {
 public:
  // The only reason we depend on Isolate is to report native memory used
  // and held by a GC-ed object. We'll need to mitigate that when we
  // start sharing wasm heaps.
  WasmCodeManager(v8::Isolate*, size_t max_committed);
  // Create a new NativeModule. The caller is responsible for its
  // lifetime. The native module will be given some memory for code,
  // which will be page size aligned. The size of the initial memory
  // is determined with a heuristic based on the total size of wasm
  // code. The native module may later request more memory.
  std::unique_ptr<NativeModule> NewNativeModule(const WasmModule&);
  std::unique_ptr<NativeModule> NewNativeModule(size_t memory_estimate,
                                                uint32_t num_functions,
                                                uint32_t num_imported_functions,
                                                bool can_request_more);

  WasmCode* LookupCode(Address pc) const;
  WasmCode* GetCodeFromStartAddress(Address pc) const;
  intptr_t remaining_uncommitted() const;

 private:
  friend class NativeModule;

  void TryAllocate(size_t size, VirtualMemory*, void* hint = nullptr);
  bool Commit(Address, size_t);
  // Currently, we uncommit a whole module, so all we need to do is account
  // for the freed memory size. We do that in FreeNativeModuleMemories.
  // There's no separate Uncommit.
  void FreeNativeModuleMemories(NativeModule*);
  void Free(VirtualMemory* mem);
  void AssignRanges(void* start, void* end, NativeModule*);
  size_t GetAllocationChunk(const WasmModule& module);
  bool WouldGCHelp() const;

  std::map<Address, std::pair<Address, NativeModule*>> lookup_map_;
  // Count of NativeModules not yet collected. Helps determine if it's
  // worth requesting a GC on memory pressure.
  size_t active_ = 0;
  base::AtomicNumber<intptr_t> remaining_uncommitted_;

  // TODO(mtrofin): remove the dependency on isolate.
  v8::Isolate* isolate_;

  DISALLOW_COPY_AND_ASSIGN(WasmCodeManager);
};
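
// Illustrative end-to-end sketch (the commit budget and {some_pc} are made
// up; {module} is an existing WasmModule):
//
//   WasmCodeManager manager(isolate, 256 * MB);  // commit budget for code
//   std::unique_ptr<NativeModule> native_module =
//       manager.NewNativeModule(module);
//   // ... compile and add code to {native_module} ...
//   WasmCode* code = manager.LookupCode(some_pc);  // nullptr if not wasm pc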

// Within the scope, the native_module is writable and not executable.
// At the scope's destruction, the native_module is executable and not
// writable. The states inside the scope and at the scope's termination are
// set irrespective of the native_module's state when entering the scope.
// We currently mark the entire module's memory W^X:
//  - for AOT, that's as efficient as it can be.
//  - for Lazy, we don't have a heuristic for functions that may need
//    patching, and even if we did, the resulting set of pages may be
//    fragmented. Currently, we try to keep the number of syscalls low.
//  - similar argument for debug time.
class NativeModuleModificationScope final {
 public:
  explicit NativeModuleModificationScope(NativeModule* native_module);
  ~NativeModuleModificationScope();

 private:
  NativeModule* native_module_;
};
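
// Minimal RAII usage sketch ({PatchCode} is a hypothetical helper):
//
//   {
//     NativeModuleModificationScope scope(native_module);
//     PatchCode(native_module);  // pages are writable, not executable
//   }
//   // Scope destroyed: pages are executable again, and no longer writable.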

}  // namespace wasm
}  // namespace internal
}  // namespace v8

#endif  // V8_WASM_WASM_CODE_MANAGER_H_