[wasm] Improve module code size sampling approach.

This samples module code sizes at GC time instead of during destruction.
It hence ensures that we also receive samples for long-lived modules,
which would otherwise die with the Isolate and never be finalized. Note
that this approach is still biased and only a stop-gap until we have a
sampling tick based on actual wall-clock time.

R=clemensh@chromium.org

Change-Id: I9558d383a5aada8876bc9cbf63baca771dbe5c28
Reviewed-on: https://chromium-review.googlesource.com/1141866
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Reviewed-by: Clemens Hammacher <clemensh@chromium.org>
Commit-Queue: Michael Starzinger <mstarzinger@chromium.org>
Cr-Commit-Position: refs/heads/master@{#54554}
Author: Michael Starzinger, 2018-07-19 10:16:51 +02:00 (committed by Commit Bot)
commit 0f2d22dd22, parent 6252c1aacd
3 changed files with 38 additions and 21 deletions
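In essence, the change stops recording a wasm_module_code_size_mb sample when a NativeModule is destroyed and instead registers a mark-compact GC epilogue callback that samples every live module. A condensed sketch of the new sampling path (illustrative only; the names mirror the hunks below, and the callback is written inline here rather than as the named helper the actual patch uses):

    // Record the committed code size (in MB) of every live NativeModule.
    void WasmCodeManager::SampleModuleSizes(Isolate* isolate) const {
      for (NativeModule* native_module : native_modules_) {
        int code_size_mb =
            static_cast<int>(native_module->committed_code_space_ / MB);
        isolate->counters()->wasm_module_code_size_mb()->AddSample(code_size_mb);
      }
    }

    // Installed once per Isolate; runs after every full (mark-compact) GC.
    void WasmCodeManager::InstallSamplingGCCallback(Isolate* isolate) {
      isolate->heap()->AddGCEpilogueCallback(
          [](v8::Isolate* v8_isolate, v8::GCType, v8::GCCallbackFlags, void*) {
            Isolate* isolate = reinterpret_cast<Isolate*>(v8_isolate);
            isolate->wasm_engine()->code_manager()->SampleModuleSizes(isolate);
          },
          v8::kGCTypeMarkSweepCompact, nullptr);
    }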

@@ -2969,8 +2969,7 @@ bool Isolate::Init(StartupDeserializer* des) {
wasm_engine_.reset(
new wasm::WasmEngine(std::unique_ptr<wasm::WasmCodeManager>(
new wasm::WasmCodeManager(kMaxWasmCodeMemory))));
wasm_engine_->code_manager()->SetModuleCodeSizeHistogram(
counters()->wasm_module_code_size_mb());
wasm::WasmCodeManager::InstallSamplingGCCallback(this);
}
deoptimizer_data_ = new DeoptimizerData(heap());
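The Isolate::Init hunk above drops the direct histogram wiring and replaces it with a single static call that installs the GC callback. Embedders can hook the same GC epilogue point through the public v8::Isolate API; a minimal sketch, assuming a hypothetical MyMetrics sink (RecordSizeSample is not a V8 API):

    #include "v8.h"

    struct MyMetrics {
      // Placeholder for whatever metric backend the embedder uses.
      void RecordSizeSample(v8::Isolate* /*isolate*/) { /* record sizes here */ }
    };

    // Runs after every full (mark-compact) GC; `data` is the MyMetrics pointer
    // passed at registration time.
    void SampleAfterFullGC(v8::Isolate* isolate, v8::GCType /*type*/,
                           v8::GCCallbackFlags /*flags*/, void* data) {
      static_cast<MyMetrics*>(data)->RecordSizeSample(isolate);
    }

    void InstallSampling(v8::Isolate* isolate, MyMetrics* metrics) {
      isolate->AddGCEpilogueCallback(SampleAfterFullGC, metrics,
                                     v8::kGCTypeMarkSweepCompact);
    }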

@@ -793,6 +793,29 @@ void WasmCodeManager::TryAllocate(size_t size, VirtualMemory* ret, void* hint) {
reinterpret_cast<void*>(ret->end()), ret->size());
}
void WasmCodeManager::SampleModuleSizes(Isolate* isolate) const {
for (NativeModule* native_module : native_modules_) {
int code_size = static_cast<int>(native_module->committed_code_space_ / MB);
isolate->counters()->wasm_module_code_size_mb()->AddSample(code_size);
}
}
namespace {
void ModuleSamplingCallback(v8::Isolate* v8_isolate, v8::GCType type,
v8::GCCallbackFlags flags, void* data) {
Isolate* isolate = reinterpret_cast<Isolate*>(v8_isolate);
isolate->wasm_engine()->code_manager()->SampleModuleSizes(isolate);
}
} // namespace
// static
void WasmCodeManager::InstallSamplingGCCallback(Isolate* isolate) {
isolate->heap()->AddGCEpilogueCallback(ModuleSamplingCallback,
v8::kGCTypeMarkSweepCompact, nullptr);
}
// static
size_t WasmCodeManager::EstimateNativeModuleSize(const WasmModule* module) {
constexpr size_t kCodeSizeMultiplier = 4;
@@ -825,7 +848,7 @@ std::unique_ptr<NativeModule> WasmCodeManager::NewNativeModule(
// all isolates, at the point of commit.
constexpr size_t kCriticalThreshold = 32 * 1024 * 1024;
bool force_critical_notification =
(active_ > 1) &&
(native_modules_.size() > 1) &&
(remaining_uncommitted_code_space_.load() < kCriticalThreshold);
if (force_critical_notification) {
@@ -846,7 +869,7 @@ std::unique_ptr<NativeModule> WasmCodeManager::NewNativeModule(
TRACE_HEAP("New NativeModule %p: Mem: %" PRIuPTR ",+%zu\n", this, start,
size);
AssignRanges(start, end, ret.get());
++active_;
native_modules_.emplace(ret.get());
return ret;
}
@@ -899,8 +922,8 @@ bool NativeModule::SetExecutable(bool executable) {
}
void WasmCodeManager::FreeNativeModule(NativeModule* native_module) {
DCHECK_GE(active_, 1);
--active_;
DCHECK_EQ(1, native_modules_.count(native_module));
native_modules_.erase(native_module);
TRACE_HEAP("Freeing NativeModule %p\n", this);
for (auto& vmem : native_module->owned_code_space_) {
lookup_map_.erase(vmem.address());
@@ -911,11 +934,6 @@ void WasmCodeManager::FreeNativeModule(NativeModule* native_module) {
size_t code_size = native_module->committed_code_space_;
DCHECK(IsAligned(code_size, AllocatePageSize()));
if (module_code_size_mb_) {
module_code_size_mb_->AddSample(static_cast<int>(code_size / MB));
}
remaining_uncommitted_code_space_.fetch_add(code_size);
}
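Taken together, the WasmCodeManager hunks replace the old active_ counter with a registry of live modules: NewNativeModule emplaces into native_modules_, FreeNativeModule erases from it (and no longer samples the histogram there), SampleModuleSizes iterates it, and the memory-pressure check now asks native_modules_.size(). A standalone sketch of that registry pattern, using hypothetical Module/committed_bytes names and omitting the locking and V8 types of the real code:

    #include <cstddef>
    #include <functional>
    #include <unordered_set>

    // Hypothetical stand-in for NativeModule; committed_bytes plays the role
    // of committed_code_space_.
    struct Module {
      size_t committed_bytes = 0;
    };

    // Register on creation, unregister on destruction, and sample every
    // *live* module on demand (e.g. from a GC epilogue callback).
    class CodeSizeRegistry {
     public:
      void Register(const Module* m) { modules_.insert(m); }
      void Unregister(const Module* m) { modules_.erase(m); }

      void SampleAll(const std::function<void(int)>& record_mb) const {
        for (const Module* m : modules_)
          record_mb(static_cast<int>(m->committed_bytes / (1024 * 1024)));
      }

     private:
      std::unordered_set<const Module*> modules_;
    };

    // Usage: registry.SampleAll([&](int mb) { histogram.AddSample(mb); });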

@@ -9,6 +9,7 @@
#include <list>
#include <map>
#include <unordered_map>
#include <unordered_set>
#include "src/base/macros.h"
#include "src/handles.h"
@@ -21,7 +22,6 @@ namespace internal {
struct CodeDesc;
class Code;
class Histogram;
namespace wasm {
@@ -433,9 +433,14 @@ class V8_EXPORT_PRIVATE WasmCodeManager final {
WasmCode* GetCodeFromStartAddress(Address pc) const;
size_t remaining_uncommitted_code_space() const;
void SetModuleCodeSizeHistogram(Histogram* histogram) {
module_code_size_mb_ = histogram;
}
// Add a sample of all module sizes.
void SampleModuleSizes(Isolate* isolate) const;
// TODO(v8:7424): For now we sample module sizes in a GC callback. This will
// bias samples towards apps with high memory pressure. We should switch to
// using sampling based on regular intervals independent of the GC.
static void InstallSamplingGCCallback(Isolate* isolate);
static size_t EstimateNativeModuleSize(const WasmModule* module);
private:
@@ -452,14 +457,9 @@ class V8_EXPORT_PRIVATE WasmCodeManager final {
void AssignRanges(Address start, Address end, NativeModule*);
std::map<Address, std::pair<Address, NativeModule*>> lookup_map_;
// Count of NativeModules not yet collected. Helps determine if it's
// worth requesting a GC on memory pressure.
size_t active_ = 0;
std::unordered_set<NativeModule*> native_modules_;
std::atomic<size_t> remaining_uncommitted_code_space_;
// Histogram to update with the maximum used code space for each NativeModule.
Histogram* module_code_size_mb_ = nullptr;
DISALLOW_COPY_AND_ASSIGN(WasmCodeManager);
};
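The TODO above points out that GC-triggered sampling over-represents applications under memory pressure and that interval-based sampling would be preferable. Purely as an illustration of that direction (not part of this commit; V8 would schedule such samples through its own task runners and on the correct thread rather than on a raw background thread), a generic periodic sampler might look like this:

    #include <atomic>
    #include <chrono>
    #include <functional>
    #include <thread>
    #include <utility>

    // Calls `sample` roughly every `interval` on a background thread until
    // destroyed. Whatever the callback touches must be safe to access from
    // that thread, which is exactly the plumbing the real fix still needs.
    class PeriodicSampler {
     public:
      PeriodicSampler(std::chrono::milliseconds interval,
                      std::function<void()> sample)
          : interval_(interval),
            sample_(std::move(sample)),
            thread_([this] {
              while (running_.load()) {
                std::this_thread::sleep_for(interval_);
                if (running_.load()) sample_();
              }
            }) {}

      ~PeriodicSampler() {
        running_.store(false);  // thread exits after at most one more interval
        thread_.join();
      }

     private:
      std::chrono::milliseconds interval_;
      std::function<void()> sample_;
      std::atomic<bool> running_{true};
      std::thread thread_;
    };

    // Usage: PeriodicSampler sampler(std::chrono::seconds(30),
    //                                [] { /* sample module code sizes */ });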