[wasm] Simplify {RemoveCompiledCode}
Inline {GetCodeLocked} and {ResetCodeLocked} to make the code simpler
and more efficient.

Drive-by: Make {FindJumpTablesForRegionLocked} private.

R=ahaas@chromium.org
Cq-Include-Trybots: luci.v8.try:v8_linux64_tsan_rel
Cq-Include-Trybots: luci.v8.try:v8_linux64_tsan_isolates_rel

Change-Id: Id0649924440737f3a04dbb536bd2141732f4f3bb
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/4123535
Commit-Queue: Clemens Backes <clemensb@chromium.org>
Reviewed-by: Andreas Haas <ahaas@chromium.org>
Cr-Commit-Position: refs/heads/main@{#85048}
parent cc6fd8c8c0
commit bfa76578c8
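For readers skimming the commit page: assembled from the unchanged lines of the first hunk below (none of these lines is new here), the simplified {NativeModule::GetCode} reads roughly as follows after this change. This is a sketch reconstructed from the diff, not a verbatim copy of the tree.

WasmCode* NativeModule::GetCode(uint32_t index) const {
  base::RecursiveMutexGuard guard(&allocation_mutex_);
  // The former {GetCodeLocked} body, now inlined under the same lock.
  WasmCode* code = code_table_[declared_function_index(module(), index)];
  if (code) WasmCodeRefScope::AddRef(code);
  return code;
}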
@@ -1471,29 +1471,11 @@ std::vector<WasmCode*> NativeModule::SnapshotAllOwnedCode() const {
 
 WasmCode* NativeModule::GetCode(uint32_t index) const {
   base::RecursiveMutexGuard guard(&allocation_mutex_);
-  return GetCodeLocked(index);
-}
-
-WasmCode* NativeModule::GetCodeLocked(uint32_t index) const {
-  allocation_mutex_.AssertHeld();
   WasmCode* code = code_table_[declared_function_index(module(), index)];
   if (code) WasmCodeRefScope::AddRef(code);
   return code;
 }
 
-void NativeModule::ResetCodeLocked(uint32_t index) const {
-  allocation_mutex_.AssertHeld();
-  int declared_index = declared_function_index(module(), index);
-  WasmCode* code = code_table_[declared_index];
-  if (!code) return;
-
-  WasmCodeRefScope::AddRef(code);
-  code_table_[declared_index] = nullptr;
-  // The code is added to the current {WasmCodeRefScope}, hence the ref
-  // count cannot drop to zero here.
-  code->DecRefOnLiveCode();
-}
-
 bool NativeModule::HasCode(uint32_t index) const {
   base::RecursiveMutexGuard guard(&allocation_mutex_);
   return code_table_[declared_function_index(module(), index)] != nullptr;
@@ -2439,7 +2421,6 @@ void NativeModule::SetDebugState(DebugState new_debug_state) {
 
 namespace {
 bool ShouldRemoveCode(WasmCode* code, NativeModule::RemoveFilter filter) {
-  if (!code) return false;
   if (filter == NativeModule::RemoveFilter::kRemoveDebugCode &&
       !code->for_debugging()) {
     return false;
@@ -2459,10 +2440,14 @@ void NativeModule::RemoveCompiledCode(RemoveFilter filter) {
   CodeSpaceWriteScope write_scope(this);
   base::RecursiveMutexGuard guard(&allocation_mutex_);
   for (uint32_t i = 0; i < num_functions; i++) {
-    uint32_t func_index = i + num_imports;
-    WasmCode* code = GetCodeLocked(func_index);
-    if (ShouldRemoveCode(code, filter)) {
-      ResetCodeLocked(func_index);
+    WasmCode* code = code_table_[i];
+    if (code && ShouldRemoveCode(code, filter)) {
+      code_table_[i] = nullptr;
+      // Add the code to the {WasmCodeRefScope}, so the ref count cannot drop to
+      // zero here. It might in the {WasmCodeRefScope} destructor, though.
+      WasmCodeRefScope::AddRef(code);
+      code->DecRefOnLiveCode();
+      uint32_t func_index = i + num_imports;
       UseLazyStubLocked(func_index);
     }
   }
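Assembled from the added and context lines of the hunk above, the rewritten loop in {RemoveCompiledCode} reads roughly as follows; the declarations of num_functions and num_imports sit outside this hunk and are not shown. A sketch, not a verbatim copy of the tree.

  for (uint32_t i = 0; i < num_functions; i++) {
    WasmCode* code = code_table_[i];
    if (code && ShouldRemoveCode(code, filter)) {
      code_table_[i] = nullptr;
      // Add the code to the {WasmCodeRefScope}, so the ref count cannot drop to
      // zero here. It might in the {WasmCodeRefScope} destructor, though.
      WasmCodeRefScope::AddRef(code);
      code->DecRefOnLiveCode();
      uint32_t func_index = i + num_imports;
      UseLazyStubLocked(func_index);
    }
  }

Note the indexing: {code_table_} is keyed by declared function index (imports excluded), as the {declared_function_index(module(), index)} lookups elsewhere in this diff show, so the loop indexes the table with i directly and only computes func_index = i + num_imports for {UseLazyStubLocked}.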
@@ -676,10 +676,8 @@ class V8_EXPORT_PRIVATE NativeModule final {
   std::vector<WasmCode*> SnapshotAllOwnedCode() const;
 
   WasmCode* GetCode(uint32_t index) const;
-  WasmCode* GetCodeLocked(uint32_t index) const;
   bool HasCode(uint32_t index) const;
   bool HasCodeWithTier(uint32_t index, ExecutionTier tier) const;
-  void ResetCodeLocked(uint32_t index) const;
 
   void SetWasmSourceMap(std::unique_ptr<WasmModuleSourceMap> source_map);
   WasmModuleSourceMap* GetWasmSourceMap() const;
@@ -689,12 +687,6 @@
                : kNullAddress;
   }
 
-  // Finds the jump tables that should be used for given code region. This
-  // information is then passed to {GetNearCallTargetForFunction} and
-  // {GetNearRuntimeStubEntry} to avoid the overhead of looking this information
-  // up there. Return an empty struct if no suitable jump tables exist.
-  JumpTablesRef FindJumpTablesForRegionLocked(base::AddressRegion) const;
-
   // Get the call target in the jump table previously looked up via
   // {FindJumpTablesForRegionLocked}.
   Address GetNearCallTargetForFunction(uint32_t func_index,
@@ -898,6 +890,12 @@
   WasmCode* CreateEmptyJumpTableInRegionLocked(int jump_table_size,
                                                base::AddressRegion);
 
+  // Finds the jump tables that should be used for given code region. This
+  // information is then passed to {GetNearCallTargetForFunction} and
+  // {GetNearRuntimeStubEntry} to avoid the overhead of looking this information
+  // up there. Return an empty struct if no suitable jump tables exist.
+  JumpTablesRef FindJumpTablesForRegionLocked(base::AddressRegion) const;
+
   void UpdateCodeSize(size_t, ExecutionTier, ForDebugging);
 
   // Hold the {allocation_mutex_} when calling one of these methods.
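The comment that moves with {FindJumpTablesForRegionLocked} (now in the private section) describes a two-step lookup: resolve the jump tables for a code region once, then reuse that result for the near-call and runtime-stub lookups. A rough illustration of that pattern is sketched below. It assumes the call happens inside a {NativeModule} member with {allocation_mutex_} held; {code_region} and {func_index} are placeholder names, the {JumpTablesRef} parameter of {GetNearCallTargetForFunction} is inferred from the comment (its parameter list is truncated in this diff), and the emptiness check is an assumption based on "Return an empty struct if no suitable jump tables exist."

// Sketch only: resolve jump tables once per code region, then reuse them.
JumpTablesRef jump_tables = FindJumpTablesForRegionLocked(code_region);
if (jump_tables.is_valid()) {  // assumed check for the "empty struct" case
  // Per the comment, the same {jump_tables} result would also be passed to
  // {GetNearRuntimeStubEntry} instead of repeating the region lookup there.
  Address target = GetNearCallTargetForFunction(func_index, jump_tables);
}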