Revert "[nci] Share smi feedback and enable related optimizations"

This reverts commit 3599cce1f5.

Originally landed in
https://chromium-review.googlesource.com/c/v8/v8/+/2531775

Work on NCI is suspended, so remove the unused complexity. We may want to
share native-context-independent feedback in the future, but probably through
other means.

Bug: v8:8888
Change-Id: I23dfb67f6f01b4891af87bc42a9e62f99d0bf044
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2567701
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Reviewed-by: Mythri Alle <mythria@chromium.org>
Commit-Queue: Jakob Gruber <jgruber@chromium.org>
Cr-Commit-Position: refs/heads/master@{#71553}
This commit is contained in:
Jakob Gruber 2020-12-01 15:53:58 +01:00 committed by Commit Bot
parent e2aa734aef
commit ab4a540c06
20 changed files with 79 additions and 497 deletions

View File

@ -3094,9 +3094,6 @@ v8_source_set("v8_base_without_compiler") {
"src/objects/scope-info.h",
"src/objects/script-inl.h",
"src/objects/script.h",
"src/objects/serialized-feedback-inl.h",
"src/objects/serialized-feedback.cc",
"src/objects/serialized-feedback.h",
"src/objects/shared-function-info-inl.h",
"src/objects/shared-function-info.cc",
"src/objects/shared-function-info.h",

View File

@ -17,18 +17,11 @@
namespace v8 {
namespace internal {
namespace {
// The number of generations for each sub cache.
constexpr int kRegExpGenerations = 2;
static const int kRegExpGenerations = 2;
// Initial size of each compilation cache table allocated.
constexpr int kInitialCacheSize = 64;
// The index of the youngest generation table.
constexpr int kYoungestTableIndex = 0;
} // namespace
static const int kInitialCacheSize = 64;
CompilationCache::CompilationCache(Isolate* isolate)
: isolate_(isolate),
@ -48,22 +41,17 @@ CompilationCache::CompilationCache(Isolate* isolate)
Handle<CompilationCacheTable> CompilationSubCache::GetTable(int generation) {
DCHECK_LT(generation, generations());
Handle<CompilationCacheTable> result;
if (has_table(generation)) {
if (tables_[generation].IsUndefined(isolate())) {
result = CompilationCacheTable::New(isolate(), kInitialCacheSize);
tables_[generation] = *result;
} else {
CompilationCacheTable table =
CompilationCacheTable::cast(tables_[generation]);
result = Handle<CompilationCacheTable>(table, isolate());
} else {
result = CompilationCacheTable::New(isolate(), kInitialCacheSize);
tables_[generation] = *result;
}
return result;
}
bool CompilationSubCache::has_table(int generation) const {
DCHECK_LT(generation, generations());
return !tables_[generation].IsUndefined(isolate());
}
// static
void CompilationSubCache::AgeByGeneration(CompilationSubCache* c) {
DCHECK_GT(c->generations(), 1);
@ -74,15 +62,14 @@ void CompilationSubCache::AgeByGeneration(CompilationSubCache* c) {
}
// Set the first generation as unborn.
c->tables_[kYoungestTableIndex] =
ReadOnlyRoots(c->isolate()).undefined_value();
c->tables_[0] = ReadOnlyRoots(c->isolate()).undefined_value();
}
// static
void CompilationSubCache::AgeCustom(CompilationSubCache* c) {
DCHECK_EQ(c->generations(), 1);
if (!c->has_table(kYoungestTableIndex)) return;
CompilationCacheTable::cast(c->tables_[kYoungestTableIndex]).Age();
if (c->tables_[0].IsUndefined(c->isolate())) return;
CompilationCacheTable::cast(c->tables_[0]).Age();
}
void CompilationCacheScript::Age() {
@ -283,77 +270,43 @@ void CompilationCacheRegExp::Put(Handle<String> source, JSRegExp::Flags flags,
CompilationCacheTable::PutRegExp(isolate(), table, source, flags, data));
}
bool CompilationCacheCode::Lookup(
Handle<SharedFunctionInfo> key, MaybeHandle<Code>* code_out,
MaybeHandle<SerializedFeedback>* feedback_out) {
Code raw_code;
SerializedFeedback raw_feedback;
MaybeHandle<Code> CompilationCacheCode::Lookup(Handle<SharedFunctionInfo> key) {
// Make sure not to leak the table into the surrounding handle
// scope. Otherwise, we risk keeping old tables around even after
// having cleared the cache.
HandleScope scope(isolate());
MaybeHandle<Code> maybe_value;
int generation = 0;
{
// Make sure not to leak the table into the surrounding handle
// scope. Otherwise, we risk keeping old tables around even after
// having cleared the cache.
HandleScope scope(isolate());
MaybeHandle<Code> maybe_code;
MaybeHandle<SerializedFeedback> maybe_feedback;
bool found = false;
for (; generation < generations(); generation++) {
Handle<CompilationCacheTable> table = GetTable(generation);
found = table->LookupCode(key, &maybe_code, &maybe_feedback);
if (found) break;
}
if (!found) {
isolate()->counters()->compilation_cache_misses()->Increment();
return false;
}
raw_code = *maybe_code.ToHandleChecked();
raw_feedback = *maybe_feedback.ToHandleChecked();
for (; generation < generations(); generation++) {
Handle<CompilationCacheTable> table = GetTable(generation);
maybe_value = table->LookupCode(key);
if (!maybe_value.is_null()) break;
}
// The Code object shouldn't be marked for deoptimization, otherwise it'd
// immediately be thrown out on the next call.
DCHECK(!raw_code.marked_for_deoptimization());
*code_out = handle(raw_code, isolate());
*feedback_out = handle(raw_feedback, isolate());
if (generation != kYoungestTableIndex) {
// Copy to youngest generation.
Put(key, code_out->ToHandleChecked(), feedback_out->ToHandleChecked());
if (maybe_value.is_null()) {
isolate()->counters()->compilation_cache_misses()->Increment();
return MaybeHandle<Code>();
}
Handle<Code> value = maybe_value.ToHandleChecked();
if (generation != 0) Put(key, value); // Add to the first generation.
isolate()->counters()->compilation_cache_hits()->Increment();
return true;
return scope.CloseAndEscape(value);
}
void CompilationCacheCode::Put(Handle<SharedFunctionInfo> key,
Handle<Code> value_code,
Handle<SerializedFeedback> value_feedback) {
Handle<Code> value) {
HandleScope scope(isolate());
Handle<CompilationCacheTable> table = GetFirstTable();
SetFirstTable(CompilationCacheTable::PutCode(isolate(), table, key,
value_code, value_feedback));
SetFirstTable(CompilationCacheTable::PutCode(isolate(), table, key, value));
}
// static
void CompilationCacheCode::TraceAgeing() {
DCHECK(FLAG_trace_turbo_nci);
StdoutStream os;
os << "NCI cache ageing: Removing oldest generation" << std::endl;
}
// static
void CompilationCacheCode::TraceRemovalForDeoptimization(SharedFunctionInfo key,
Code value) {
DCHECK(FLAG_trace_turbo_nci);
StdoutStream os;
os << "NCI cache removal for deoptimization: " << Brief(key) << ", "
<< Brief(value) << std::endl;
}
// static
void CompilationCacheCode::TraceInsertion(Handle<SharedFunctionInfo> key,
Handle<Code> value) {
DCHECK(FLAG_trace_turbo_nci);
@ -362,7 +315,6 @@ void CompilationCacheCode::TraceInsertion(Handle<SharedFunctionInfo> key,
<< std::endl;
}
// static
void CompilationCacheCode::TraceHit(Handle<SharedFunctionInfo> key,
Handle<Code> value) {
DCHECK(FLAG_trace_turbo_nci);
@ -423,10 +375,8 @@ MaybeHandle<FixedArray> CompilationCache::LookupRegExp(Handle<String> source,
return reg_exp_.Lookup(source, flags);
}
bool CompilationCache::LookupCode(
Handle<SharedFunctionInfo> sfi, MaybeHandle<Code>* code_out,
MaybeHandle<SerializedFeedback>* feedback_out) {
return code_.Lookup(sfi, code_out, feedback_out);
MaybeHandle<Code> CompilationCache::LookupCode(Handle<SharedFunctionInfo> sfi) {
return code_.Lookup(sfi);
}
void CompilationCache::PutScript(Handle<String> source,
@ -469,18 +419,8 @@ void CompilationCache::PutRegExp(Handle<String> source, JSRegExp::Flags flags,
}
void CompilationCache::PutCode(Handle<SharedFunctionInfo> shared,
Handle<Code> code,
Handle<SerializedFeedback> feedback) {
code_.Put(shared, code, feedback);
}
void CompilationCache::ClearDeoptimizedCode() { code_.ClearDeoptimizedCode(); }
void CompilationCacheCode::ClearDeoptimizedCode() {
for (int i = 0; i < generations(); i++) {
if (!has_table(i)) continue;
GetTable(i)->ClearDeoptimizedCode();
}
Handle<Code> code) {
code_.Put(shared, code);
}
void CompilationCache::Clear() {

View File

@ -34,7 +34,6 @@ class CompilationSubCache {
// Get the compilation cache tables for a specific generation.
Handle<CompilationCacheTable> GetTable(int generation);
bool has_table(int generation) const;
// Accessors for first generation.
Handle<CompilationCacheTable> GetFirstTable() {
@ -160,11 +159,8 @@ class CompilationCacheCode : public CompilationSubCache {
explicit CompilationCacheCode(Isolate* isolate)
: CompilationSubCache(isolate, kGenerations) {}
bool Lookup(Handle<SharedFunctionInfo> key, MaybeHandle<Code>* code_out,
MaybeHandle<SerializedFeedback>* feedback_out);
void Put(Handle<SharedFunctionInfo> key, Handle<Code> value_code,
Handle<SerializedFeedback> value_feedback);
void ClearDeoptimizedCode();
MaybeHandle<Code> Lookup(Handle<SharedFunctionInfo> key);
void Put(Handle<SharedFunctionInfo> key, Handle<Code> value);
void Age() override;
@ -174,7 +170,6 @@ class CompilationCacheCode : public CompilationSubCache {
static constexpr int kGenerations = 2;
static void TraceAgeing();
static void TraceRemovalForDeoptimization(SharedFunctionInfo key, Code value);
static void TraceInsertion(Handle<SharedFunctionInfo> key,
Handle<Code> value);
static void TraceHit(Handle<SharedFunctionInfo> key, Handle<Code> value);
@ -210,8 +205,7 @@ class V8_EXPORT_PRIVATE CompilationCache {
MaybeHandle<FixedArray> LookupRegExp(Handle<String> source,
JSRegExp::Flags flags);
bool LookupCode(Handle<SharedFunctionInfo> sfi, MaybeHandle<Code>* code_out,
MaybeHandle<SerializedFeedback>* feedback_out);
MaybeHandle<Code> LookupCode(Handle<SharedFunctionInfo> sfi);
// Associate the (source, kind) pair to the shared function
// info. This may overwrite an existing mapping.
@ -231,9 +225,7 @@ class V8_EXPORT_PRIVATE CompilationCache {
void PutRegExp(Handle<String> source, JSRegExp::Flags flags,
Handle<FixedArray> data);
void PutCode(Handle<SharedFunctionInfo> shared, Handle<Code> code,
Handle<SerializedFeedback> feedback);
void ClearDeoptimizedCode();
void PutCode(Handle<SharedFunctionInfo> shared, Handle<Code> code);
// Clear the cache - also used to initialize the cache at startup.
void Clear();

View File

@ -45,7 +45,6 @@
#include "src/objects/js-function-inl.h"
#include "src/objects/map.h"
#include "src/objects/object-list-macros.h"
#include "src/objects/serialized-feedback.h"
#include "src/objects/shared-function-info.h"
#include "src/objects/string.h"
#include "src/parsing/parse-info.h"
@ -919,19 +918,10 @@ void InsertCodeIntoCompilationCache(Isolate* isolate,
Handle<Code> code = info->code();
DCHECK(!info->function_context_specializing());
Handle<SerializedFeedback> serialized_feedback =
SerializedFeedback::Serialize(
isolate, handle(info->closure()->feedback_vector(), isolate));
Handle<SharedFunctionInfo> sfi = info->shared_info();
CompilationCache* cache = isolate->compilation_cache();
cache->PutCode(sfi, code, serialized_feedback);
#ifdef DEBUG
MaybeHandle<Code> maybe_code;
MaybeHandle<SerializedFeedback> maybe_feedback;
DCHECK(cache->LookupCode(sfi, &maybe_code, &maybe_feedback));
#endif // DEBUG
cache->PutCode(sfi, code);
DCHECK(!cache->LookupCode(sfi).is_null());
sfi->set_may_have_cached_code(true);

View File

@ -4128,6 +4128,7 @@ JSTypeHintLowering::LoweringResult
BytecodeGraphBuilder::TryBuildSimplifiedUnaryOp(const Operator* op,
Node* operand,
FeedbackSlot slot) {
if (!CanApplyTypeHintLowering(op)) return NoChange();
Node* effect = environment()->GetEffectDependency();
Node* control = environment()->GetControlDependency();
JSTypeHintLowering::LoweringResult result =
@ -4141,6 +4142,7 @@ JSTypeHintLowering::LoweringResult
BytecodeGraphBuilder::TryBuildSimplifiedBinaryOp(const Operator* op, Node* left,
Node* right,
FeedbackSlot slot) {
if (!CanApplyTypeHintLowering(op)) return NoChange();
Node* effect = environment()->GetEffectDependency();
Node* control = environment()->GetControlDependency();
JSTypeHintLowering::LoweringResult result =
@ -4155,6 +4157,7 @@ BytecodeGraphBuilder::TryBuildSimplifiedForInNext(Node* receiver,
Node* cache_array,
Node* cache_type, Node* index,
FeedbackSlot slot) {
if (!CanApplyTypeHintLowering(IrOpcode::kJSForInNext)) return NoChange();
Node* effect = environment()->GetEffectDependency();
Node* control = environment()->GetControlDependency();
JSTypeHintLowering::LoweringResult result =
@ -4167,6 +4170,7 @@ BytecodeGraphBuilder::TryBuildSimplifiedForInNext(Node* receiver,
JSTypeHintLowering::LoweringResult
BytecodeGraphBuilder::TryBuildSimplifiedForInPrepare(Node* enumerator,
FeedbackSlot slot) {
if (!CanApplyTypeHintLowering(IrOpcode::kJSForInPrepare)) return NoChange();
Node* effect = environment()->GetEffectDependency();
Node* control = environment()->GetControlDependency();
JSTypeHintLowering::LoweringResult result =

View File

@ -4723,8 +4723,8 @@ bool HasMigrationTargets(const MapHandles& maps) {
} // namespace
bool JSHeapBroker::CanUseFeedback(const FeedbackNexus& nexus) const {
// TODO(jgruber,v8:8888): Currently, nci code does not use all feedback
// kinds. This restriction will be relaxed in the future.
// TODO(jgruber,v8:8888): Currently, nci code does not use any
// feedback. This restriction will be relaxed in the future.
return !is_native_context_independent() && !nexus.IsUninitialized();
}
@ -4837,7 +4837,7 @@ ProcessedFeedback const& JSHeapBroker::ReadFeedbackForGlobalAccess(
ProcessedFeedback const& JSHeapBroker::ReadFeedbackForBinaryOperation(
FeedbackSource const& source) const {
FeedbackNexus nexus(source.vector, source.slot, feedback_nexus_config());
if (nexus.IsUninitialized()) return NewInsufficientFeedback(nexus.kind());
if (!CanUseFeedback(nexus)) return NewInsufficientFeedback(nexus.kind());
BinaryOperationHint hint = nexus.GetBinaryOperationFeedback();
DCHECK_NE(hint, BinaryOperationHint::kNone); // Not uninitialized.
return *zone()->New<BinaryOperationFeedback>(hint, nexus.kind());
@ -4846,7 +4846,7 @@ ProcessedFeedback const& JSHeapBroker::ReadFeedbackForBinaryOperation(
ProcessedFeedback const& JSHeapBroker::ReadFeedbackForCompareOperation(
FeedbackSource const& source) const {
FeedbackNexus nexus(source.vector, source.slot, feedback_nexus_config());
if (nexus.IsUninitialized()) return NewInsufficientFeedback(nexus.kind());
if (!CanUseFeedback(nexus)) return NewInsufficientFeedback(nexus.kind());
CompareOperationHint hint = nexus.GetCompareOperationFeedback();
DCHECK_NE(hint, CompareOperationHint::kNone); // Not uninitialized.
return *zone()->New<CompareOperationFeedback>(hint, nexus.kind());
@ -4855,7 +4855,7 @@ ProcessedFeedback const& JSHeapBroker::ReadFeedbackForCompareOperation(
ProcessedFeedback const& JSHeapBroker::ReadFeedbackForForIn(
FeedbackSource const& source) const {
FeedbackNexus nexus(source.vector, source.slot, feedback_nexus_config());
if (nexus.IsUninitialized()) return NewInsufficientFeedback(nexus.kind());
if (!CanUseFeedback(nexus)) return NewInsufficientFeedback(nexus.kind());
ForInHint hint = nexus.GetForInFeedback();
DCHECK_NE(hint, ForInHint::kNone); // Not uninitialized.
return *zone()->New<ForInFeedback>(hint, nexus.kind());

View File

@ -2371,7 +2371,7 @@ Reduction JSTypedLowering::Reduce(Node* node) {
const IrOpcode::Value opcode = node->opcode();
if (broker()->generate_full_feedback_collection() &&
IrOpcode::OpcodeMustCollectFeedbackForNCI(opcode)) {
IrOpcode::IsFeedbackCollectingOpcode(opcode)) {
// In NCI code, it is not valid to reduce feedback-collecting JS opcodes
// into non-feedback-collecting lower-level opcodes; missed feedback would
// result in soft deopts.

View File

@ -1092,48 +1092,12 @@ class V8_EXPORT_PRIVATE IrOpcode {
return kJSCreateFunctionContext <= value && value <= kJSCreateBlockContext;
}
// These opcodes *must* collect full feedback in NCI code in order to avoid
// deopts after tier-up to Turbofan.
// TODO(jgruber,v8:8888): The goal is for this to be the empty set at some
// point in the future.
static bool OpcodeMustCollectFeedbackForNCI(Value value) {
switch (value) {
case kJSCall:
case kJSCallWithArrayLike:
case kJSCallWithSpread:
case kJSCloneObject:
case kJSConstruct:
case kJSConstructWithArrayLike:
case kJSConstructWithSpread:
case kJSCreateEmptyLiteralArray:
case kJSCreateLiteralArray:
case kJSCreateLiteralObject:
case kJSCreateLiteralRegExp:
case kJSGetIterator:
case kJSGetTemplateObject:
case kJSHasProperty:
case kJSInstanceOf:
case kJSLoadGlobal:
case kJSLoadNamed:
case kJSLoadNamedFromSuper:
case kJSLoadProperty:
case kJSStoreDataPropertyInLiteral:
case kJSStoreGlobal:
case kJSStoreInArrayLiteral:
case kJSStoreNamed:
case kJSStoreNamedOwn:
case kJSStoreProperty:
return true;
default:
return false;
}
UNREACHABLE();
}
// These opcode take the feedback vector as an input, and implement
// feedback-collecting logic in generic lowering.
static bool IsFeedbackCollectingOpcode(Value value) {
#define CASE(Name, ...) case k##Name:
#define CASE(Name, ...) \
case k##Name: \
return true;
switch (value) {
JS_ARITH_BINOP_LIST(CASE)
JS_ARITH_UNOP_LIST(CASE)

View File

@ -10,7 +10,6 @@
#include "src/builtins/accessors.h"
#include "src/codegen/assembler-inl.h"
#include "src/codegen/callable.h"
#include "src/codegen/compilation-cache.h"
#include "src/codegen/macro-assembler.h"
#include "src/codegen/register-configuration.h"
#include "src/common/assert-scope.h"
@ -393,7 +392,6 @@ void Deoptimizer::DeoptimizeAll(Isolate* isolate) {
MarkAllCodeForContext(native_context);
OSROptimizedCodeCache::Clear(native_context);
DeoptimizeMarkedCodeForContext(native_context);
isolate->compilation_cache()->ClearDeoptimizedCode();
context = native_context.next_context_link();
}
}
@ -454,16 +452,6 @@ void Deoptimizer::DeoptimizeFunction(JSFunction function, Code code) {
// this call from here.
OSROptimizedCodeCache::Compact(
Handle<NativeContext>(function.context().native_context(), isolate));
// Remove deoptimized Code from the NCI cache - such objects currently
// cannot be reused.
// TODO(jgruber): Instead of removal (and possibly later re-insertion), we
// should learn from deopts. Potentially this already happens now;
// re-insertion will also insert updated (generalized) feedback. We should
// still take a closer look at this in the future though.
if (code.kind() == CodeKind::NATIVE_CONTEXT_INDEPENDENT) {
isolate->compilation_cache()->ClearDeoptimizedCode();
}
}
}

View File

@ -3498,27 +3498,18 @@ Handle<JSFunction> Factory::JSFunctionBuilder::Build() {
PrepareMap();
PrepareFeedbackCell();
// Determine the associated Code object. If we hit the NCI cache, take that;
// otherwise, ask the SharedFunctionInfo for the appropriate Code object.
MaybeHandle<Code> maybe_code;
MaybeHandle<SerializedFeedback> maybe_feedback;
const bool have_nci_cache = sfi_->TryGetCachedCodeAndSerializedFeedback(
isolate_, &maybe_code, &maybe_feedback);
Handle<Code> code = have_nci_cache ? maybe_code.ToHandleChecked()
: handle(sfi_->GetCode(), isolate_);
// Determine the associated Code object.
Handle<Code> code;
const bool have_cached_code =
sfi_->TryGetCachedCode(isolate_).ToHandle(&code);
if (!have_cached_code) code = handle(sfi_->GetCode(), isolate_);
Handle<JSFunction> result = BuildRaw(code);
if (have_nci_cache) {
if (have_cached_code) {
IsCompiledScope is_compiled_scope(sfi_->is_compiled_scope(isolate_));
JSFunction::EnsureFeedbackVector(result, &is_compiled_scope);
if (FLAG_trace_turbo_nci) CompilationCacheCode::TraceHit(sfi_, code);
if (!result->has_feedback_vector()) {
IsCompiledScope is_compiled_scope(sfi_->is_compiled_scope(isolate_));
JSFunction::EnsureFeedbackVector(result, &is_compiled_scope);
// TODO(jgruber,v8:8888): Consider combining shared feedback with
// existing feedback here.
maybe_feedback.ToHandleChecked()->DeserializeInto(
result->feedback_vector());
}
}
Compiler::PostInstantiation(result);

View File

@ -458,6 +458,7 @@ int Code::stack_slots() const {
}
bool Code::marked_for_deoptimization() const {
DCHECK(CodeKindCanDeoptimize(kind()));
int32_t flags = code_data_container(kAcquireLoad).kind_specific_flags();
return MarkedForDeoptimizationField::decode(flags);
}

View File

@ -4,10 +4,8 @@
#include "src/objects/compilation-cache-table.h"
#include "src/codegen/compilation-cache.h"
#include "src/common/assert-scope.h"
#include "src/objects/compilation-cache-table-inl.h"
#include "src/objects/serialized-feedback-inl.h"
namespace v8 {
namespace internal {
@ -285,32 +283,14 @@ Handle<Object> CompilationCacheTable::LookupRegExp(Handle<String> src,
return Handle<Object>(get(EntryToIndex(entry) + 1), isolate);
}
namespace {
constexpr int CodeCacheSFIIndex(InternalIndex entry) {
return CompilationCacheTable::EntryToIndex(entry) + 0;
}
constexpr int CodeCacheCodeIndex(InternalIndex entry) {
return CompilationCacheTable::EntryToIndex(entry) + 1;
}
constexpr int CodeCacheFeedbackIndex(InternalIndex entry) {
return CompilationCacheTable::EntryToIndex(entry) + 2;
}
} // namespace
bool CompilationCacheTable::LookupCode(
Handle<SharedFunctionInfo> key, MaybeHandle<Code>* code_out,
MaybeHandle<SerializedFeedback>* feedback_out) {
MaybeHandle<Code> CompilationCacheTable::LookupCode(
Handle<SharedFunctionInfo> key) {
Isolate* isolate = GetIsolate();
DisallowGarbageCollection no_gc;
CodeKey k(key);
InternalIndex entry = FindEntry(isolate, &k);
if (entry.is_not_found()) return false;
*code_out = handle(Code::cast(get(CodeCacheCodeIndex(entry))), isolate);
*feedback_out = handle(
SerializedFeedback::cast(get(CodeCacheFeedbackIndex(entry))), isolate);
return true;
if (entry.is_not_found()) return {};
return Handle<Code>(Code::cast(get(EntryToIndex(entry) + 1)), isolate);
}
Handle<CompilationCacheTable> CompilationCacheTable::PutScript(
@ -390,18 +370,15 @@ Handle<CompilationCacheTable> CompilationCacheTable::PutRegExp(
Handle<CompilationCacheTable> CompilationCacheTable::PutCode(
Isolate* isolate, Handle<CompilationCacheTable> cache,
Handle<SharedFunctionInfo> key, Handle<Code> value_code,
Handle<SerializedFeedback> value_feedback) {
STATIC_ASSERT(CompilationCacheShape::kEntrySize >= 3);
Handle<SharedFunctionInfo> key, Handle<Code> value) {
CodeKey k(key);
{
InternalIndex entry = cache->FindEntry(isolate, &k);
if (entry.is_found()) {
// Update.
cache->set(CodeCacheSFIIndex(entry), *key);
cache->set(CodeCacheCodeIndex(entry), *value_code);
cache->set(CodeCacheFeedbackIndex(entry), *value_feedback);
cache->set(EntryToIndex(entry), *key);
cache->set(EntryToIndex(entry) + 1, *value);
return cache;
}
}
@ -409,9 +386,8 @@ Handle<CompilationCacheTable> CompilationCacheTable::PutCode(
// Insert.
cache = EnsureCapacity(isolate, cache);
InternalIndex entry = cache->FindInsertionEntry(isolate, k.Hash());
cache->set(CodeCacheSFIIndex(entry), *key);
cache->set(CodeCacheCodeIndex(entry), *value_code);
cache->set(CodeCacheFeedbackIndex(entry), *value_feedback);
cache->set(EntryToIndex(entry), *key);
cache->set(EntryToIndex(entry) + 1, *value);
cache->ElementAdded();
return cache;
}
@ -449,35 +425,6 @@ void CompilationCacheTable::Age() {
}
}
void CompilationCacheTable::ClearDeoptimizedCode() {
DisallowGarbageCollection no_gc;
Object the_hole_value = GetReadOnlyRoots().the_hole_value();
Object undefined_value = GetReadOnlyRoots().undefined_value();
for (InternalIndex entry : IterateEntries()) {
Object maybe_code = get(CodeCacheCodeIndex(entry));
if (maybe_code == the_hole_value) continue;
// TODO(jgruber): There should only be one not-present value. Undefined is
// used as the initial filler on allocation. The hole is inserted by
// deletions.
if (maybe_code == undefined_value) continue;
SharedFunctionInfo sfi =
SharedFunctionInfo::cast(get(CodeCacheSFIIndex(entry)));
Code code = Code::cast(maybe_code);
if (code.marked_for_deoptimization()) {
// TODO(jgruber): Due to design of the compilation cache, there's a
// strange layering violation here in which we call the higher-level
// tracing function from the lower-level implementation. At some point,
// the entire compilation cache design should be reconsidered.
if (FLAG_trace_turbo_nci) {
CompilationCacheCode::TraceRemovalForDeoptimization(sfi, code);
}
sfi.set_may_have_cached_code(false);
RemoveEntry(EntryToIndex(entry));
}
}
}
void CompilationCacheTable::Remove(Object value) {
DisallowGarbageCollection no_gc;
for (InternalIndex entry : IterateEntries()) {

View File

@ -8,7 +8,6 @@
#include "src/objects/feedback-cell.h"
#include "src/objects/hash-table.h"
#include "src/objects/js-regexp.h"
#include "src/objects/serialized-feedback.h"
#include "src/objects/shared-function-info.h"
#include "src/roots/roots.h"
@ -123,14 +122,10 @@ class CompilationCacheTable
// The Code cache shares native-context-independent (NCI) code between
// contexts.
bool LookupCode(Handle<SharedFunctionInfo> key, MaybeHandle<Code>* code_out,
MaybeHandle<SerializedFeedback>* feedback_out);
MaybeHandle<Code> LookupCode(Handle<SharedFunctionInfo> key);
static Handle<CompilationCacheTable> PutCode(
Isolate* isolate, Handle<CompilationCacheTable> cache,
Handle<SharedFunctionInfo> key, Handle<Code> value_code,
Handle<SerializedFeedback> value_feedback);
// Removes Code objects that are marked_for_deoptimization from the cache.
void ClearDeoptimizedCode();
Handle<SharedFunctionInfo> key, Handle<Code> value);
void Remove(Object value);
void Age();

View File

@ -1,26 +0,0 @@
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_OBJECTS_SERIALIZED_FEEDBACK_INL_H_
#define V8_OBJECTS_SERIALIZED_FEEDBACK_INL_H_
#include "src/objects/fixed-array-inl.h"
#include "src/objects/serialized-feedback.h"
// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"
namespace v8 {
namespace internal {
SerializedFeedback::SerializedFeedback(Address ptr) : ByteArray(ptr) {}
CAST_ACCESSOR(SerializedFeedback)
} // namespace internal
} // namespace v8
#include "src/objects/object-macros-undef.h"
#endif // V8_OBJECTS_SERIALIZED_FEEDBACK_INL_H_

View File

@ -1,121 +0,0 @@
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/objects/serialized-feedback.h"
#include "src/common/assert-scope.h"
#include "src/objects/fixed-array-inl.h"
#include "src/objects/serialized-feedback-inl.h"
namespace v8 {
namespace internal {
namespace {
// Currently, only smi-based feedback is serialized.
bool IsSerialized(FeedbackSlotKind kind) {
switch (kind) {
case FeedbackSlotKind::kBinaryOp:
case FeedbackSlotKind::kCompareOp:
case FeedbackSlotKind::kForIn:
return true;
case FeedbackSlotKind::kStoreGlobalSloppy:
case FeedbackSlotKind::kStoreNamedSloppy:
case FeedbackSlotKind::kStoreKeyedSloppy:
case FeedbackSlotKind::kCall:
case FeedbackSlotKind::kLoadProperty:
case FeedbackSlotKind::kLoadGlobalNotInsideTypeof:
case FeedbackSlotKind::kLoadGlobalInsideTypeof:
case FeedbackSlotKind::kLoadKeyed:
case FeedbackSlotKind::kHasKeyed:
case FeedbackSlotKind::kStoreGlobalStrict:
case FeedbackSlotKind::kStoreNamedStrict:
case FeedbackSlotKind::kStoreOwnNamed:
case FeedbackSlotKind::kStoreKeyedStrict:
case FeedbackSlotKind::kStoreInArrayLiteral:
case FeedbackSlotKind::kStoreDataPropertyInLiteral:
case FeedbackSlotKind::kTypeProfile:
case FeedbackSlotKind::kLiteral:
case FeedbackSlotKind::kInstanceOf:
case FeedbackSlotKind::kCloneObject:
return false;
case FeedbackSlotKind::kInvalid:
case FeedbackSlotKind::kKindsNumber:
UNREACHABLE();
}
UNREACHABLE();
}
constexpr int SlotCountToByteLength(int slot_count) {
return slot_count * kUInt32Size;
}
constexpr int ByteLengthToSlotCount(int byte_length) {
CONSTEXPR_DCHECK((byte_length % kUInt32Size) == 0);
return byte_length / kUInt32Size;
}
} // namespace
// static
Handle<SerializedFeedback> SerializedFeedback::Serialize(
Isolate* isolate, Handle<FeedbackVector> vector) {
Handle<FeedbackMetadata> metadata(vector->metadata(), isolate);
const int slot_count = metadata->slot_count();
const int byte_length = SlotCountToByteLength(slot_count);
// Allocating in old space since these objects are inserted into long-lived
// caches.
auto sf = Handle<SerializedFeedback>::cast(
isolate->factory()->NewByteArray(byte_length, AllocationType::kOld));
// Initialize all relevant slots.
for (int i = 0; i < slot_count;) {
const FeedbackSlot slot{i};
const FeedbackSlotKind slot_kind = metadata->GetKind(slot);
const int slot_size = FeedbackMetadata::GetSlotSize(slot_kind);
if (IsSerialized(slot_kind)) {
// All handled slot kinds currently use smi-based feedback; these are
// simply serialized as the value.
DCHECK_EQ(slot_size, 1);
const uint32_t value = vector->Get(slot)->ToSmi().value();
sf->set_uint32(i, value);
} else {
// Unhandled slot kinds are zeroed.
sf->set_uint32(i, 0);
}
i += slot_size;
}
return sf;
}
void SerializedFeedback::DeserializeInto(FeedbackVector vector) const {
DisallowGarbageCollection no_gc;
FeedbackMetadata metadata = vector.metadata();
const int slot_count = metadata.slot_count();
CHECK_EQ(slot_count, ByteLengthToSlotCount(length()));
for (int i = 0; i < slot_count;) {
const FeedbackSlot slot{i};
const FeedbackSlotKind slot_kind = metadata.GetKind(slot);
const int slot_size = FeedbackMetadata::GetSlotSize(slot_kind);
const uint32_t serialized_value = get_uint32(i);
if (IsSerialized(slot_kind)) {
DCHECK_EQ(slot_size, 1);
DCHECK_EQ(vector.Get(slot)->ToSmi().value(), 0); // Uninitialized.
vector.SynchronizedSet(slot, Smi::FromInt(serialized_value),
SKIP_WRITE_BARRIER);
DCHECK_EQ(vector.Get(slot)->ToSmi().value(), serialized_value);
} else {
DCHECK_EQ(serialized_value, 0);
}
i += slot_size;
}
}
} // namespace internal
} // namespace v8

View File

@ -1,41 +0,0 @@
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_OBJECTS_SERIALIZED_FEEDBACK_H_
#define V8_OBJECTS_SERIALIZED_FEEDBACK_H_
#include "src/objects/fixed-array.h"
// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"
namespace v8 {
namespace internal {
class FeedbackVector;
// A serialized representation of FeedbackVector, used to share collected
// feedback between native contexts.
//
// Note: The encoding is not final and thus not documented here yet. Currently,
// only smi-based feedback is shared/serialized.
class SerializedFeedback : public ByteArray {
public:
// Serialize current feedback vector values into a SerializedFeedback object.
static Handle<SerializedFeedback> Serialize(Isolate* isolate,
Handle<FeedbackVector> vector);
// Deserialize into the given vector.
void DeserializeInto(FeedbackVector vector) const;
DECL_CAST(SerializedFeedback)
OBJECT_CONSTRUCTORS(SerializedFeedback, ByteArray);
};
} // namespace internal
} // namespace v8
#include "src/objects/object-macros-undef.h"
#endif // V8_OBJECTS_SERIALIZED_FEEDBACK_H_

View File

@ -440,21 +440,10 @@ std::ostream& operator<<(std::ostream& os, const SourceCodeOf& v) {
}
}
bool SharedFunctionInfo::TryGetCachedCodeAndSerializedFeedback(
Isolate* isolate, MaybeHandle<Code>* code_out,
MaybeHandle<SerializedFeedback>* feedback_out) {
if (!may_have_cached_code()) return {};
Handle<SharedFunctionInfo> sfi(*this, isolate);
return isolate->compilation_cache()->LookupCode(sfi, code_out, feedback_out);
}
MaybeHandle<Code> SharedFunctionInfo::TryGetCachedCode(Isolate* isolate) {
if (!may_have_cached_code()) return {};
Handle<SharedFunctionInfo> sfi(*this, isolate);
MaybeHandle<Code> maybe_code;
MaybeHandle<SerializedFeedback> maybe_feedback;
isolate->compilation_cache()->LookupCode(sfi, &maybe_code, &maybe_feedback);
return maybe_code;
Handle<SharedFunctionInfo> zis(*this, isolate);
return isolate->compilation_cache()->LookupCode(zis);
}
void SharedFunctionInfo::DisableOptimization(BailoutReason reason) {

View File

@ -34,7 +34,6 @@ class BytecodeArray;
class CoverageInfo;
class DebugInfo;
class IsCompiledScope;
class SerializedFeedback;
class WasmCapiFunctionData;
class WasmExportedFunctionData;
class WasmJSFunctionData;
@ -397,12 +396,8 @@ class SharedFunctionInfo
// hence the 'may'.
DECL_BOOLEAN_ACCESSORS(may_have_cached_code)
// Fetches cached NCI artifacts, if they exist. Some callsites only care
// about the cached Code object; to distinguish them clearly, there's a
// dedicated helper that only returns the Code object.
bool TryGetCachedCodeAndSerializedFeedback(
Isolate* isolate, MaybeHandle<Code>* code_out,
MaybeHandle<SerializedFeedback>* feedback_out);
// Returns the cached Code object for this SFI if it exists, an empty handle
// otherwise.
MaybeHandle<Code> TryGetCachedCode(Isolate* isolate);
// Is this function a top-level function (scripts, evals).

View File

@ -79,21 +79,11 @@ void TryInstallNCICode(Isolate* isolate, Handle<JSFunction> function,
DCHECK(sfi->may_have_cached_code());
DCHECK_EQ(function->shared(), *sfi);
MaybeHandle<Code> maybe_code;
MaybeHandle<SerializedFeedback> maybe_feedback;
if (sfi->TryGetCachedCodeAndSerializedFeedback(isolate, &maybe_code,
&maybe_feedback)) {
Handle<Code> code = maybe_code.ToHandleChecked();
if (FLAG_trace_turbo_nci) CompilationCacheCode::TraceHit(sfi, code);
Handle<Code> code;
if (sfi->TryGetCachedCode(isolate).ToHandle(&code)) {
function->set_code(*code);
if (!function->has_feedback_vector()) {
JSFunction::EnsureFeedbackVector(function, is_compiled_scope);
// TODO(jgruber,v8:8888): Consider combining shared feedback with
// existing feedback here.
maybe_feedback.ToHandleChecked()->DeserializeInto(
function->feedback_vector());
}
JSFunction::EnsureFeedbackVector(function, is_compiled_scope);
if (FLAG_trace_turbo_nci) CompilationCacheCode::TraceHit(sfi, code);
}
}

View File

@ -1447,19 +1447,6 @@
'compiler/serializer-transition-propagation': [SKIP],
# crbug.com/v8/11110
'es6/super-ic-opt*': [SKIP],
# Rely on optimizations not yet enabled for NCI. For example, preserving
# kIdentifyZero for kNumberFloor (and thus avoiding later deopts) only works
# if the JSCallReducer reduces the Math.floor builtin call to the
# kNumberFloor operator.
'compiler/number-floor': [SKIP],
'compiler/number-ceil': [SKIP],
'compiler/number-min': [SKIP],
'compiler/number-modulus': [SKIP],
'compiler/number-toboolean': [SKIP],
'compiler/number-abs': [SKIP],
'compiler/number-round': [SKIP],
'compiler/number-max': [SKIP],
'compiler/number-trunc': [SKIP],
}], # variant == nci or variant == nci_as_midtier
['((arch == mipsel or arch == mips64el or arch == mips or arch == mips64) and not simd_mips) or (arch in [ppc64, s390x])', {