[counters] Make RuntimeCallStats aware of thread status

RuntimeCallTimerScope can now be called with the optional flag
kThreadSpecific, which chooses the appropriate RuntimeCallCounterId given
whether the RuntimeCallStats object is for the main isolate thread or a
worker thread.

While this doesn't change any existing timers over to use this flag, it
does add checks that, in the default case, any thread-specific
counters used are the correct ones given the thread status.

Bug: v8:10006
Change-Id: Idb545714284bcd2e2fdca991918ddf976dcbdf70
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1928863
Commit-Queue: Dan Elphick <delphick@chromium.org>
Reviewed-by: Ross McIlroy <rmcilroy@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Reviewed-by: Mythri Alle <mythria@chromium.org>
Cr-Commit-Position: refs/heads/master@{#65125}
This commit is contained in:
Dan Elphick 2019-11-22 17:59:31 +00:00 committed by Commit Bot
parent f6f0646d2d
commit 90c6444292
2 changed files with 113 additions and 38 deletions

View File

@ -125,7 +125,7 @@ Counters::Counters(Isolate* isolate)
STATS_COUNTER_TS_LIST(SC)
#undef SC
// clang format on
runtime_call_stats_(),
runtime_call_stats_(RuntimeCallStats::kMainIsolateThread),
worker_thread_runtime_call_stats_() {
static const struct {
Histogram Counters::*member;
@ -426,7 +426,8 @@ void RuntimeCallTimer::Snapshot() {
Resume(now);
}
RuntimeCallStats::RuntimeCallStats() : in_use_(false) {
RuntimeCallStats::RuntimeCallStats(ThreadType thread_type)
: in_use_(false), thread_type_(thread_type) {
static const char* const kNames[] = {
#define CALL_BUILTIN_COUNTER(name) "GC_" #name,
FOR_EACH_GC_COUNTER(CALL_BUILTIN_COUNTER) //
@ -446,12 +447,47 @@ RuntimeCallStats::RuntimeCallStats() : in_use_(false) {
#define CALL_BUILTIN_COUNTER(name) #name,
FOR_EACH_HANDLER_COUNTER(CALL_BUILTIN_COUNTER) //
#undef CALL_BUILTIN_COUNTER
#define THREAD_SPECIFIC_COUNTER(name) #name,
FOR_EACH_THREAD_SPECIFIC_COUNTER(THREAD_SPECIFIC_COUNTER) //
#undef THREAD_SPECIFIC_COUNTER
};
for (int i = 0; i < kNumberOfCounters; i++) {
this->counters_[i] = RuntimeCallCounter(kNames[i]);
}
}
namespace {
// Returns its first argument. Combined with the list-macro expansion below it
// extracts the first thread-specific counter id without naming it explicitly.
constexpr RuntimeCallCounterId FirstCounter(RuntimeCallCounterId first, ...) {
return first;
}
// Id of the first counter that has a main-thread/Background variant pair. The
// trailing 0 satisfies FirstCounter's varargs once the list has expanded.
#define THREAD_SPECIFIC_COUNTER(name) k##name,
constexpr RuntimeCallCounterId kFirstThreadVariantCounter =
FirstCounter(FOR_EACH_THREAD_SPECIFIC_COUNTER(THREAD_SPECIFIC_COUNTER) 0);
#undef THREAD_SPECIFIC_COUNTER
// Number of thread-specific counters, computed by expanding each list entry
// to "+1".
#define THREAD_SPECIFIC_COUNTER(name) +1
constexpr int kThreadVariantCounterCount =
0 FOR_EACH_THREAD_SPECIFIC_COUNTER(THREAD_SPECIFIC_COUNTER);
#undef THREAD_SPECIFIC_COUNTER
// Id of the last thread-specific counter; the ids form a contiguous range.
constexpr auto kLastThreadVariantCounter = static_cast<RuntimeCallCounterId>(
kFirstThreadVariantCounter + kThreadVariantCounterCount - 1);
} // namespace
bool RuntimeCallStats::HasThreadSpecificCounterVariants(
RuntimeCallCounterId id) {
// True when the id lies within the contiguous range of counters that come in
// main-thread/Background variant pairs (see
// FOR_EACH_THREAD_SPECIFIC_COUNTER). Note this does NOT distinguish which of
// the two variants id is; use IsBackgroundThreadSpecificVariant for that.
return id >= kFirstThreadVariantCounter && id <= kLastThreadVariantCounter;
}
bool RuntimeCallStats::IsBackgroundThreadSpecificVariant(
RuntimeCallCounterId id) {
// Thread-specific counters are laid out in pairs — main-thread variant first,
// Background variant second — so the Background variants sit at odd offsets
// from the start of the range.
return HasThreadSpecificCounterVariants(id) &&
(id - kFirstThreadVariantCounter) % 2 == 1;
}
void RuntimeCallStats::Enter(RuntimeCallTimer* timer,
RuntimeCallCounterId counter_id) {
DCHECK(IsCalledOnTheSameThread());
@ -536,7 +572,7 @@ void RuntimeCallStats::Dump(v8::tracing::TracedValue* value) {
in_use_ = false;
}
WorkerThreadRuntimeCallStats::WorkerThreadRuntimeCallStats() {}
WorkerThreadRuntimeCallStats::WorkerThreadRuntimeCallStats() = default;
WorkerThreadRuntimeCallStats::~WorkerThreadRuntimeCallStats() {
if (tls_key_) base::Thread::DeleteThreadLocalKey(*tls_key_);
@ -551,7 +587,7 @@ base::Thread::LocalStorageKey WorkerThreadRuntimeCallStats::GetKey() {
RuntimeCallStats* WorkerThreadRuntimeCallStats::NewTable() {
DCHECK(TracingFlags::is_runtime_stats_enabled());
std::unique_ptr<RuntimeCallStats> new_table =
std::make_unique<RuntimeCallStats>();
std::make_unique<RuntimeCallStats>(RuntimeCallStats::kWorkerThread);
RuntimeCallStats* result = new_table.get();
base::MutexGuard lock(&mutex_);

View File

@ -893,6 +893,25 @@ class RuntimeCallTimer final {
V(WeakMap_New) \
V(WeakMap_Set)
// Expands one thread-specific counter into its two adjacent variants:
// the main-thread counter (PrefixSuffix) immediately followed by the
// background counter (PrefixBackgroundSuffix). CounterIdForThread relies on
// this main-then-background adjacency.
#define ADD_THREAD_SPECIFIC_COUNTER(V, Prefix, Suffix) \
V(Prefix##Suffix) \
V(Prefix##Background##Suffix)
// Counters that exist in both a main-isolate-thread and a worker-thread
// (Background) variant.
#define FOR_EACH_THREAD_SPECIFIC_COUNTER(V) \
ADD_THREAD_SPECIFIC_COUNTER(V, Compile, Analyse) \
ADD_THREAD_SPECIFIC_COUNTER(V, Compile, Eval) \
ADD_THREAD_SPECIFIC_COUNTER(V, Compile, Function) \
ADD_THREAD_SPECIFIC_COUNTER(V, Compile, Ignition) \
ADD_THREAD_SPECIFIC_COUNTER(V, Compile, RewriteReturnResult) \
ADD_THREAD_SPECIFIC_COUNTER(V, Compile, ScopeAnalysis) \
ADD_THREAD_SPECIFIC_COUNTER(V, Compile, Script) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, AssembleCode) \
ADD_THREAD_SPECIFIC_COUNTER(V, Parse, ArrowFunctionLiteral) \
ADD_THREAD_SPECIFIC_COUNTER(V, Parse, FunctionLiteral) \
ADD_THREAD_SPECIFIC_COUNTER(V, Parse, Program) \
ADD_THREAD_SPECIFIC_COUNTER(V, PreParse, ArrowFunctionLiteral) \
ADD_THREAD_SPECIFIC_COUNTER(V, PreParse, WithVariableResolution)
#define FOR_EACH_MANUAL_COUNTER(V) \
V(AccessorGetterCallback) \
V(AccessorSetterCallback) \
@ -901,28 +920,14 @@ class RuntimeCallTimer final {
V(BoundFunctionLengthGetter) \
V(BoundFunctionNameGetter) \
V(CodeGenerationFromStringsCallbacks) \
V(CompileAnalyse) \
V(CompileBackgroundAnalyse) \
V(CompileBackgroundCompileTask) \
V(CompileBackgroundEval) \
V(CompileBackgroundFunction) \
V(CompileBackgroundIgnition) \
V(CompileBackgroundRewriteReturnResult) \
V(CompileBackgroundScopeAnalysis) \
V(CompileBackgroundScript) \
V(CompileCollectSourcePositions) \
V(CompileDeserialize) \
V(CompileEnqueueOnDispatcher) \
V(CompileEval) \
V(CompileFinalizeBackgroundCompileTask) \
V(CompileFinishNowOnDispatcher) \
V(CompileFunction) \
V(CompileGetFromOptimizedCodeMap) \
V(CompileIgnition) \
V(CompileIgnitionFinalization) \
V(CompileRewriteReturnResult) \
V(CompileScopeAnalysis) \
V(CompileScript) \
V(CompileSerialize) \
V(CompileWaitForDispatcher) \
V(DeoptimizeCode) \
@ -964,18 +969,8 @@ class RuntimeCallTimer final {
V(Object_DeleteProperty) \
V(ObjectVerify) \
V(OptimizeCode) \
V(ParseArrowFunctionLiteral) \
V(ParseBackgroundArrowFunctionLiteral) \
V(ParseBackgroundFunctionLiteral) \
V(ParseBackgroundProgram) \
V(ParseEval) \
V(ParseFunction) \
V(ParseFunctionLiteral) \
V(ParseProgram) \
V(PreParseArrowFunctionLiteral) \
V(PreParseBackgroundArrowFunctionLiteral) \
V(PreParseBackgroundWithVariableResolution) \
V(PreParseWithVariableResolution) \
V(PropertyCallback) \
V(PrototypeMap_TransitionToAccessorProperty) \
V(PrototypeMap_TransitionToDataProperty) \
@ -1046,29 +1041,34 @@ class RuntimeCallTimer final {
enum RuntimeCallCounterId {
#define CALL_RUNTIME_COUNTER(name) kGC_##name,
FOR_EACH_GC_COUNTER(CALL_RUNTIME_COUNTER)
FOR_EACH_GC_COUNTER(CALL_RUNTIME_COUNTER) //
#undef CALL_RUNTIME_COUNTER
#define CALL_RUNTIME_COUNTER(name) k##name,
FOR_EACH_MANUAL_COUNTER(CALL_RUNTIME_COUNTER)
FOR_EACH_MANUAL_COUNTER(CALL_RUNTIME_COUNTER) //
#undef CALL_RUNTIME_COUNTER
#define CALL_RUNTIME_COUNTER(name, nargs, ressize) kRuntime_##name,
FOR_EACH_INTRINSIC(CALL_RUNTIME_COUNTER)
FOR_EACH_INTRINSIC(CALL_RUNTIME_COUNTER) //
#undef CALL_RUNTIME_COUNTER
#define CALL_BUILTIN_COUNTER(name) kBuiltin_##name,
BUILTIN_LIST_C(CALL_BUILTIN_COUNTER)
BUILTIN_LIST_C(CALL_BUILTIN_COUNTER) //
#undef CALL_BUILTIN_COUNTER
#define CALL_BUILTIN_COUNTER(name) kAPI_##name,
FOR_EACH_API_COUNTER(CALL_BUILTIN_COUNTER)
FOR_EACH_API_COUNTER(CALL_BUILTIN_COUNTER) //
#undef CALL_BUILTIN_COUNTER
#define CALL_BUILTIN_COUNTER(name) kHandler_##name,
FOR_EACH_HANDLER_COUNTER(CALL_BUILTIN_COUNTER)
FOR_EACH_HANDLER_COUNTER(CALL_BUILTIN_COUNTER) //
#undef CALL_BUILTIN_COUNTER
kNumberOfCounters
#define THREAD_SPECIFIC_COUNTER(name) k##name,
FOR_EACH_THREAD_SPECIFIC_COUNTER(THREAD_SPECIFIC_COUNTER) //
#undef THREAD_SPECIFIC_COUNTER
kNumberOfCounters,
};
class RuntimeCallStats final {
public:
V8_EXPORT_PRIVATE RuntimeCallStats();
enum ThreadType { kMainIsolateThread, kWorkerThread };
explicit V8_EXPORT_PRIVATE RuntimeCallStats(ThreadType thread_type);
// Starting measuring the time for a function. This will establish the
// connection to the parent counter for properly calculating the own times.
@ -1098,6 +1098,32 @@ class RuntimeCallStats final {
bool InUse() { return in_use_; }
bool IsCalledOnTheSameThread();
V8_EXPORT_PRIVATE bool IsBackgroundThreadSpecificVariant(
RuntimeCallCounterId id);
V8_EXPORT_PRIVATE bool HasThreadSpecificCounterVariants(
RuntimeCallCounterId id);
// This should only be called for counters with a dual Background variant. If
// on the main thread, this just returns the counter. If on a worker thread,
// it returns the Background variant of the counter.
RuntimeCallCounterId CounterIdForThread(RuntimeCallCounterId id) {
DCHECK(HasThreadSpecificCounterVariants(id));
// All thread specific counters are laid out with the main thread variant
// first followed by the background variant, so the worker-thread id is
// always the main-thread id plus one.
return thread_type_ == kWorkerThread
? static_cast<RuntimeCallCounterId>(id + 1)
: id;
}
// Returns true if |id| may be recorded on this RuntimeCallStats' thread: a
// thread-specific variant must match the thread type (Background variant on
// a worker thread, plain variant on the main isolate thread); any other
// counter is currently allowed on either thread.
bool IsCounterAppropriateForThread(RuntimeCallCounterId id) {
// TODO(delphick): We should add background-only counters and ensure that
// all counters (not just the thread-specific variants) are only invoked on
// the correct thread.
if (!HasThreadSpecificCounterVariants(id)) return true;
return IsBackgroundThreadSpecificVariant(id) ==
(thread_type_ == kWorkerThread);
}
static const int kNumberOfCounters =
static_cast<int>(RuntimeCallCounterId::kNumberOfCounters);
RuntimeCallCounter* GetCounter(RuntimeCallCounterId counter_id) {
@ -1114,6 +1140,7 @@ class RuntimeCallStats final {
base::AtomicValue<RuntimeCallCounter*> current_counter_;
// Used to track nested tracing scopes.
bool in_use_;
ThreadType thread_type_;
ThreadId thread_id_;
RuntimeCallCounter counters_[kNumberOfCounters];
};
@ -1171,14 +1198,26 @@ class WorkerThreadRuntimeCallStatsScope final {
// the time of C++ scope.
class RuntimeCallTimerScope {
public:
// If kExact is chosen the counter will be use as given. With kThreadSpecific,
// if the RuntimeCallStats was created for a worker thread, then the
// background specific version of the counter will be used instead.
enum CounterMode { kExact, kThreadSpecific };
inline RuntimeCallTimerScope(Isolate* isolate,
RuntimeCallCounterId counter_id);
inline RuntimeCallTimerScope(RuntimeCallStats* stats,
RuntimeCallCounterId counter_id) {
RuntimeCallCounterId counter_id,
CounterMode mode = CounterMode::kExact) {
if (V8_LIKELY(!TracingFlags::is_runtime_stats_enabled() ||
stats == nullptr))
stats == nullptr)) {
return;
}
stats_ = stats;
if (mode == CounterMode::kThreadSpecific) {
counter_id = stats->CounterIdForThread(counter_id);
}
DCHECK(stats->IsCounterAppropriateForThread(counter_id));
stats_->Enter(&timer_, counter_id);
}