[logging] Add runtime-call-stats compile-time flag

Make runtime-call-stats a compile-time flag. Disabling RCS saves roughly
1 MB of binary size on 64-bit systems and yields minor performance
improvements.

Bug: v8:11299
Change-Id: Ia1db75e330a665db5251b685c164b96857e38d2d
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2799766
Reviewed-by: Ross McIlroy <rmcilroy@chromium.org>
Reviewed-by: Jakob Gruber <jgruber@chromium.org>
Reviewed-by: Michael Achenbach <machenbach@chromium.org>
Reviewed-by: Igor Sheludko <ishell@chromium.org>
Commit-Queue: Camillo Bruni <cbruni@chromium.org>
Cr-Commit-Position: refs/heads/master@{#73910}
Camillo Bruni 2021-04-12 16:38:06 +02:00 committed by Commit Bot
parent fd29e246f6
commit e101c057af
61 changed files with 2065 additions and 1971 deletions
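
The diffs below replace explicit RuntimeCallTimerScope declarations with an
RCS_SCOPE macro and gate the remaining RCS code behind the new
V8_RUNTIME_CALL_STATS define, set from the GN arg v8_enable_runtime_call_stats
(still true by default). The macro itself lives in the new
src/logging/runtime-call-stats.h, which is not part of this excerpt; the
snippet below is only a minimal sketch of the conditional-macro pattern it
relies on, with the variable name rcs_timer_scope chosen purely for
illustration.

// Illustrative sketch, not the verbatim V8 definition: with
// V8_RUNTIME_CALL_STATS defined, each RCS_SCOPE call site opens a
// RuntimeCallTimerScope; without it, the macro expands to nothing and
// the call sites compile away.
#ifdef V8_RUNTIME_CALL_STATS
#define RCS_SCOPE(...) \
  v8::internal::RuntimeCallTimerScope rcs_timer_scope(__VA_ARGS__)
#else
#define RCS_SCOPE(...)
#endif

Builds that set v8_enable_runtime_call_stats = false in their GN args drop
both the timer scopes and the stats plumbing, which is where the binary-size
saving mentioned above comes from.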


@ -1038,6 +1038,10 @@ config("toolchain") {
defines += [ "ENABLE_VERIFY_CSA" ]
}
if (v8_enable_runtime_call_stats) {
defines += [ "V8_RUNTIME_CALL_STATS" ]
}
if (!v8_untrusted_code_mitigations) {
defines += [ "DISABLE_UNTRUSTED_CODE_MITIGATIONS" ]
}
@ -1896,6 +1900,7 @@ action("v8_dump_build_config") {
"v8_enable_verify_predictable=$v8_enable_verify_predictable",
"v8_enable_verify_csa=$v8_enable_verify_csa",
"v8_enable_lite_mode=$v8_enable_lite_mode",
"v8_enable_runtime_call_stats=$v8_enable_runtime_call_stats",
"v8_enable_pointer_compression=$v8_enable_pointer_compression",
"v8_enable_webassembly=$v8_enable_webassembly",
"v8_control_flow_integrity=$v8_control_flow_integrity",
@ -2679,13 +2684,13 @@ v8_header_set("v8_internal_headers") {
"src/libsampler/sampler.h",
"src/logging/code-events.h",
"src/logging/counters-definitions.h",
"src/logging/counters-inl.h",
"src/logging/counters.h",
"src/logging/local-logger.h",
"src/logging/log-inl.h",
"src/logging/log-utils.h",
"src/logging/log.h",
"src/logging/metrics.h",
"src/logging/runtime-call-stats.h",
"src/logging/tracing-flags.h",
"src/numbers/bignum-dtoa.h",
"src/numbers/bignum.h",
@ -3763,6 +3768,7 @@ v8_source_set("v8_base_without_compiler") {
"src/logging/log-utils.cc",
"src/logging/log.cc",
"src/logging/metrics.cc",
"src/logging/runtime-call-stats.cc",
"src/logging/tracing-flags.cc",
"src/numbers/bignum-dtoa.cc",
"src/numbers/bignum.cc",


@ -75,6 +75,9 @@ declare_args() {
# executed as standard JavaScript instead.
v8_enable_webassembly = ""
# Enable runtime call stats.
v8_enable_runtime_call_stats = true
# Add fuzzilli fuzzer support.
v8_fuzzilli = false


@ -96,8 +96,7 @@ inline JSReceiver FunctionCallbackArguments::holder() {
Handle<InterceptorInfo> interceptor, Handle<Name> name) { \
DCHECK_NAME_COMPATIBLE(interceptor, name); \
Isolate* isolate = this->isolate(); \
RuntimeCallTimerScope timer( \
isolate, RuntimeCallCounterId::kNamed##FUNCTION##Callback); \
RCS_SCOPE(isolate, RuntimeCallCounterId::kNamed##FUNCTION##Callback); \
Handle<Object> receiver_check_unsupported; \
GenericNamedProperty##FUNCTION##Callback f = \
ToCData<GenericNamedProperty##FUNCTION##Callback>( \
@ -120,8 +119,7 @@ FOR_EACH_CALLBACK(CREATE_NAMED_CALLBACK)
Handle<InterceptorInfo> interceptor, uint32_t index) { \
DCHECK(!interceptor->is_named()); \
Isolate* isolate = this->isolate(); \
RuntimeCallTimerScope timer( \
isolate, RuntimeCallCounterId::kIndexed##FUNCTION##Callback); \
RCS_SCOPE(isolate, RuntimeCallCounterId::kIndexed##FUNCTION##Callback); \
Handle<Object> receiver_check_unsupported; \
IndexedProperty##FUNCTION##Callback f = \
ToCData<IndexedProperty##FUNCTION##Callback>(interceptor->TYPE()); \
@ -142,7 +140,7 @@ FOR_EACH_CALLBACK(CREATE_INDEXED_CALLBACK)
Handle<Object> FunctionCallbackArguments::Call(CallHandlerInfo handler) {
Isolate* isolate = this->isolate();
LOG(isolate, ApiObjectAccess("call", holder()));
RuntimeCallTimerScope timer(isolate, RuntimeCallCounterId::kFunctionCallback);
RCS_SCOPE(isolate, RuntimeCallCounterId::kFunctionCallback);
v8::FunctionCallback f =
v8::ToCData<v8::FunctionCallback>(handler.callback());
Handle<Object> receiver_check_unsupported;
@ -163,8 +161,7 @@ Handle<JSObject> PropertyCallbackArguments::CallNamedEnumerator(
Handle<InterceptorInfo> interceptor) {
DCHECK(interceptor->is_named());
LOG(isolate(), ApiObjectAccess("interceptor-named-enumerator", holder()));
RuntimeCallTimerScope timer(isolate(),
RuntimeCallCounterId::kNamedEnumeratorCallback);
RCS_SCOPE(isolate(), RuntimeCallCounterId::kNamedEnumeratorCallback);
return CallPropertyEnumerator(interceptor);
}
@ -172,8 +169,7 @@ Handle<JSObject> PropertyCallbackArguments::CallIndexedEnumerator(
Handle<InterceptorInfo> interceptor) {
DCHECK(!interceptor->is_named());
LOG(isolate(), ApiObjectAccess("interceptor-indexed-enumerator", holder()));
RuntimeCallTimerScope timer(isolate(),
RuntimeCallCounterId::kIndexedEnumeratorCallback);
RCS_SCOPE(isolate(), RuntimeCallCounterId::kIndexedEnumeratorCallback);
return CallPropertyEnumerator(interceptor);
}
@ -181,8 +177,7 @@ Handle<Object> PropertyCallbackArguments::CallNamedGetter(
Handle<InterceptorInfo> interceptor, Handle<Name> name) {
DCHECK_NAME_COMPATIBLE(interceptor, name);
Isolate* isolate = this->isolate();
RuntimeCallTimerScope timer(isolate,
RuntimeCallCounterId::kNamedGetterCallback);
RCS_SCOPE(isolate, RuntimeCallCounterId::kNamedGetterCallback);
LOG(isolate,
ApiNamedPropertyAccess("interceptor-named-getter", holder(), *name));
GenericNamedPropertyGetterCallback f =
@ -194,8 +189,7 @@ Handle<Object> PropertyCallbackArguments::CallNamedDescriptor(
Handle<InterceptorInfo> interceptor, Handle<Name> name) {
DCHECK_NAME_COMPATIBLE(interceptor, name);
Isolate* isolate = this->isolate();
RuntimeCallTimerScope timer(isolate,
RuntimeCallCounterId::kNamedDescriptorCallback);
RCS_SCOPE(isolate, RuntimeCallCounterId::kNamedDescriptorCallback);
LOG(isolate,
ApiNamedPropertyAccess("interceptor-named-descriptor", holder(), *name));
GenericNamedPropertyDescriptorCallback f =
@ -222,8 +216,7 @@ Handle<Object> PropertyCallbackArguments::CallNamedSetter(
GenericNamedPropertySetterCallback f =
ToCData<GenericNamedPropertySetterCallback>(interceptor->setter());
Isolate* isolate = this->isolate();
RuntimeCallTimerScope timer(isolate,
RuntimeCallCounterId::kNamedSetterCallback);
RCS_SCOPE(isolate, RuntimeCallCounterId::kNamedSetterCallback);
PREPARE_CALLBACK_INFO_FAIL_SIDE_EFFECT_CHECK(isolate, f, Handle<Object>,
v8::Value);
LOG(isolate,
@ -237,8 +230,7 @@ Handle<Object> PropertyCallbackArguments::CallNamedDefiner(
const v8::PropertyDescriptor& desc) {
DCHECK_NAME_COMPATIBLE(interceptor, name);
Isolate* isolate = this->isolate();
RuntimeCallTimerScope timer(isolate,
RuntimeCallCounterId::kNamedDefinerCallback);
RCS_SCOPE(isolate, RuntimeCallCounterId::kNamedDefinerCallback);
GenericNamedPropertyDefinerCallback f =
ToCData<GenericNamedPropertyDefinerCallback>(interceptor->definer());
PREPARE_CALLBACK_INFO_FAIL_SIDE_EFFECT_CHECK(isolate, f, Handle<Object>,
@ -253,8 +245,7 @@ Handle<Object> PropertyCallbackArguments::CallIndexedSetter(
Handle<InterceptorInfo> interceptor, uint32_t index, Handle<Object> value) {
DCHECK(!interceptor->is_named());
Isolate* isolate = this->isolate();
RuntimeCallTimerScope timer(isolate,
RuntimeCallCounterId::kIndexedSetterCallback);
RCS_SCOPE(isolate, RuntimeCallCounterId::kIndexedSetterCallback);
IndexedPropertySetterCallback f =
ToCData<IndexedPropertySetterCallback>(interceptor->setter());
PREPARE_CALLBACK_INFO_FAIL_SIDE_EFFECT_CHECK(isolate, f, Handle<Object>,
@ -270,8 +261,7 @@ Handle<Object> PropertyCallbackArguments::CallIndexedDefiner(
const v8::PropertyDescriptor& desc) {
DCHECK(!interceptor->is_named());
Isolate* isolate = this->isolate();
RuntimeCallTimerScope timer(isolate,
RuntimeCallCounterId::kIndexedDefinerCallback);
RCS_SCOPE(isolate, RuntimeCallCounterId::kIndexedDefinerCallback);
IndexedPropertyDefinerCallback f =
ToCData<IndexedPropertyDefinerCallback>(interceptor->definer());
PREPARE_CALLBACK_INFO_FAIL_SIDE_EFFECT_CHECK(isolate, f, Handle<Object>,
@ -286,8 +276,7 @@ Handle<Object> PropertyCallbackArguments::CallIndexedGetter(
Handle<InterceptorInfo> interceptor, uint32_t index) {
DCHECK(!interceptor->is_named());
Isolate* isolate = this->isolate();
RuntimeCallTimerScope timer(isolate,
RuntimeCallCounterId::kNamedGetterCallback);
RCS_SCOPE(isolate, RuntimeCallCounterId::kNamedGetterCallback);
LOG(isolate,
ApiIndexedPropertyAccess("interceptor-indexed-getter", holder(), index));
IndexedPropertyGetterCallback f =
@ -299,8 +288,7 @@ Handle<Object> PropertyCallbackArguments::CallIndexedDescriptor(
Handle<InterceptorInfo> interceptor, uint32_t index) {
DCHECK(!interceptor->is_named());
Isolate* isolate = this->isolate();
RuntimeCallTimerScope timer(isolate,
RuntimeCallCounterId::kIndexedDescriptorCallback);
RCS_SCOPE(isolate, RuntimeCallCounterId::kIndexedDescriptorCallback);
LOG(isolate, ApiIndexedPropertyAccess("interceptor-indexed-descriptor",
holder(), index));
IndexedPropertyDescriptorCallback f =
@ -338,8 +326,7 @@ Handle<JSObject> PropertyCallbackArguments::CallPropertyEnumerator(
Handle<Object> PropertyCallbackArguments::CallAccessorGetter(
Handle<AccessorInfo> info, Handle<Name> name) {
Isolate* isolate = this->isolate();
RuntimeCallTimerScope timer(isolate,
RuntimeCallCounterId::kAccessorGetterCallback);
RCS_SCOPE(isolate, RuntimeCallCounterId::kAccessorGetterCallback);
LOG(isolate, ApiNamedPropertyAccess("accessor-getter", holder(), *name));
AccessorNameGetterCallback f =
ToCData<AccessorNameGetterCallback>(info->getter());
@ -351,8 +338,7 @@ Handle<Object> PropertyCallbackArguments::CallAccessorSetter(
Handle<AccessorInfo> accessor_info, Handle<Name> name,
Handle<Object> value) {
Isolate* isolate = this->isolate();
RuntimeCallTimerScope timer(isolate,
RuntimeCallCounterId::kAccessorSetterCallback);
RCS_SCOPE(isolate, RuntimeCallCounterId::kAccessorSetterCallback);
AccessorNameSetterCallback f =
ToCData<AccessorNameSetterCallback>(accessor_info->setter());
PREPARE_CALLBACK_INFO(isolate, f, Handle<Object>, void, accessor_info,


@ -30,9 +30,9 @@
* TODO(jochen): Remove calls form API methods to DO_NOT_USE macros.
*/
#define LOG_API(isolate, class_name, function_name) \
i::RuntimeCallTimerScope _runtime_timer( \
isolate, i::RuntimeCallCounterId::kAPI_##class_name##_##function_name); \
#define LOG_API(isolate, class_name, function_name) \
RCS_SCOPE(isolate, \
i::RuntimeCallCounterId::kAPI_##class_name##_##function_name); \
LOG(isolate, ApiEntryCall("v8::" #class_name "::" #function_name))
#define ENTER_V8_DO_NOT_USE(isolate) i::VMState<v8::OTHER> __state__((isolate))


@ -184,8 +184,7 @@ Object GetIntrinsic(Isolate* isolate, v8::Intrinsic intrinsic) {
template <typename TemplateInfoT>
MaybeHandle<JSObject> ConfigureInstance(Isolate* isolate, Handle<JSObject> obj,
Handle<TemplateInfoT> data) {
RuntimeCallTimerScope timer(isolate,
RuntimeCallCounterId::kConfigureInstance);
RCS_SCOPE(isolate, RuntimeCallCounterId::kConfigureInstance);
HandleScope scope(isolate);
// Disable access checks while instantiating the object.
AccessCheckDisableScope access_check_scope(isolate, obj);
@ -371,8 +370,7 @@ MaybeHandle<JSObject> InstantiateObject(Isolate* isolate,
Handle<ObjectTemplateInfo> info,
Handle<JSReceiver> new_target,
bool is_prototype) {
RuntimeCallTimerScope timer(isolate,
RuntimeCallCounterId::kInstantiateObject);
RCS_SCOPE(isolate, RuntimeCallCounterId::kInstantiateObject);
Handle<JSFunction> constructor;
int serial_number = info->serial_number();
if (!new_target.is_null()) {
@ -465,8 +463,7 @@ MaybeHandle<Object> GetInstancePrototype(Isolate* isolate,
MaybeHandle<JSFunction> InstantiateFunction(
Isolate* isolate, Handle<NativeContext> native_context,
Handle<FunctionTemplateInfo> data, MaybeHandle<Name> maybe_name) {
RuntimeCallTimerScope timer(isolate,
RuntimeCallCounterId::kInstantiateFunction);
RCS_SCOPE(isolate, RuntimeCallCounterId::kInstantiateFunction);
int serial_number = data->serial_number();
if (serial_number) {
Handle<JSObject> result;
@ -653,8 +650,7 @@ Handle<JSFunction> ApiNatives::CreateApiFunction(
Isolate* isolate, Handle<NativeContext> native_context,
Handle<FunctionTemplateInfo> obj, Handle<Object> prototype,
InstanceType type, MaybeHandle<Name> maybe_name) {
RuntimeCallTimerScope timer(isolate,
RuntimeCallCounterId::kCreateApiFunction);
RCS_SCOPE(isolate, RuntimeCallCounterId::kCreateApiFunction);
Handle<SharedFunctionInfo> shared =
FunctionTemplateInfo::GetOrCreateSharedFunctionInfo(isolate, obj,
maybe_name);


@ -10044,8 +10044,7 @@ void InvokeAccessorGetterCallback(
v8::AccessorNameGetterCallback getter) {
// Leaving JavaScript.
Isolate* isolate = reinterpret_cast<Isolate*>(info.GetIsolate());
RuntimeCallTimerScope timer(isolate,
RuntimeCallCounterId::kAccessorGetterCallback);
RCS_SCOPE(isolate, RuntimeCallCounterId::kAccessorGetterCallback);
Address getter_address = reinterpret_cast<Address>(getter);
VMState<EXTERNAL> state(isolate);
ExternalCallbackScope call_scope(isolate, getter_address);
@ -10055,7 +10054,7 @@ void InvokeAccessorGetterCallback(
void InvokeFunctionCallback(const v8::FunctionCallbackInfo<v8::Value>& info,
v8::FunctionCallback callback) {
Isolate* isolate = reinterpret_cast<Isolate*>(info.GetIsolate());
RuntimeCallTimerScope timer(isolate, RuntimeCallCounterId::kFunctionCallback);
RCS_SCOPE(isolate, RuntimeCallCounterId::kFunctionCallback);
Address callback_address = reinterpret_cast<Address>(callback);
VMState<EXTERNAL> state(isolate);
ExternalCallbackScope call_scope(isolate, callback_address);
@ -10067,8 +10066,8 @@ void InvokeFinalizationRegistryCleanupFromTask(
Handle<JSFinalizationRegistry> finalization_registry,
Handle<Object> callback) {
Isolate* isolate = finalization_registry->native_context().GetIsolate();
RuntimeCallTimerScope timer(
isolate, RuntimeCallCounterId::kFinalizationRegistryCleanupFromTask);
RCS_SCOPE(isolate,
RuntimeCallCounterId::kFinalizationRegistryCleanupFromTask);
// Do not use ENTER_V8 because this is always called from a running
// FinalizationRegistryCleanupTask within V8 and we should not log it as an
// API call. This method is implemented here to avoid duplication of the


@ -623,9 +623,9 @@ void DeclarationScope::HoistSloppyBlockFunctions(AstNodeFactory* factory) {
}
bool DeclarationScope::Analyze(ParseInfo* info) {
RuntimeCallTimerScope runtimeTimer(
info->runtime_call_stats(), RuntimeCallCounterId::kCompileScopeAnalysis,
RuntimeCallStats::kThreadSpecific);
RCS_SCOPE(info->runtime_call_stats(),
RuntimeCallCounterId::kCompileScopeAnalysis,
RuntimeCallStats::kThreadSpecific);
DCHECK_NOT_NULL(info->literal());
DeclarationScope* scope = info->literal()->scope();


@ -246,8 +246,7 @@ BaselineCompiler::BaselineCompiler(
void BaselineCompiler::GenerateCode() {
{
RuntimeCallTimerScope runtimeTimer(
stats_, RuntimeCallCounterId::kCompileBaselinePreVisit);
RCS_SCOPE(stats_, RuntimeCallCounterId::kCompileBaselinePreVisit);
for (; !iterator_.done(); iterator_.Advance()) {
PreVisitSingleBytecode();
}
@ -259,8 +258,7 @@ void BaselineCompiler::GenerateCode() {
__ CodeEntry();
{
RuntimeCallTimerScope runtimeTimer(
stats_, RuntimeCallCounterId::kCompileBaselineVisit);
RCS_SCOPE(stats_, RuntimeCallCounterId::kCompileBaselineVisit);
Prologue();
AddPosition();
for (; !iterator_.done(); iterator_.Advance()) {


@ -23,8 +23,7 @@ namespace internal {
MaybeHandle<Code> GenerateBaselineCode(Isolate* isolate,
Handle<SharedFunctionInfo> shared) {
RuntimeCallTimerScope runtimeTimer(isolate,
RuntimeCallCounterId::kCompileBaseline);
RCS_SCOPE(isolate, RuntimeCallCounterId::kCompileBaseline);
baseline::BaselineCompiler compiler(
isolate, shared, handle(shared->GetBytecodeArray(isolate), isolate));


@ -113,8 +113,7 @@ void Accessors::ReconfigureToDataProperty(
v8::Local<v8::Name> key, v8::Local<v8::Value> val,
const v8::PropertyCallbackInfo<v8::Boolean>& info) {
i::Isolate* isolate = reinterpret_cast<i::Isolate*>(info.GetIsolate());
RuntimeCallTimerScope stats_scope(
isolate, RuntimeCallCounterId::kReconfigureToDataProperty);
RCS_SCOPE(isolate, RuntimeCallCounterId::kReconfigureToDataProperty);
HandleScope scope(isolate);
Handle<Object> receiver = Utils::OpenHandle(*info.This());
Handle<JSObject> holder =
@ -155,8 +154,7 @@ Handle<AccessorInfo> Accessors::MakeArgumentsIteratorInfo(Isolate* isolate) {
void Accessors::ArrayLengthGetter(
v8::Local<v8::Name> name, const v8::PropertyCallbackInfo<v8::Value>& info) {
i::Isolate* isolate = reinterpret_cast<i::Isolate*>(info.GetIsolate());
RuntimeCallTimerScope timer(isolate,
RuntimeCallCounterId::kArrayLengthGetter);
RCS_SCOPE(isolate, RuntimeCallCounterId::kArrayLengthGetter);
DisallowGarbageCollection no_gc;
HandleScope scope(isolate);
JSArray holder = JSArray::cast(*Utils::OpenHandle(*info.Holder()));
@ -168,8 +166,7 @@ void Accessors::ArrayLengthSetter(
v8::Local<v8::Name> name, v8::Local<v8::Value> val,
const v8::PropertyCallbackInfo<v8::Boolean>& info) {
i::Isolate* isolate = reinterpret_cast<i::Isolate*>(info.GetIsolate());
RuntimeCallTimerScope timer(isolate,
RuntimeCallCounterId::kArrayLengthSetter);
RCS_SCOPE(isolate, RuntimeCallCounterId::kArrayLengthSetter);
HandleScope scope(isolate);
DCHECK(Utils::OpenHandle(*name)->SameValue(
@ -282,8 +279,7 @@ Handle<AccessorInfo> Accessors::MakeModuleNamespaceEntryInfo(
void Accessors::StringLengthGetter(
v8::Local<v8::Name> name, const v8::PropertyCallbackInfo<v8::Value>& info) {
i::Isolate* isolate = reinterpret_cast<i::Isolate*>(info.GetIsolate());
RuntimeCallTimerScope timer(isolate,
RuntimeCallCounterId::kStringLengthGetter);
RCS_SCOPE(isolate, RuntimeCallCounterId::kStringLengthGetter);
DisallowGarbageCollection no_gc;
HandleScope scope(isolate);
@ -330,8 +326,7 @@ static Handle<Object> GetFunctionPrototype(Isolate* isolate,
void Accessors::FunctionPrototypeGetter(
v8::Local<v8::Name> name, const v8::PropertyCallbackInfo<v8::Value>& info) {
i::Isolate* isolate = reinterpret_cast<i::Isolate*>(info.GetIsolate());
RuntimeCallTimerScope timer(isolate,
RuntimeCallCounterId::kFunctionPrototypeGetter);
RCS_SCOPE(isolate, RuntimeCallCounterId::kFunctionPrototypeGetter);
HandleScope scope(isolate);
Handle<JSFunction> function =
Handle<JSFunction>::cast(Utils::OpenHandle(*info.Holder()));
@ -344,8 +339,7 @@ void Accessors::FunctionPrototypeSetter(
v8::Local<v8::Name> name, v8::Local<v8::Value> val,
const v8::PropertyCallbackInfo<v8::Boolean>& info) {
i::Isolate* isolate = reinterpret_cast<i::Isolate*>(info.GetIsolate());
RuntimeCallTimerScope timer(isolate,
RuntimeCallCounterId::kFunctionPrototypeSetter);
RCS_SCOPE(isolate, RuntimeCallCounterId::kFunctionPrototypeSetter);
HandleScope scope(isolate);
Handle<Object> value = Utils::OpenHandle(*val);
Handle<JSFunction> object =
@ -367,8 +361,7 @@ Handle<AccessorInfo> Accessors::MakeFunctionPrototypeInfo(Isolate* isolate) {
void Accessors::FunctionLengthGetter(
v8::Local<v8::Name> name, const v8::PropertyCallbackInfo<v8::Value>& info) {
i::Isolate* isolate = reinterpret_cast<i::Isolate*>(info.GetIsolate());
RuntimeCallTimerScope timer(isolate,
RuntimeCallCounterId::kFunctionLengthGetter);
RCS_SCOPE(isolate, RuntimeCallCounterId::kFunctionLengthGetter);
HandleScope scope(isolate);
Handle<JSFunction> function =
Handle<JSFunction>::cast(Utils::OpenHandle(*info.Holder()));
@ -722,8 +715,7 @@ Handle<AccessorInfo> Accessors::MakeFunctionCallerInfo(Isolate* isolate) {
void Accessors::BoundFunctionLengthGetter(
v8::Local<v8::Name> name, const v8::PropertyCallbackInfo<v8::Value>& info) {
i::Isolate* isolate = reinterpret_cast<i::Isolate*>(info.GetIsolate());
RuntimeCallTimerScope timer(isolate,
RuntimeCallCounterId::kBoundFunctionLengthGetter);
RCS_SCOPE(isolate, RuntimeCallCounterId::kBoundFunctionLengthGetter);
HandleScope scope(isolate);
Handle<JSBoundFunction> function =
Handle<JSBoundFunction>::cast(Utils::OpenHandle(*info.Holder()));
@ -749,8 +741,7 @@ Handle<AccessorInfo> Accessors::MakeBoundFunctionLengthInfo(Isolate* isolate) {
void Accessors::BoundFunctionNameGetter(
v8::Local<v8::Name> name, const v8::PropertyCallbackInfo<v8::Value>& info) {
i::Isolate* isolate = reinterpret_cast<i::Isolate*>(info.GetIsolate());
RuntimeCallTimerScope timer(isolate,
RuntimeCallCounterId::kBoundFunctionNameGetter);
RCS_SCOPE(isolate, RuntimeCallCounterId::kBoundFunctionNameGetter);
HandleScope scope(isolate);
Handle<JSBoundFunction> function =
Handle<JSBoundFunction>::cast(Utils::OpenHandle(*info.Holder()));


@ -23,8 +23,7 @@ namespace {
// TODO(dcarney): CallOptimization duplicates this logic, merge.
JSReceiver GetCompatibleReceiver(Isolate* isolate, FunctionTemplateInfo info,
JSReceiver receiver) {
RuntimeCallTimerScope timer(isolate,
RuntimeCallCounterId::kGetCompatibleReceiver);
RCS_SCOPE(isolate, RuntimeCallCounterId::kGetCompatibleReceiver);
Object recv_type = info.signature();
// No signature, return holder.
if (!recv_type.IsFunctionTemplateInfo()) return receiver;
@ -171,8 +170,7 @@ MaybeHandle<Object> Builtins::InvokeApiFunction(Isolate* isolate,
Handle<Object> receiver,
int argc, Handle<Object> args[],
Handle<HeapObject> new_target) {
RuntimeCallTimerScope timer(isolate,
RuntimeCallCounterId::kInvokeApiFunction);
RCS_SCOPE(isolate, RuntimeCallCounterId::kInvokeApiFunction);
DCHECK(function->IsFunctionTemplateInfo() ||
(function->IsJSFunction() &&
JSFunction::cast(*function).shared().IsApiFunction()));


@ -9,6 +9,7 @@
#include "src/json/json-stringifier.h"
#include "src/logging/counters.h"
#include "src/objects/objects-inl.h"
#include "src/tracing/traced-value.h"
#if defined(V8_USE_PERFETTO)
#include "protos/perfetto/trace/track_event/debug_annotation.pbzero.h"


@ -85,8 +85,7 @@ class BuiltinArguments : public JavaScriptArguments {
V8_NOINLINE static Address Builtin_Impl_Stats_##name( \
int args_length, Address* args_object, Isolate* isolate) { \
BuiltinArguments args(args_length, args_object); \
RuntimeCallTimerScope timer(isolate, \
RuntimeCallCounterId::kBuiltin_##name); \
RCS_SCOPE(isolate, RuntimeCallCounterId::kBuiltin_##name); \
TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.runtime"), \
"V8.Builtin_" #name); \
return CONVERT_OBJECT(Builtin_Impl_##name(args, isolate)); \


@ -945,11 +945,9 @@ bool FinalizeDeferredUnoptimizedCompilationJobs(
V8_WARN_UNUSED_RESULT MaybeHandle<Code> GetCodeFromOptimizedCodeCache(
Handle<JSFunction> function, BytecodeOffset osr_offset,
CodeKind code_kind) {
RuntimeCallTimerScope runtimeTimer(
function->GetIsolate(),
RuntimeCallCounterId::kCompileGetFromOptimizedCodeMap);
Handle<SharedFunctionInfo> shared(function->shared(), function->GetIsolate());
Isolate* isolate = function->GetIsolate();
RCS_SCOPE(isolate, RuntimeCallCounterId::kCompileGetFromOptimizedCodeMap);
Handle<SharedFunctionInfo> shared(function->shared(), isolate);
DisallowGarbageCollection no_gc;
Code code;
if (osr_offset.IsNone() && function->has_feedback_vector()) {
@ -1056,8 +1054,7 @@ bool PrepareJobWithHandleScope(OptimizedCompilationJob* job, Isolate* isolate,
bool GetOptimizedCodeNow(OptimizedCompilationJob* job, Isolate* isolate,
OptimizedCompilationInfo* compilation_info) {
TimerEventScope<TimerEventRecompileSynchronous> timer(isolate);
RuntimeCallTimerScope runtimeTimer(
isolate, RuntimeCallCounterId::kOptimizeNonConcurrent);
RCS_SCOPE(isolate, RuntimeCallCounterId::kOptimizeNonConcurrent);
TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"),
"V8.OptimizeNonConcurrent");
@ -1113,8 +1110,7 @@ bool GetOptimizedCodeLater(std::unique_ptr<OptimizedCompilationJob> job,
}
TimerEventScope<TimerEventRecompileSynchronous> timer(isolate);
RuntimeCallTimerScope runtimeTimer(
isolate, RuntimeCallCounterId::kOptimizeConcurrentPrepare);
RCS_SCOPE(isolate, RuntimeCallCounterId::kOptimizeConcurrentPrepare);
TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"),
"V8.OptimizeConcurrentPrepare");
@ -1252,8 +1248,7 @@ MaybeHandle<Code> GetOptimizedCode(
VMState<COMPILER> state(isolate);
TimerEventScope<TimerEventOptimizeCode> optimize_code_timer(isolate);
RuntimeCallTimerScope runtimeTimer(isolate,
RuntimeCallCounterId::kOptimizeCode);
RCS_SCOPE(isolate, RuntimeCallCounterId::kOptimizeCode);
TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"), "V8.OptimizeCode");
DCHECK(!isolate->has_pending_exception());
@ -1441,10 +1436,9 @@ MaybeHandle<SharedFunctionInfo> CompileToplevel(
PostponeInterruptsScope postpone(isolate);
DCHECK(!isolate->native_context().is_null());
RuntimeCallTimerScope runtimeTimer(
isolate, parse_info->flags().is_eval()
? RuntimeCallCounterId::kCompileEval
: RuntimeCallCounterId::kCompileScript);
RCS_SCOPE(isolate, parse_info->flags().is_eval()
? RuntimeCallCounterId::kCompileEval
: RuntimeCallCounterId::kCompileScript);
VMState<BYTECODE_COMPILER> state(isolate);
if (parse_info->literal() == nullptr &&
!parsing::ParseProgram(parse_info, script, maybe_outer_scope_info,
@ -1495,6 +1489,7 @@ MaybeHandle<SharedFunctionInfo> CompileToplevel(
return shared_info;
}
#ifdef V8_RUNTIME_CALL_STATS
RuntimeCallCounterId RuntimeCallCounterIdForCompileBackground(
ParseInfo* parse_info) {
if (parse_info->flags().is_toplevel()) {
@ -1505,6 +1500,7 @@ RuntimeCallCounterId RuntimeCallCounterIdForCompileBackground(
}
return RuntimeCallCounterId::kCompileBackgroundFunction;
}
#endif // V8_RUNTIME_CALL_STATS
MaybeHandle<SharedFunctionInfo> CompileAndFinalizeOnBackgroundThread(
ParseInfo* parse_info, AccountingAllocator* allocator,
@ -1515,9 +1511,8 @@ MaybeHandle<SharedFunctionInfo> CompileAndFinalizeOnBackgroundThread(
IsCompiledScope* is_compiled_scope) {
TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"),
"V8.CompileCodeBackground");
RuntimeCallTimerScope runtimeTimer(
parse_info->runtime_call_stats(),
RuntimeCallCounterIdForCompileBackground(parse_info));
RCS_SCOPE(parse_info->runtime_call_stats(),
RuntimeCallCounterIdForCompileBackground(parse_info));
Handle<SharedFunctionInfo> shared_info =
CreateTopLevelSharedFunctionInfo(parse_info, script, isolate);
@ -1542,9 +1537,8 @@ void CompileOnBackgroundThread(ParseInfo* parse_info,
DisallowHeapAccess no_heap_access;
TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"),
"V8.CompileCodeBackground");
RuntimeCallTimerScope runtimeTimer(
parse_info->runtime_call_stats(),
RuntimeCallCounterIdForCompileBackground(parse_info));
RCS_SCOPE(parse_info->runtime_call_stats(),
RuntimeCallCounterIdForCompileBackground(parse_info));
// Generate the unoptimized bytecode or asm-js data.
DCHECK(jobs->empty());
@ -1661,8 +1655,8 @@ class V8_NODISCARD OffThreadParseInfoScope {
ParseInfo* parse_info,
WorkerThreadRuntimeCallStats* worker_thread_runtime_stats, int stack_size)
: parse_info_(parse_info),
original_runtime_call_stats_(parse_info_->runtime_call_stats()),
original_stack_limit_(parse_info_->stack_limit()),
original_runtime_call_stats_(parse_info_->runtime_call_stats()),
worker_thread_scope_(worker_thread_runtime_stats) {
parse_info_->SetPerThreadState(GetCurrentStackPosition() - stack_size * KB,
worker_thread_scope_.Get());
@ -1679,8 +1673,8 @@ class V8_NODISCARD OffThreadParseInfoScope {
private:
ParseInfo* parse_info_;
RuntimeCallStats* original_runtime_call_stats_;
uintptr_t original_stack_limit_;
RuntimeCallStats* original_runtime_call_stats_;
WorkerThreadRuntimeCallStatsScope worker_thread_scope_;
};
@ -1693,9 +1687,8 @@ void BackgroundCompileTask::Run() {
stack_size_);
TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"),
"BackgroundCompileTask::Run");
RuntimeCallTimerScope runtimeTimer(
info_->runtime_call_stats(),
RuntimeCallCounterId::kCompileBackgroundCompileTask);
RCS_SCOPE(info_->runtime_call_stats(),
RuntimeCallCounterId::kCompileBackgroundCompileTask);
// Update the character stream's runtime call stats.
info_->character_stream()->set_runtime_call_stats(
@ -1818,8 +1811,7 @@ bool Compiler::CollectSourcePositions(Isolate* isolate,
DCHECK(!isolate->has_pending_exception());
VMState<BYTECODE_COMPILER> state(isolate);
PostponeInterruptsScope postpone(isolate);
RuntimeCallTimerScope runtimeTimer(
isolate, RuntimeCallCounterId::kCompileCollectSourcePositions);
RCS_SCOPE(isolate, RuntimeCallCounterId::kCompileCollectSourcePositions);
TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"),
"V8.CollectSourcePositions");
HistogramTimerScope timer(isolate->counters()->collect_source_positions());
@ -1895,8 +1887,7 @@ bool Compiler::Compile(Isolate* isolate, Handle<SharedFunctionInfo> shared_info,
VMState<BYTECODE_COMPILER> state(isolate);
PostponeInterruptsScope postpone(isolate);
TimerEventScope<TimerEventCompileCode> compile_timer(isolate);
RuntimeCallTimerScope runtimeTimer(isolate,
RuntimeCallCounterId::kCompileFunction);
RCS_SCOPE(isolate, RuntimeCallCounterId::kCompileFunction);
TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"), "V8.CompileCode");
AggregatedHistogramTimerScope timer(isolate->counters()->compile_lazy());
@ -2059,8 +2050,8 @@ bool Compiler::FinalizeBackgroundCompileTask(
TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"),
"V8.FinalizeBackgroundCompileTask");
RuntimeCallTimerScope runtimeTimer(
isolate, RuntimeCallCounterId::kCompileFinalizeBackgroundCompileTask);
RCS_SCOPE(isolate,
RuntimeCallCounterId::kCompileFinalizeBackgroundCompileTask);
HandleScope scope(isolate);
ParseInfo* parse_info = task->info();
DCHECK(!parse_info->flags().is_toplevel());
@ -2282,8 +2273,7 @@ bool CodeGenerationFromStringsAllowed(Isolate* isolate, Handle<Context> context,
// Callback set. Let it decide if code generation is allowed.
VMState<EXTERNAL> state(isolate);
RuntimeCallTimerScope timer(
isolate, RuntimeCallCounterId::kCodeGenerationFromStringsCallbacks);
RCS_SCOPE(isolate, RuntimeCallCounterId::kCodeGenerationFromStringsCallbacks);
AllowCodeGenerationFromStringsCallback callback =
isolate->allow_code_gen_callback();
return callback(v8::Utils::ToLocal(context), v8::Utils::ToLocal(source));
@ -2302,8 +2292,7 @@ bool ModifyCodeGenerationFromStrings(Isolate* isolate, Handle<Context> context,
// Callback set. Run it, and use the return value as source, or block
// execution if it's not set.
VMState<EXTERNAL> state(isolate);
RuntimeCallTimerScope timer(
isolate, RuntimeCallCounterId::kCodeGenerationFromStringsCallbacks);
RCS_SCOPE(isolate, RuntimeCallCounterId::kCodeGenerationFromStringsCallbacks);
ModifyCodeGenerationFromStringsResult result =
isolate->modify_code_gen_callback()
? isolate->modify_code_gen_callback()(v8::Utils::ToLocal(context),
@ -2882,8 +2871,7 @@ MaybeHandle<SharedFunctionInfo> Compiler::GetSharedFunctionInfoForScript(
compile_timer.set_consuming_code_cache();
// Then check cached code provided by embedder.
HistogramTimerScope timer(isolate->counters()->compile_deserialize());
RuntimeCallTimerScope runtimeTimer(
isolate, RuntimeCallCounterId::kCompileDeserialize);
RCS_SCOPE(isolate, RuntimeCallCounterId::kCompileDeserialize);
TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"),
"V8.CompileDeserialize");
Handle<SharedFunctionInfo> inner_result;
@ -2971,8 +2959,7 @@ MaybeHandle<JSFunction> Compiler::GetWrappedFunction(
compile_timer.set_consuming_code_cache();
// Then check cached code provided by embedder.
HistogramTimerScope timer(isolate->counters()->compile_deserialize());
RuntimeCallTimerScope runtimeTimer(
isolate, RuntimeCallCounterId::kCompileDeserialize);
RCS_SCOPE(isolate, RuntimeCallCounterId::kCompileDeserialize);
TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"),
"V8.CompileDeserialize");
maybe_result = CodeSerializer::Deserialize(isolate, cached_data, source,
@ -3075,8 +3062,8 @@ Compiler::GetSharedFunctionInfoForStreamedScript(
Handle<Script> script;
if (FLAG_finalize_streaming_on_background && !origin_options.IsModule()) {
RuntimeCallTimerScope runtimeTimerScope(
isolate, RuntimeCallCounterId::kCompilePublishBackgroundFinalization);
RCS_SCOPE(isolate,
RuntimeCallCounterId::kCompilePublishBackgroundFinalization);
TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"),
"V8.OffThreadFinalization.Publish");
@ -3233,8 +3220,7 @@ bool Compiler::FinalizeOptimizedCompilationJob(OptimizedCompilationJob* job,
OptimizedCompilationInfo* compilation_info = job->compilation_info();
TimerEventScope<TimerEventRecompileSynchronous> timer(isolate);
RuntimeCallTimerScope runtimeTimer(
isolate, RuntimeCallCounterId::kOptimizeConcurrentFinalize);
RCS_SCOPE(isolate, RuntimeCallCounterId::kOptimizeConcurrentFinalize);
TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"),
"V8.OptimizeConcurrentFinalize");


@ -59,8 +59,7 @@ base::Optional<CompilerDispatcher::JobId> CompilerDispatcher::Enqueue(
const FunctionLiteral* function_literal) {
TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"),
"V8.CompilerDispatcherEnqueue");
RuntimeCallTimerScope runtimeTimer(
isolate_, RuntimeCallCounterId::kCompileEnqueueOnDispatcher);
RCS_SCOPE(isolate_, RuntimeCallCounterId::kCompileEnqueueOnDispatcher);
if (!IsEnabled()) return base::nullopt;
@ -129,8 +128,7 @@ void CompilerDispatcher::RegisterSharedFunctionInfo(
void CompilerDispatcher::WaitForJobIfRunningOnBackground(Job* job) {
TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"),
"V8.CompilerDispatcherWaitForBackgroundJob");
RuntimeCallTimerScope runtimeTimer(
isolate_, RuntimeCallCounterId::kCompileWaitForDispatcher);
RCS_SCOPE(isolate_, RuntimeCallCounterId::kCompileWaitForDispatcher);
base::MutexGuard lock(&mutex_);
if (running_background_jobs_.find(job) == running_background_jobs_.end()) {
@ -149,8 +147,7 @@ void CompilerDispatcher::WaitForJobIfRunningOnBackground(Job* job) {
bool CompilerDispatcher::FinishNow(Handle<SharedFunctionInfo> function) {
TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"),
"V8.CompilerDispatcherFinishNow");
RuntimeCallTimerScope runtimeTimer(
isolate_, RuntimeCallCounterId::kCompileFinishNowOnDispatcher);
RCS_SCOPE(isolate_, RuntimeCallCounterId::kCompileFinishNowOnDispatcher);
if (trace_compiler_dispatcher_) {
PrintF("CompilerDispatcher: finishing ");
function->ShortPrint();


@ -58,16 +58,19 @@ class OptimizingCompileDispatcher::CompileTask : public CancelableTask {
private:
// v8::Task overrides.
void RunInternal() override {
#ifdef V8_RUNTIME_CALL_STATS
WorkerThreadRuntimeCallStatsScope runtime_call_stats_scope(
worker_thread_runtime_call_stats_);
LocalIsolate local_isolate(isolate_, ThreadKind::kBackground,
runtime_call_stats_scope.Get());
#else // V8_RUNTIME_CALL_STATS
LocalIsolate local_isolate(isolate_, ThreadKind::kBackground);
#endif // V8_RUNTIME_CALL_STATS
DCHECK(local_isolate.heap()->IsParked());
{
RuntimeCallTimerScope runtimeTimer(
runtime_call_stats_scope.Get(),
RuntimeCallCounterId::kOptimizeBackgroundDispatcherJob);
RCS_SCOPE(runtime_call_stats_scope.Get(),
RuntimeCallCounterId::kOptimizeBackgroundDispatcherJob);
TimerEventScope<TimerEventRecompileConcurrent> timer(isolate_);
TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"),
@ -79,7 +82,7 @@ class OptimizingCompileDispatcher::CompileTask : public CancelableTask {
}
dispatcher_->CompileNext(dispatcher_->NextInput(&local_isolate),
runtime_call_stats_scope.Get(), &local_isolate);
&local_isolate);
}
{
base::MutexGuard lock_guard(&dispatcher_->ref_count_mutex_);
@ -117,12 +120,12 @@ OptimizedCompilationJob* OptimizingCompileDispatcher::NextInput(
}
void OptimizingCompileDispatcher::CompileNext(OptimizedCompilationJob* job,
RuntimeCallStats* stats,
LocalIsolate* local_isolate) {
if (!job) return;
// The function may have already been optimized by OSR. Simply continue.
CompilationJob::Status status = job->ExecuteJob(stats, local_isolate);
CompilationJob::Status status =
job->ExecuteJob(local_isolate->runtime_call_stats(), local_isolate);
USE(status); // Prevent an unused-variable error.
{


@ -61,8 +61,7 @@ class V8_EXPORT_PRIVATE OptimizingCompileDispatcher {
bool restore_function_code);
void FlushInputQueue();
void FlushOutputQueue(bool restore_function_code);
void CompileNext(OptimizedCompilationJob* job, RuntimeCallStats* stats,
LocalIsolate* local_isolate);
void CompileNext(OptimizedCompilationJob* job, LocalIsolate* local_isolate);
OptimizedCompilationJob* NextInput(LocalIsolate* local_isolate);
inline int InputQueueIndex(int i) {


@ -783,6 +783,7 @@ class NodeOriginsWrapper final : public Reducer {
class V8_NODISCARD PipelineRunScope {
public:
#ifdef V8_RUNTIME_CALL_STATS
PipelineRunScope(
PipelineData* data, const char* phase_name,
RuntimeCallCounterId runtime_call_counter_id,
@ -794,6 +795,14 @@ class V8_NODISCARD PipelineRunScope {
runtime_call_counter_id, counter_mode) {
DCHECK_NOT_NULL(phase_name);
}
#else // V8_RUNTIME_CALL_STATS
PipelineRunScope(PipelineData* data, const char* phase_name)
: phase_scope_(data->pipeline_statistics(), phase_name),
zone_scope_(data->zone_stats(), phase_name),
origin_scope_(data->node_origins(), phase_name) {
DCHECK_NOT_NULL(phase_name);
}
#endif // V8_RUNTIME_CALL_STATS
Zone* zone() { return zone_scope_.zone(); }
@ -801,7 +810,9 @@ class V8_NODISCARD PipelineRunScope {
PhaseScope phase_scope_;
ZoneStats::Scope zone_scope_;
NodeOriginTable::PhaseScope origin_scope_;
#ifdef V8_RUNTIME_CALL_STATS
RuntimeCallTimerScope runtime_call_timer_scope;
#endif // V8_RUNTIME_CALL_STATS
};
// LocalIsolateScope encapsulates the phase where persistent handles are
@ -1262,8 +1273,7 @@ PipelineCompilationJob::Status PipelineCompilationJob::FinalizeJobImpl(
// Ensure that the RuntimeCallStats table of main thread is available for
// phases happening during PrepareJob.
PipelineJobScope scope(&data_, isolate->counters()->runtime_call_stats());
RuntimeCallTimerScope runtimeTimer(
isolate, RuntimeCallCounterId::kOptimizeFinalizePipelineJob);
RCS_SCOPE(isolate, RuntimeCallCounterId::kOptimizeFinalizePipelineJob);
MaybeHandle<Code> maybe_code = pipeline_.FinalizeCode();
Handle<Code> code;
if (!maybe_code.ToHandle(&code)) {
@ -1309,17 +1319,26 @@ void PipelineCompilationJob::RegisterWeakObjectsInOptimizedCode(
template <typename Phase, typename... Args>
void PipelineImpl::Run(Args&&... args) {
#ifdef V8_RUNTIME_CALL_STATS
PipelineRunScope scope(this->data_, Phase::phase_name(),
Phase::kRuntimeCallCounterId, Phase::kCounterMode);
#else
PipelineRunScope scope(this->data_, Phase::phase_name());
#endif
Phase phase;
phase.Run(this->data_, scope.zone(), std::forward<Args>(args)...);
}
#ifdef V8_RUNTIME_CALL_STATS
#define DECL_PIPELINE_PHASE_CONSTANTS_HELPER(Name, Mode) \
static const char* phase_name() { return "V8.TF" #Name; } \
static constexpr RuntimeCallCounterId kRuntimeCallCounterId = \
RuntimeCallCounterId::kOptimize##Name; \
static constexpr RuntimeCallStats::CounterMode kCounterMode = Mode;
#else // V8_RUNTIME_CALL_STATS
#define DECL_PIPELINE_PHASE_CONSTANTS_HELPER(Name, Mode) \
static const char* phase_name() { return "V8.TF" #Name; }
#endif // V8_RUNTIME_CALL_STATS
#define DECL_PIPELINE_PHASE_CONSTANTS(Name) \
DECL_PIPELINE_PHASE_CONSTANTS_HELPER(Name, RuntimeCallStats::kThreadSpecific)
@ -2966,8 +2985,7 @@ MaybeHandle<Code> Pipeline::GenerateCodeForCodeStub(
should_optimize_jumps ? &jump_opt : nullptr, options,
profile_data);
PipelineJobScope scope(&data, isolate->counters()->runtime_call_stats());
RuntimeCallTimerScope timer_scope(isolate,
RuntimeCallCounterId::kOptimizeCode);
RCS_SCOPE(isolate, RuntimeCallCounterId::kOptimizeCode);
data.set_verify_graph(FLAG_verify_csa);
std::unique_ptr<PipelineStatistics> pipeline_statistics;
if (FLAG_turbo_stats || FLAG_turbo_stats_nvp) {


@ -944,10 +944,12 @@ int64_t GetNextRandomInt64(v8::Isolate* v8_isolate) {
void EnumerateRuntimeCallCounters(v8::Isolate* v8_isolate,
RuntimeCallCounterCallback callback) {
#ifdef V8_RUNTIME_CALL_STATS
i::Isolate* isolate = reinterpret_cast<i::Isolate*>(v8_isolate);
if (isolate->counters()) {
isolate->counters()->runtime_call_stats()->EnumerateCounters(callback);
}
#endif // V8_RUNTIME_CALL_STATS
}
int GetDebuggingId(v8::Local<v8::Function> function) {


@ -380,8 +380,7 @@ void Deoptimizer::DeoptimizeMarkedCodeForContext(NativeContext native_context) {
}
void Deoptimizer::DeoptimizeAll(Isolate* isolate) {
RuntimeCallTimerScope runtimeTimer(isolate,
RuntimeCallCounterId::kDeoptimizeCode);
RCS_SCOPE(isolate, RuntimeCallCounterId::kDeoptimizeCode);
TimerEventScope<TimerEventDeoptimizeCode> timer(isolate);
TRACE_EVENT0("v8", "V8.DeoptimizeCode");
TraceDeoptAll(isolate);
@ -399,8 +398,7 @@ void Deoptimizer::DeoptimizeAll(Isolate* isolate) {
}
void Deoptimizer::DeoptimizeMarkedCode(Isolate* isolate) {
RuntimeCallTimerScope runtimeTimer(isolate,
RuntimeCallCounterId::kDeoptimizeCode);
RCS_SCOPE(isolate, RuntimeCallCounterId::kDeoptimizeCode);
TimerEventScope<TimerEventDeoptimizeCode> timer(isolate);
TRACE_EVENT0("v8", "V8.DeoptimizeCode");
TraceDeoptMarked(isolate);
@ -427,8 +425,7 @@ void Deoptimizer::MarkAllCodeForContext(NativeContext native_context) {
void Deoptimizer::DeoptimizeFunction(JSFunction function, Code code) {
Isolate* isolate = function.GetIsolate();
RuntimeCallTimerScope runtimeTimer(isolate,
RuntimeCallCounterId::kDeoptimizeCode);
RCS_SCOPE(isolate, RuntimeCallCounterId::kDeoptimizeCode);
TimerEventScope<TimerEventDeoptimizeCode> timer(isolate);
TRACE_EVENT0("v8", "V8.DeoptimizeCode");
function.ResetIfBytecodeFlushed();


@ -115,7 +115,7 @@ namespace internal {
}
void Object::ObjectVerify(Isolate* isolate) {
RuntimeCallTimerScope timer(isolate, RuntimeCallCounterId::kObjectVerify);
RCS_SCOPE(isolate, RuntimeCallCounterId::kObjectVerify);
if (IsSmi()) {
Smi::cast(*this).SmiVerify(isolate);
} else {


@ -107,29 +107,40 @@ double ClobberDoubleRegisters(double x1, double x2, double x3, double x4);
// TODO(cbruni): add global flag to check whether any tracing events have been
// enabled.
#define RUNTIME_FUNCTION_RETURNS_TYPE(Type, InternalType, Convert, Name) \
static V8_INLINE InternalType __RT_impl_##Name(RuntimeArguments args, \
Isolate* isolate); \
\
#ifdef V8_RUNTIME_CALL_STATS
#define RUNTIME_ENTRY_WITH_RCS(Type, InternalType, Convert, Name) \
V8_NOINLINE static Type Stats_##Name(int args_length, Address* args_object, \
Isolate* isolate) { \
RuntimeCallTimerScope timer(isolate, RuntimeCallCounterId::k##Name); \
RCS_SCOPE(isolate, RuntimeCallCounterId::k##Name); \
TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.runtime"), \
"V8.Runtime_" #Name); \
RuntimeArguments args(args_length, args_object); \
return Convert(__RT_impl_##Name(args, isolate)); \
} \
\
Type Name(int args_length, Address* args_object, Isolate* isolate) { \
DCHECK(isolate->context().is_null() || isolate->context().IsContext()); \
CLOBBER_DOUBLE_REGISTERS(); \
if (V8_UNLIKELY(TracingFlags::is_runtime_stats_enabled())) { \
return Stats_##Name(args_length, args_object, isolate); \
} \
RuntimeArguments args(args_length, args_object); \
return Convert(__RT_impl_##Name(args, isolate)); \
} \
\
}
#define TEST_AND_CALL_RCS(Name) \
if (V8_UNLIKELY(TracingFlags::is_runtime_stats_enabled())) { \
return Stats_##Name(args_length, args_object, isolate); \
}
#else // V8_RUNTIME_CALL_STATS
#define RUNTIME_ENTRY_WITH_RCS(Type, InternalType, Convert, Name)
#define TEST_AND_CALL_RCS(Name)
#endif // V8_RUNTIME_CALL_STATS
#define RUNTIME_FUNCTION_RETURNS_TYPE(Type, InternalType, Convert, Name) \
static V8_INLINE InternalType __RT_impl_##Name(RuntimeArguments args, \
Isolate* isolate); \
RUNTIME_ENTRY_WITH_RCS(Type, InternalType, Convert, Name) \
Type Name(int args_length, Address* args_object, Isolate* isolate) { \
DCHECK(isolate->context().is_null() || isolate->context().IsContext()); \
CLOBBER_DOUBLE_REGISTERS(); \
TEST_AND_CALL_RCS(Name) \
RuntimeArguments args(args_length, args_object); \
return Convert(__RT_impl_##Name(args, isolate)); \
} \
\
static InternalType __RT_impl_##Name(RuntimeArguments args, Isolate* isolate)
#define CONVERT_OBJECT(x) (x).ptr()
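
With V8_RUNTIME_CALL_STATS undefined, RUNTIME_ENTRY_WITH_RCS and
TEST_AND_CALL_RCS above expand to nothing, so a runtime entry point reduces to
a direct call into the implementation. Roughly, for a hypothetical
Runtime_Example (name chosen for illustration; the forward declaration of
__RT_impl_Runtime_Example is omitted), the Address-returning variant expands
to:

// Approximate expansion with V8_RUNTIME_CALL_STATS disabled: no
// Stats_* wrapper and no is_runtime_stats_enabled() branch remain on
// the call path.
Address Runtime_Example(int args_length, Address* args_object,
                        Isolate* isolate) {
  DCHECK(isolate->context().is_null() || isolate->context().IsContext());
  CLOBBER_DOUBLE_REGISTERS();
  RuntimeArguments args(args_length, args_object);
  return CONVERT_OBJECT(__RT_impl_Runtime_Example(args, isolate));
}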


@ -244,7 +244,7 @@ MaybeHandle<Context> NewScriptContext(Isolate* isolate,
V8_WARN_UNUSED_RESULT MaybeHandle<Object> Invoke(Isolate* isolate,
const InvokeParams& params) {
RuntimeCallTimerScope timer(isolate, RuntimeCallCounterId::kInvoke);
RCS_SCOPE(isolate, RuntimeCallCounterId::kInvoke);
DCHECK(!params.receiver->IsJSGlobalObject());
DCHECK_LE(params.argc, FixedArray::kMaxLength);
@ -368,7 +368,7 @@ V8_WARN_UNUSED_RESULT MaybeHandle<Object> Invoke(Isolate* isolate,
Address func = params.target->ptr();
Address recv = params.receiver->ptr();
Address** argv = reinterpret_cast<Address**>(params.argv);
RuntimeCallTimerScope timer(isolate, RuntimeCallCounterId::kJS_Execution);
RCS_SCOPE(isolate, RuntimeCallCounterId::kJS_Execution);
value = Object(stub_entry.Call(isolate->isolate_data()->isolate_root(),
orig_func, func, recv, params.argc, argv));
} else {
@ -383,7 +383,7 @@ V8_WARN_UNUSED_RESULT MaybeHandle<Object> Invoke(Isolate* isolate,
JSEntryFunction stub_entry =
JSEntryFunction::FromAddress(isolate, code->InstructionStart());
RuntimeCallTimerScope timer(isolate, RuntimeCallCounterId::kJS_Execution);
RCS_SCOPE(isolate, RuntimeCallCounterId::kJS_Execution);
value = Object(stub_entry.Call(isolate->isolate_data()->isolate_root(),
params.microtask_queue));
}
@ -552,7 +552,7 @@ void Execution::CallWasm(Isolate* isolate, Handle<Code> wrapper_code,
trap_handler::SetThreadInWasm();
{
RuntimeCallTimerScope timer(isolate, RuntimeCallCounterId::kJS_Execution);
RCS_SCOPE(isolate, RuntimeCallCounterId::kJS_Execution);
STATIC_ASSERT(compiler::CWasmEntryParameters::kCodeEntry == 0);
STATIC_ASSERT(compiler::CWasmEntryParameters::kObjectRef == 1);
STATIC_ASSERT(compiler::CWasmEntryParameters::kArgumentsBuffer == 2);


@ -1471,8 +1471,7 @@ void Isolate::RequestInterrupt(InterruptCallback callback, void* data) {
}
void Isolate::InvokeApiInterruptCallbacks() {
RuntimeCallTimerScope runtimeTimer(
this, RuntimeCallCounterId::kInvokeApiInterruptCallbacks);
RCS_SCOPE(this, RuntimeCallCounterId::kInvokeApiInterruptCallbacks);
// Note: callback below should be called outside of execution access lock.
while (true) {
InterruptEntry entry;
@ -3869,6 +3868,7 @@ void Isolate::DumpAndResetStats() {
wasm_engine()->DumpAndResetTurboStatistics();
}
#endif // V8_ENABLE_WEBASSEMBLY
#if V8_RUNTIME_CALL_STATS
if (V8_UNLIKELY(TracingFlags::runtime_stats.load(std::memory_order_relaxed) ==
v8::tracing::TracingCategoryObserver::ENABLED_BY_NATIVE)) {
counters()->worker_thread_runtime_call_stats()->AddToMainTable(
@ -3876,6 +3876,7 @@ void Isolate::DumpAndResetStats() {
counters()->runtime_call_stats()->Print();
counters()->runtime_call_stats()->Reset();
}
#endif // V8_RUNTIME_CALL_STATS
if (BasicBlockProfiler::Get()->HasData(this)) {
StdoutStream out;
BasicBlockProfiler::Get()->Print(out, this);


@ -186,8 +186,7 @@ void MessageHandler::ReportMessageNoExceptions(
FUNCTION_CAST<v8::MessageCallback>(callback_obj.foreign_address());
Handle<Object> callback_data(listener.get(1), isolate);
{
RuntimeCallTimerScope timer(
isolate, RuntimeCallCounterId::kMessageListenerCallback);
RCS_SCOPE(isolate, RuntimeCallCounterId::kMessageListenerCallback);
// Do not allow exceptions to propagate.
v8::TryCatch try_catch(reinterpret_cast<v8::Isolate*>(isolate));
callback(api_message_obj, callback_data->IsUndefined(isolate)


@ -12,6 +12,7 @@
#include "src/logging/counters.h"
#include "src/objects/backing-store.h"
#include "src/roots/roots-inl.h"
#include "src/tracing/trace-event.h"
#include "src/utils/memcopy.h"
#if V8_ENABLE_WEBASSEMBLY


@ -16,6 +16,8 @@
#include "src/base/platform/platform.h"
#include "src/codegen/cpu-features.h"
#include "src/logging/counters.h"
#include "src/logging/tracing-flags.h"
#include "src/tracing/tracing-category-observer.h"
#include "src/utils/allocation.h"
#include "src/utils/memcopy.h"
#include "src/utils/ostreams.h"


@ -13,7 +13,9 @@
#include "src/heap/heap-inl.h"
#include "src/heap/incremental-marking.h"
#include "src/heap/spaces.h"
#include "src/logging/counters-inl.h"
#include "src/logging/counters.h"
#include "src/logging/tracing-flags.h"
#include "src/tracing/tracing-category-observer.h"
namespace v8 {
namespace internal {
@ -28,6 +30,8 @@ static size_t CountTotalHolesSize(Heap* heap) {
}
return holes_size;
}
#ifdef V8_RUNTIME_CALL_STATS
WorkerThreadRuntimeCallStats* GCTracer::worker_thread_runtime_call_stats() {
return heap_->isolate()->counters()->worker_thread_runtime_call_stats();
}
@ -38,6 +42,7 @@ RuntimeCallCounterId GCTracer::RCSCounterFromScope(Scope::ScopeId id) {
static_cast<int>(RuntimeCallCounterId::kGC_MC_INCREMENTAL) +
static_cast<int>(id));
}
#endif // defined(V8_RUNTIME_CALL_STATS)
double GCTracer::MonotonicallyIncreasingTimeInMs() {
if (V8_UNLIKELY(FLAG_predictable)) {
@ -61,6 +66,7 @@ GCTracer::Scope::Scope(GCTracer* tracer, ScopeId scope, ThreadKind thread_kind)
start_time_ = tracer_->MonotonicallyIncreasingTimeInMs();
if (V8_LIKELY(!TracingFlags::is_runtime_stats_enabled())) return;
#ifdef V8_RUNTIME_CALL_STATS
if (thread_kind_ == ThreadKind::kMain) {
DCHECK_EQ(tracer_->heap_->isolate()->thread_id(), ThreadId::Current());
runtime_stats_ =
@ -72,6 +78,7 @@ GCTracer::Scope::Scope(GCTracer* tracer, ScopeId scope, ThreadKind thread_kind)
runtime_stats_ = runtime_call_stats_scope_->Get();
runtime_stats_->Enter(&timer_, GCTracer::RCSCounterFromScope(scope));
}
#endif // defined(V8_RUNTIME_CALL_STATS)
}
GCTracer::Scope::~Scope() {
@ -84,8 +91,10 @@ GCTracer::Scope::~Scope() {
tracer_->AddScopeSampleBackground(scope_, duration_ms);
}
#ifdef V8_RUNTIME_CALL_STATS
if (V8_LIKELY(runtime_stats_ == nullptr)) return;
runtime_stats_->Leave(&timer_);
#endif // defined(V8_RUNTIME_CALL_STATS)
}
const char* GCTracer::Scope::Name(ScopeId id) {


@ -113,9 +113,11 @@ class V8_EXPORT_PRIVATE GCTracer {
ScopeId scope_;
ThreadKind thread_kind_;
double start_time_;
#ifdef V8_RUNTIME_CALL_STATS
RuntimeCallTimer timer_;
RuntimeCallStats* runtime_stats_ = nullptr;
base::Optional<WorkerThreadRuntimeCallStatsScope> runtime_call_stats_scope_;
#endif // defined(V8_RUNTIME_CALL_STATS)
};
class Event {
@ -195,7 +197,9 @@ class V8_EXPORT_PRIVATE GCTracer {
static double CombineSpeedsInBytesPerMillisecond(double default_speed,
double optional_speed);
#ifdef V8_RUNTIME_CALL_STATS
static RuntimeCallCounterId RCSCounterFromScope(Scope::ScopeId id);
#endif // defined(V8_RUNTIME_CALL_STATS)
explicit GCTracer(Heap* heap);
@ -335,7 +339,9 @@ class V8_EXPORT_PRIVATE GCTracer {
double AverageTimeToIncrementalMarkingTask() const;
void RecordTimeToIncrementalMarkingTask(double time_to_task);
#ifdef V8_RUNTIME_CALL_STATS
WorkerThreadRuntimeCallStats* worker_thread_runtime_call_stats();
#endif // defined(V8_RUNTIME_CALL_STATS)
CollectionEpoch CurrentEpoch(Scope::ScopeId id);


@ -1403,8 +1403,7 @@ void Heap::CollectAllAvailableGarbage(GarbageCollectionReason gc_reason) {
if (gc_reason == GarbageCollectionReason::kLastResort) {
InvokeNearHeapLimitCallback();
}
RuntimeCallTimerScope runtime_timer(
isolate(), RuntimeCallCounterId::kGC_Custom_AllAvailableGarbage);
RCS_SCOPE(isolate(), RuntimeCallCounterId::kGC_Custom_AllAvailableGarbage);
// The optimizing compiler may be unnecessarily holding on to memory.
isolate()->AbortConcurrentOptimization(BlockingBehavior::kDontBlock);
@ -2214,8 +2213,7 @@ void Heap::RecomputeLimits(GarbageCollector collector) {
}
void Heap::CallGCPrologueCallbacks(GCType gc_type, GCCallbackFlags flags) {
RuntimeCallTimerScope runtime_timer(
isolate(), RuntimeCallCounterId::kGCPrologueCallback);
RCS_SCOPE(isolate(), RuntimeCallCounterId::kGCPrologueCallback);
for (const GCCallbackTuple& info : gc_prologue_callbacks_) {
if (gc_type & info.gc_type) {
v8::Isolate* isolate = reinterpret_cast<v8::Isolate*>(this->isolate());
@ -2225,8 +2223,7 @@ void Heap::CallGCPrologueCallbacks(GCType gc_type, GCCallbackFlags flags) {
}
void Heap::CallGCEpilogueCallbacks(GCType gc_type, GCCallbackFlags flags) {
RuntimeCallTimerScope runtime_timer(
isolate(), RuntimeCallCounterId::kGCEpilogueCallback);
RCS_SCOPE(isolate(), RuntimeCallCounterId::kGCEpilogueCallback);
for (const GCCallbackTuple& info : gc_epilogue_callbacks_) {
if (gc_type & info.gc_type) {
v8::Isolate* isolate = reinterpret_cast<v8::Isolate*>(this->isolate());


@ -39,9 +39,8 @@ void IncrementalMarking::Observer::Step(int bytes_allocated, Address addr,
size_t size) {
Heap* heap = incremental_marking_->heap();
VMState<GC> state(heap->isolate());
RuntimeCallTimerScope runtime_timer(
heap->isolate(),
RuntimeCallCounterId::kGC_Custom_IncrementalMarkingObserver);
RCS_SCOPE(heap->isolate(),
RuntimeCallCounterId::kGC_Custom_IncrementalMarkingObserver);
incremental_marking_->AdvanceOnAllocation();
// AdvanceIncrementalMarkingOnAllocation can start incremental marking.
incremental_marking_->EnsureBlackAllocated(addr, size);


@ -42,6 +42,7 @@
#include "src/heap/worklist.h"
#include "src/ic/stub-cache.h"
#include "src/init/v8.h"
#include "src/logging/tracing-flags.h"
#include "src/objects/embedder-data-array-inl.h"
#include "src/objects/foreign.h"
#include "src/objects/hash-table-inl.h"
@ -51,6 +52,7 @@
#include "src/objects/slots-inl.h"
#include "src/objects/transitions-inl.h"
#include "src/tasks/cancelable-task.h"
#include "src/tracing/tracing-category-observer.h"
#include "src/utils/utils-inl.h"
namespace v8 {


@ -832,8 +832,8 @@ void PagedSpace::PrepareForMarkCompact() {
bool PagedSpace::RefillLabMain(int size_in_bytes, AllocationOrigin origin) {
VMState<GC> state(heap()->isolate());
RuntimeCallTimerScope runtime_timer(
heap()->isolate(), RuntimeCallCounterId::kGC_Custom_SlowAllocateRaw);
RCS_SCOPE(heap()->isolate(),
RuntimeCallCounterId::kGC_Custom_SlowAllocateRaw);
return RawRefillLabMain(size_in_bytes, origin);
}


@ -5310,7 +5310,7 @@ Genesis::Genesis(
v8::DeserializeEmbedderFieldsCallback embedder_fields_deserializer,
v8::MicrotaskQueue* microtask_queue)
: isolate_(isolate), active_(isolate->bootstrapper()) {
RuntimeCallTimerScope rcs_timer(isolate, RuntimeCallCounterId::kGenesis);
RCS_SCOPE(isolate, RuntimeCallCounterId::kGenesis);
result_ = Handle<Context>::null();
global_proxy_ = Handle<JSGlobalProxy>::null();


@ -18,7 +18,6 @@
#include "src/init/setup-isolate.h"
#include "src/interpreter/bytecode-generator.h"
#include "src/interpreter/bytecodes.h"
#include "src/logging/counters-inl.h"
#include "src/objects/objects-inl.h"
#include "src/objects/shared-function-info.h"
#include "src/objects/slots.h"
@ -177,10 +176,9 @@ InterpreterCompilationJob::InterpreterCompilationJob(
eager_inner_literals) {}
InterpreterCompilationJob::Status InterpreterCompilationJob::ExecuteJobImpl() {
RuntimeCallTimerScope runtimeTimerScope(
parse_info()->runtime_call_stats(),
RuntimeCallCounterId::kCompileIgnition,
RuntimeCallStats::kThreadSpecific);
RCS_SCOPE(parse_info()->runtime_call_stats(),
RuntimeCallCounterId::kCompileIgnition,
RuntimeCallStats::kThreadSpecific);
// TODO(lpy): add support for background compilation RCS trace.
TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"), "V8.CompileIgnition");
@ -243,9 +241,8 @@ void InterpreterCompilationJob::CheckAndPrintBytecodeMismatch(
InterpreterCompilationJob::Status InterpreterCompilationJob::FinalizeJobImpl(
Handle<SharedFunctionInfo> shared_info, Isolate* isolate) {
RuntimeCallTimerScope runtimeTimerScope(
parse_info()->runtime_call_stats(),
RuntimeCallCounterId::kCompileIgnitionFinalization);
RCS_SCOPE(parse_info()->runtime_call_stats(),
RuntimeCallCounterId::kCompileIgnitionFinalization);
TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"),
"V8.CompileIgnitionFinalization");
return DoFinalizeJobImpl(shared_info, isolate);
@ -253,9 +250,8 @@ InterpreterCompilationJob::Status InterpreterCompilationJob::FinalizeJobImpl(
InterpreterCompilationJob::Status InterpreterCompilationJob::FinalizeJobImpl(
Handle<SharedFunctionInfo> shared_info, LocalIsolate* isolate) {
RuntimeCallTimerScope runtimeTimerScope(
parse_info()->runtime_call_stats(),
RuntimeCallCounterId::kCompileBackgroundIgnitionFinalization);
RCS_SCOPE(parse_info()->runtime_call_stats(),
RuntimeCallCounterId::kCompileBackgroundIgnitionFinalization);
TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"),
"V8.CompileIgnitionFinalization");
return DoFinalizeJobImpl(shared_info, isolate);


@ -1,64 +0,0 @@
// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_LOGGING_COUNTERS_INL_H_
#define V8_LOGGING_COUNTERS_INL_H_
#include "src/logging/counters.h"
#include "src/logging/tracing-flags.h"
namespace v8 {
namespace internal {
void RuntimeCallTimer::Start(RuntimeCallCounter* counter,
RuntimeCallTimer* parent) {
DCHECK(!IsStarted());
counter_ = counter;
parent_.SetValue(parent);
if (TracingFlags::runtime_stats.load(std::memory_order_relaxed) ==
v8::tracing::TracingCategoryObserver::ENABLED_BY_SAMPLING) {
return;
}
base::TimeTicks now = RuntimeCallTimer::Now();
if (parent) parent->Pause(now);
Resume(now);
DCHECK(IsStarted());
}
void RuntimeCallTimer::Pause(base::TimeTicks now) {
DCHECK(IsStarted());
elapsed_ += (now - start_ticks_);
start_ticks_ = base::TimeTicks();
}
void RuntimeCallTimer::Resume(base::TimeTicks now) {
DCHECK(!IsStarted());
start_ticks_ = now;
}
RuntimeCallTimer* RuntimeCallTimer::Stop() {
if (!IsStarted()) return parent();
base::TimeTicks now = RuntimeCallTimer::Now();
Pause(now);
counter_->Increment();
CommitTimeToCounter();
RuntimeCallTimer* parent_timer = parent();
if (parent_timer) {
parent_timer->Resume(now);
}
return parent_timer;
}
void RuntimeCallTimer::CommitTimeToCounter() {
counter_->Add(elapsed_);
elapsed_ = base::TimeDelta();
}
bool RuntimeCallTimer::IsStarted() { return start_ticks_ != base::TimeTicks(); }
} // namespace internal
} // namespace v8
#endif // V8_LOGGING_COUNTERS_INL_H_
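The deleted src/logging/counters-inl.h above held the pause/resume bookkeeping that credits each counter with its own time only; the same bodies reappear as inline members in src/logging/runtime-call-stats.h further down. A simplified, self-contained sketch of that accounting, using illustrative types rather than the V8 ones:

#include <chrono>

// Starting a nested timer pauses its parent, so a timer accumulates time
// only while it is the innermost (topmost) timer on the stack.
struct SimpleTimer {
  SimpleTimer* parent = nullptr;
  std::chrono::steady_clock::time_point start{};
  std::chrono::steady_clock::duration own{};

  void Start(SimpleTimer* p) {
    parent = p;
    if (parent) parent->Pause();   // stop charging time to the parent
    Resume();
  }
  SimpleTimer* Stop() {
    Pause();                       // commit this timer's own time
    if (parent) parent->Resume();  // charge the parent again
    return parent;                 // new top of the timer stack
  }
  void Pause() { own += std::chrono::steady_clock::now() - start; }
  void Resume() { start = std::chrono::steady_clock::now(); }
};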

View File

@@ -4,16 +4,13 @@
#include "src/logging/counters.h"
#include <iomanip>
#include "src/base/platform/platform.h"
#include "src/base/atomic-utils.h"
#include "src/base/platform/elapsed-timer.h"
#include "src/base/platform/time.h"
#include "src/builtins/builtins-definitions.h"
#include "src/execution/isolate.h"
#include "src/logging/counters-inl.h"
#include "src/logging/log-inl.h"
#include "src/logging/log.h"
#include "src/utils/ostreams.h"
namespace v8 {
namespace internal {
@@ -121,15 +118,16 @@ void TimedHistogram::RecordAbandon(base::ElapsedTimer* timer,
}
Counters::Counters(Isolate* isolate)
: isolate_(isolate),
stats_table_(this),
// clang format off
:
#define SC(name, caption) name##_(this, "c:" #caption),
STATS_COUNTER_TS_LIST(SC)
#undef SC
// clang format on
runtime_call_stats_(RuntimeCallStats::kMainIsolateThread),
worker_thread_runtime_call_stats_() {
#ifdef V8_RUNTIME_CALL_STATS
runtime_call_stats_(RuntimeCallStats::kMainIsolateThread),
worker_thread_runtime_call_stats_(),
#endif
isolate_(isolate),
stats_table_(this) {
static const struct {
Histogram Counters::*member;
const char* caption;
@@ -319,355 +317,5 @@ void Counters::ResetCreateHistogramFunction(CreateHistogramCallback f) {
#undef HM
}
base::TimeTicks (*RuntimeCallTimer::Now)() =
&base::TimeTicks::HighResolutionNow;
base::TimeTicks RuntimeCallTimer::NowCPUTime() {
base::ThreadTicks ticks = base::ThreadTicks::Now();
return base::TimeTicks::FromInternalValue(ticks.ToInternalValue());
}
class RuntimeCallStatEntries {
public:
void Print(std::ostream& os) {
if (total_call_count == 0) return;
std::sort(entries.rbegin(), entries.rend());
os << std::setw(50) << "Runtime Function/C++ Builtin" << std::setw(12)
<< "Time" << std::setw(18) << "Count" << std::endl
<< std::string(88, '=') << std::endl;
for (Entry& entry : entries) {
entry.SetTotal(total_time, total_call_count);
entry.Print(os);
}
os << std::string(88, '-') << std::endl;
Entry("Total", total_time, total_call_count).Print(os);
}
// By default, the compiler will usually inline this, which results in a large
// binary size increase: std::vector::push_back expands to a large number of
// instructions, and this function is invoked repeatedly by macros.
V8_NOINLINE void Add(RuntimeCallCounter* counter) {
if (counter->count() == 0) return;
entries.push_back(
Entry(counter->name(), counter->time(), counter->count()));
total_time += counter->time();
total_call_count += counter->count();
}
private:
class Entry {
public:
Entry(const char* name, base::TimeDelta time, uint64_t count)
: name_(name),
time_(time.InMicroseconds()),
count_(count),
time_percent_(100),
count_percent_(100) {}
bool operator<(const Entry& other) const {
if (time_ < other.time_) return true;
if (time_ > other.time_) return false;
return count_ < other.count_;
}
V8_NOINLINE void Print(std::ostream& os) {
os.precision(2);
os << std::fixed << std::setprecision(2);
os << std::setw(50) << name_;
os << std::setw(10) << static_cast<double>(time_) / 1000 << "ms ";
os << std::setw(6) << time_percent_ << "%";
os << std::setw(10) << count_ << " ";
os << std::setw(6) << count_percent_ << "%";
os << std::endl;
}
V8_NOINLINE void SetTotal(base::TimeDelta total_time,
uint64_t total_count) {
if (total_time.InMicroseconds() == 0) {
time_percent_ = 0;
} else {
time_percent_ = 100.0 * time_ / total_time.InMicroseconds();
}
count_percent_ = 100.0 * count_ / total_count;
}
private:
const char* name_;
int64_t time_;
uint64_t count_;
double time_percent_;
double count_percent_;
};
uint64_t total_call_count = 0;
base::TimeDelta total_time;
std::vector<Entry> entries;
};
void RuntimeCallCounter::Reset() {
count_ = 0;
time_ = 0;
}
void RuntimeCallCounter::Dump(v8::tracing::TracedValue* value) {
value->BeginArray(name_);
value->AppendDouble(count_);
value->AppendDouble(time_);
value->EndArray();
}
void RuntimeCallCounter::Add(RuntimeCallCounter* other) {
count_ += other->count();
time_ += other->time().InMicroseconds();
}
void RuntimeCallTimer::Snapshot() {
base::TimeTicks now = Now();
// Pause only the topmost timer in the timer stack.
Pause(now);
// Commit all the timer's elapsed time to the counters.
RuntimeCallTimer* timer = this;
while (timer != nullptr) {
timer->CommitTimeToCounter();
timer = timer->parent();
}
Resume(now);
}
RuntimeCallStats::RuntimeCallStats(ThreadType thread_type)
: in_use_(false), thread_type_(thread_type) {
static const char* const kNames[] = {
#define CALL_BUILTIN_COUNTER(name) "GC_" #name,
FOR_EACH_GC_COUNTER(CALL_BUILTIN_COUNTER) //
#undef CALL_BUILTIN_COUNTER
#define CALL_RUNTIME_COUNTER(name) #name,
FOR_EACH_MANUAL_COUNTER(CALL_RUNTIME_COUNTER) //
#undef CALL_RUNTIME_COUNTER
#define CALL_RUNTIME_COUNTER(name, nargs, ressize) #name,
FOR_EACH_INTRINSIC(CALL_RUNTIME_COUNTER) //
#undef CALL_RUNTIME_COUNTER
#define CALL_BUILTIN_COUNTER(name) #name,
BUILTIN_LIST_C(CALL_BUILTIN_COUNTER) //
#undef CALL_BUILTIN_COUNTER
#define CALL_BUILTIN_COUNTER(name) "API_" #name,
FOR_EACH_API_COUNTER(CALL_BUILTIN_COUNTER) //
#undef CALL_BUILTIN_COUNTER
#define CALL_BUILTIN_COUNTER(name) #name,
FOR_EACH_HANDLER_COUNTER(CALL_BUILTIN_COUNTER) //
#undef CALL_BUILTIN_COUNTER
#define THREAD_SPECIFIC_COUNTER(name) #name,
FOR_EACH_THREAD_SPECIFIC_COUNTER(THREAD_SPECIFIC_COUNTER) //
#undef THREAD_SPECIFIC_COUNTER
};
for (int i = 0; i < kNumberOfCounters; i++) {
this->counters_[i] = RuntimeCallCounter(kNames[i]);
}
if (FLAG_rcs_cpu_time) {
CHECK(base::ThreadTicks::IsSupported());
base::ThreadTicks::WaitUntilInitialized();
RuntimeCallTimer::Now = &RuntimeCallTimer::NowCPUTime;
}
}
namespace {
constexpr RuntimeCallCounterId FirstCounter(RuntimeCallCounterId first, ...) {
return first;
}
#define THREAD_SPECIFIC_COUNTER(name) k##name,
constexpr RuntimeCallCounterId kFirstThreadVariantCounter =
FirstCounter(FOR_EACH_THREAD_SPECIFIC_COUNTER(THREAD_SPECIFIC_COUNTER) 0);
#undef THREAD_SPECIFIC_COUNTER
#define THREAD_SPECIFIC_COUNTER(name) +1
constexpr int kThreadVariantCounterCount =
0 FOR_EACH_THREAD_SPECIFIC_COUNTER(THREAD_SPECIFIC_COUNTER);
#undef THREAD_SPECIFIC_COUNTER
constexpr auto kLastThreadVariantCounter = static_cast<RuntimeCallCounterId>(
kFirstThreadVariantCounter + kThreadVariantCounterCount - 1);
} // namespace
bool RuntimeCallStats::HasThreadSpecificCounterVariants(
RuntimeCallCounterId id) {
// Check that it's in the range of the thread-specific variant counters and
// also that it's one of the background counters.
return id >= kFirstThreadVariantCounter && id <= kLastThreadVariantCounter;
}
bool RuntimeCallStats::IsBackgroundThreadSpecificVariant(
RuntimeCallCounterId id) {
return HasThreadSpecificCounterVariants(id) &&
(id - kFirstThreadVariantCounter) % 2 == 1;
}
void RuntimeCallStats::Enter(RuntimeCallTimer* timer,
RuntimeCallCounterId counter_id) {
DCHECK(IsCalledOnTheSameThread());
RuntimeCallCounter* counter = GetCounter(counter_id);
DCHECK_NOT_NULL(counter->name());
timer->Start(counter, current_timer());
current_timer_.SetValue(timer);
current_counter_.SetValue(counter);
}
void RuntimeCallStats::Leave(RuntimeCallTimer* timer) {
DCHECK(IsCalledOnTheSameThread());
RuntimeCallTimer* stack_top = current_timer();
if (stack_top == nullptr) return; // Missing timer is a result of Reset().
CHECK(stack_top == timer);
current_timer_.SetValue(timer->Stop());
RuntimeCallTimer* cur_timer = current_timer();
current_counter_.SetValue(cur_timer ? cur_timer->counter() : nullptr);
}
void RuntimeCallStats::Add(RuntimeCallStats* other) {
for (int i = 0; i < kNumberOfCounters; i++) {
GetCounter(i)->Add(other->GetCounter(i));
}
}
// static
void RuntimeCallStats::CorrectCurrentCounterId(RuntimeCallCounterId counter_id,
CounterMode mode) {
DCHECK(IsCalledOnTheSameThread());
if (mode == RuntimeCallStats::CounterMode::kThreadSpecific) {
counter_id = CounterIdForThread(counter_id);
}
DCHECK(IsCounterAppropriateForThread(counter_id));
RuntimeCallTimer* timer = current_timer();
if (timer == nullptr) return;
RuntimeCallCounter* counter = GetCounter(counter_id);
timer->set_counter(counter);
current_counter_.SetValue(counter);
}
bool RuntimeCallStats::IsCalledOnTheSameThread() {
if (thread_id_.IsValid()) return thread_id_ == ThreadId::Current();
thread_id_ = ThreadId::Current();
return true;
}
void RuntimeCallStats::Print() {
StdoutStream os;
Print(os);
}
void RuntimeCallStats::Print(std::ostream& os) {
RuntimeCallStatEntries entries;
if (current_timer_.Value() != nullptr) {
current_timer_.Value()->Snapshot();
}
for (int i = 0; i < kNumberOfCounters; i++) {
entries.Add(GetCounter(i));
}
entries.Print(os);
}
void RuntimeCallStats::EnumerateCounters(
debug::RuntimeCallCounterCallback callback) {
if (current_timer_.Value() != nullptr) {
current_timer_.Value()->Snapshot();
}
for (int i = 0; i < kNumberOfCounters; i++) {
RuntimeCallCounter* counter = GetCounter(i);
callback(counter->name(), counter->count(), counter->time());
}
}
void RuntimeCallStats::Reset() {
if (V8_LIKELY(!TracingFlags::is_runtime_stats_enabled())) return;
// In tracing, we only want to trace the time spent on top-level trace events.
// If the runtime counter stack is not empty, we should clear the whole runtime
// counter stack, and then reset counters so that we can dump counters into
// top-level trace events accurately.
while (current_timer_.Value()) {
current_timer_.SetValue(current_timer_.Value()->Stop());
}
for (int i = 0; i < kNumberOfCounters; i++) {
GetCounter(i)->Reset();
}
in_use_ = true;
}
void RuntimeCallStats::Dump(v8::tracing::TracedValue* value) {
for (int i = 0; i < kNumberOfCounters; i++) {
if (GetCounter(i)->count() > 0) GetCounter(i)->Dump(value);
}
in_use_ = false;
}
WorkerThreadRuntimeCallStats::WorkerThreadRuntimeCallStats()
: isolate_thread_id_(ThreadId::Current()) {}
WorkerThreadRuntimeCallStats::~WorkerThreadRuntimeCallStats() {
if (tls_key_) base::Thread::DeleteThreadLocalKey(*tls_key_);
}
base::Thread::LocalStorageKey WorkerThreadRuntimeCallStats::GetKey() {
base::MutexGuard lock(&mutex_);
DCHECK(TracingFlags::is_runtime_stats_enabled());
if (!tls_key_) tls_key_ = base::Thread::CreateThreadLocalKey();
return *tls_key_;
}
RuntimeCallStats* WorkerThreadRuntimeCallStats::NewTable() {
DCHECK(TracingFlags::is_runtime_stats_enabled());
// Never create a new worker table on the isolate's main thread.
DCHECK_NE(ThreadId::Current(), isolate_thread_id_);
std::unique_ptr<RuntimeCallStats> new_table =
std::make_unique<RuntimeCallStats>(RuntimeCallStats::kWorkerThread);
RuntimeCallStats* result = new_table.get();
base::MutexGuard lock(&mutex_);
tables_.push_back(std::move(new_table));
return result;
}
void WorkerThreadRuntimeCallStats::AddToMainTable(
RuntimeCallStats* main_call_stats) {
base::MutexGuard lock(&mutex_);
for (auto& worker_stats : tables_) {
DCHECK_NE(main_call_stats, worker_stats.get());
main_call_stats->Add(worker_stats.get());
worker_stats->Reset();
}
}
WorkerThreadRuntimeCallStatsScope::WorkerThreadRuntimeCallStatsScope(
WorkerThreadRuntimeCallStats* worker_stats)
: table_(nullptr) {
if (V8_LIKELY(!TracingFlags::is_runtime_stats_enabled())) return;
table_ = reinterpret_cast<RuntimeCallStats*>(
base::Thread::GetThreadLocal(worker_stats->GetKey()));
if (table_ == nullptr) {
table_ = worker_stats->NewTable();
base::Thread::SetThreadLocal(worker_stats->GetKey(), table_);
}
if ((TracingFlags::runtime_stats.load(std::memory_order_relaxed) &
v8::tracing::TracingCategoryObserver::ENABLED_BY_TRACING)) {
table_->Reset();
}
}
WorkerThreadRuntimeCallStatsScope::~WorkerThreadRuntimeCallStatsScope() {
if (V8_LIKELY(table_ == nullptr)) return;
if ((TracingFlags::runtime_stats.load(std::memory_order_relaxed) &
v8::tracing::TracingCategoryObserver::ENABLED_BY_TRACING)) {
auto value = v8::tracing::TracedValue::Create();
table_->Dump(value.get());
TRACE_EVENT_INSTANT1(TRACE_DISABLED_BY_DEFAULT("v8.runtime_stats"),
"V8.RuntimeStats", TRACE_EVENT_SCOPE_THREAD,
"runtime-call-stats", std::move(value));
}
}
} // namespace internal
} // namespace v8
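The thread-specific counter handling above depends on the enum layout produced by ADD_THREAD_SPECIFIC_COUNTER: each entry of FOR_EACH_THREAD_SPECIFIC_COUNTER is emitted as a main-thread counter immediately followed by its Background variant. A short sketch of what that layout implies; the helper function is hypothetical and only restates the id + 1 mapping used by CounterIdForThread:

// Layout assumed by CounterIdForThread() and
// IsBackgroundThreadSpecificVariant():
//   kCompileAnalyse            // even offset from kFirstThreadVariantCounter
//   kCompileBackgroundAnalyse  // odd offset: the worker-thread variant
//   kCompileEval
//   kCompileBackgroundEval
//   ...
// Hypothetical helper mirroring the mapping applied on worker threads:
RuntimeCallCounterId ToWorkerVariant(RuntimeCallCounterId id) {
  // The main-thread variant comes first, so the worker variant is id + 1.
  return static_cast<RuntimeCallCounterId>(id + 1);
}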

View File

@@ -13,16 +13,10 @@
#include "src/base/platform/elapsed-timer.h"
#include "src/base/platform/time.h"
#include "src/common/globals.h"
#include "src/debug/debug-interface.h"
#include "src/execution/isolate.h"
#include "src/init/heap-symbols.h"
#include "src/logging/counters-definitions.h"
#include "src/logging/tracing-flags.h"
#include "src/logging/runtime-call-stats.h"
#include "src/objects/objects.h"
#include "src/runtime/runtime.h"
#include "src/tracing/trace-event.h"
#include "src/tracing/traced-value.h"
#include "src/tracing/tracing-category-observer.h"
#include "src/utils/allocation.h"
namespace v8 {
@@ -648,668 +642,6 @@ double AggregatedMemoryHistogram<Histogram>::Aggregate(double current_ms,
value * ((current_ms - last_ms_) / interval_ms);
}
class RuntimeCallCounter final {
public:
RuntimeCallCounter() : RuntimeCallCounter(nullptr) {}
explicit RuntimeCallCounter(const char* name)
: name_(name), count_(0), time_(0) {}
V8_NOINLINE void Reset();
V8_NOINLINE void Dump(v8::tracing::TracedValue* value);
void Add(RuntimeCallCounter* other);
const char* name() const { return name_; }
int64_t count() const { return count_; }
base::TimeDelta time() const {
return base::TimeDelta::FromMicroseconds(time_);
}
void Increment() { count_++; }
void Add(base::TimeDelta delta) { time_ += delta.InMicroseconds(); }
private:
friend class RuntimeCallStats;
const char* name_;
int64_t count_;
// Stored as int64_t so that its initialization can be deferred.
int64_t time_;
};
// RuntimeCallTimer is used to keep track of the stack of currently active
// timers used for properly measuring the own time of a RuntimeCallCounter.
class RuntimeCallTimer final {
public:
RuntimeCallCounter* counter() { return counter_; }
void set_counter(RuntimeCallCounter* counter) { counter_ = counter; }
RuntimeCallTimer* parent() const { return parent_.Value(); }
void set_parent(RuntimeCallTimer* timer) { parent_.SetValue(timer); }
const char* name() const { return counter_->name(); }
inline bool IsStarted();
inline void Start(RuntimeCallCounter* counter, RuntimeCallTimer* parent);
void Snapshot();
inline RuntimeCallTimer* Stop();
// Make the time source configurable for testing purposes.
V8_EXPORT_PRIVATE static base::TimeTicks (*Now)();
// Helper to switch over to CPU time.
static base::TimeTicks NowCPUTime();
private:
inline void Pause(base::TimeTicks now);
inline void Resume(base::TimeTicks now);
inline void CommitTimeToCounter();
RuntimeCallCounter* counter_ = nullptr;
base::AtomicValue<RuntimeCallTimer*> parent_;
base::TimeTicks start_ticks_;
base::TimeDelta elapsed_;
};
#define FOR_EACH_GC_COUNTER(V) \
TRACER_SCOPES(V) \
TRACER_BACKGROUND_SCOPES(V)
#define FOR_EACH_API_COUNTER(V) \
V(AccessorPair_New) \
V(ArrayBuffer_Cast) \
V(ArrayBuffer_Detach) \
V(ArrayBuffer_New) \
V(ArrayBuffer_NewBackingStore) \
V(ArrayBuffer_BackingStore_Reallocate) \
V(Array_CloneElementAt) \
V(Array_New) \
V(BigInt64Array_New) \
V(BigInt_NewFromWords) \
V(BigIntObject_BigIntValue) \
V(BigIntObject_New) \
V(BigUint64Array_New) \
V(BooleanObject_BooleanValue) \
V(BooleanObject_New) \
V(Context_New) \
V(Context_NewRemoteContext) \
V(DataView_New) \
V(Date_New) \
V(Date_NumberValue) \
V(Debug_Call) \
V(debug_GetPrivateMembers) \
V(Error_New) \
V(External_New) \
V(Float32Array_New) \
V(Float64Array_New) \
V(Function_Call) \
V(Function_New) \
V(Function_FunctionProtoToString) \
V(Function_NewInstance) \
V(FunctionTemplate_GetFunction) \
V(FunctionTemplate_New) \
V(FunctionTemplate_NewRemoteInstance) \
V(FunctionTemplate_NewWithCache) \
V(FunctionTemplate_NewWithFastHandler) \
V(Int16Array_New) \
V(Int32Array_New) \
V(Int8Array_New) \
V(Isolate_DateTimeConfigurationChangeNotification) \
V(Isolate_LocaleConfigurationChangeNotification) \
V(JSON_Parse) \
V(JSON_Stringify) \
V(Map_AsArray) \
V(Map_Clear) \
V(Map_Delete) \
V(Map_Get) \
V(Map_Has) \
V(Map_New) \
V(Map_Set) \
V(Message_GetEndColumn) \
V(Message_GetLineNumber) \
V(Message_GetSourceLine) \
V(Message_GetStartColumn) \
V(Module_Evaluate) \
V(Module_InstantiateModule) \
V(Module_SetSyntheticModuleExport) \
V(NumberObject_New) \
V(NumberObject_NumberValue) \
V(Object_CallAsConstructor) \
V(Object_CallAsFunction) \
V(Object_CreateDataProperty) \
V(Object_DefineOwnProperty) \
V(Object_DefineProperty) \
V(Object_Delete) \
V(Object_DeleteProperty) \
V(Object_ForceSet) \
V(Object_Get) \
V(Object_GetOwnPropertyDescriptor) \
V(Object_GetOwnPropertyNames) \
V(Object_GetPropertyAttributes) \
V(Object_GetPropertyNames) \
V(Object_GetRealNamedProperty) \
V(Object_GetRealNamedPropertyAttributes) \
V(Object_GetRealNamedPropertyAttributesInPrototypeChain) \
V(Object_GetRealNamedPropertyInPrototypeChain) \
V(Object_Has) \
V(Object_HasOwnProperty) \
V(Object_HasRealIndexedProperty) \
V(Object_HasRealNamedCallbackProperty) \
V(Object_HasRealNamedProperty) \
V(Object_IsCodeLike) \
V(Object_New) \
V(Object_ObjectProtoToString) \
V(Object_Set) \
V(Object_SetAccessor) \
V(Object_SetIntegrityLevel) \
V(Object_SetPrivate) \
V(Object_SetPrototype) \
V(ObjectTemplate_New) \
V(ObjectTemplate_NewInstance) \
V(Object_ToArrayIndex) \
V(Object_ToBigInt) \
V(Object_ToDetailString) \
V(Object_ToInt32) \
V(Object_ToInteger) \
V(Object_ToNumber) \
V(Object_ToObject) \
V(Object_ToString) \
V(Object_ToUint32) \
V(Persistent_New) \
V(Private_New) \
V(Promise_Catch) \
V(Promise_Chain) \
V(Promise_HasRejectHandler) \
V(Promise_Resolver_New) \
V(Promise_Resolver_Reject) \
V(Promise_Resolver_Resolve) \
V(Promise_Result) \
V(Promise_Status) \
V(Promise_Then) \
V(Proxy_New) \
V(RangeError_New) \
V(ReferenceError_New) \
V(RegExp_Exec) \
V(RegExp_New) \
V(ScriptCompiler_Compile) \
V(ScriptCompiler_CompileFunctionInContext) \
V(ScriptCompiler_CompileUnbound) \
V(Script_Run) \
V(Set_Add) \
V(Set_AsArray) \
V(Set_Clear) \
V(Set_Delete) \
V(Set_Has) \
V(Set_New) \
V(SharedArrayBuffer_New) \
V(SharedArrayBuffer_NewBackingStore) \
V(String_Concat) \
V(String_NewExternalOneByte) \
V(String_NewExternalTwoByte) \
V(String_NewFromOneByte) \
V(String_NewFromTwoByte) \
V(String_NewFromUtf8) \
V(String_NewFromUtf8Literal) \
V(StringObject_New) \
V(StringObject_StringValue) \
V(String_Write) \
V(String_WriteUtf8) \
V(Symbol_New) \
V(SymbolObject_New) \
V(SymbolObject_SymbolValue) \
V(SyntaxError_New) \
V(TracedGlobal_New) \
V(TryCatch_StackTrace) \
V(TypeError_New) \
V(Uint16Array_New) \
V(Uint32Array_New) \
V(Uint8Array_New) \
V(Uint8ClampedArray_New) \
V(UnboundScript_GetId) \
V(UnboundScript_GetLineNumber) \
V(UnboundScript_GetName) \
V(UnboundScript_GetSourceMappingURL) \
V(UnboundScript_GetSourceURL) \
V(ValueDeserializer_ReadHeader) \
V(ValueDeserializer_ReadValue) \
V(ValueSerializer_WriteValue) \
V(Value_Equals) \
V(Value_InstanceOf) \
V(Value_Int32Value) \
V(Value_IntegerValue) \
V(Value_NumberValue) \
V(Value_TypeOf) \
V(Value_Uint32Value) \
V(WasmCompileError_New) \
V(WasmLinkError_New) \
V(WasmRuntimeError_New) \
V(WeakMap_Get) \
V(WeakMap_New) \
V(WeakMap_Set)
#define ADD_THREAD_SPECIFIC_COUNTER(V, Prefix, Suffix) \
V(Prefix##Suffix) \
V(Prefix##Background##Suffix)
#define FOR_EACH_THREAD_SPECIFIC_COUNTER(V) \
ADD_THREAD_SPECIFIC_COUNTER(V, Compile, Analyse) \
ADD_THREAD_SPECIFIC_COUNTER(V, Compile, Eval) \
ADD_THREAD_SPECIFIC_COUNTER(V, Compile, Function) \
ADD_THREAD_SPECIFIC_COUNTER(V, Compile, Ignition) \
ADD_THREAD_SPECIFIC_COUNTER(V, Compile, IgnitionFinalization) \
ADD_THREAD_SPECIFIC_COUNTER(V, Compile, RewriteReturnResult) \
ADD_THREAD_SPECIFIC_COUNTER(V, Compile, ScopeAnalysis) \
ADD_THREAD_SPECIFIC_COUNTER(V, Compile, Script) \
\
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, AllocateFPRegisters) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, AllocateGeneralRegisters) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, AssembleCode) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, AssignSpillSlots) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, BuildLiveRangeBundles) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, BuildLiveRanges) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, BytecodeGraphBuilder) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, CommitAssignment) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, ConnectRanges) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, ControlFlowOptimization) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, CSAEarlyOptimization) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, CSAOptimization) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, DecideSpillingMode) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, DecompressionOptimization) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, EarlyOptimization) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, EarlyTrimming) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, EffectLinearization) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, EscapeAnalysis) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, FinalizeCode) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, FrameElision) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, GenericLowering) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, Inlining) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, JumpThreading) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, MidTierPopulateReferenceMaps) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, MidTierRegisterAllocator) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, MidTierRegisterOutputDefinition) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, MidTierSpillSlotAllocator) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, LateGraphTrimming) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, LateOptimization) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, LoadElimination) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, LocateSpillSlots) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, LoopExitElimination) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, LoopPeeling) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, MachineOperatorOptimization) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, MeetRegisterConstraints) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, MemoryOptimization) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, OptimizeMoves) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, PopulatePointerMaps) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, PrintGraph) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, ResolveControlFlow) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, ResolvePhis) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, \
ScheduledEffectControlLinearization) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, ScheduledMachineLowering) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, Scheduling) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, SelectInstructions) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, SimplifiedLowering) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, StoreStoreElimination) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, TypeAssertions) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, TypedLowering) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, Typer) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, Untyper) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, VerifyGraph) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, WasmBaseOptimization) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, WasmInlining) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, WasmLoopUnrolling) \
\
ADD_THREAD_SPECIFIC_COUNTER(V, Parse, ArrowFunctionLiteral) \
ADD_THREAD_SPECIFIC_COUNTER(V, Parse, FunctionLiteral) \
ADD_THREAD_SPECIFIC_COUNTER(V, Parse, Program) \
ADD_THREAD_SPECIFIC_COUNTER(V, PreParse, ArrowFunctionLiteral) \
ADD_THREAD_SPECIFIC_COUNTER(V, PreParse, WithVariableResolution)
#define FOR_EACH_MANUAL_COUNTER(V) \
V(AccessorGetterCallback) \
V(AccessorSetterCallback) \
V(ArrayLengthGetter) \
V(ArrayLengthSetter) \
V(BoundFunctionLengthGetter) \
V(BoundFunctionNameGetter) \
V(CodeGenerationFromStringsCallbacks) \
V(CompileBackgroundCompileTask) \
V(CompileBaseline) \
V(CompileBaselineVisit) \
V(CompileBaselinePreVisit) \
V(CompileCollectSourcePositions) \
V(CompileDeserialize) \
V(CompileEnqueueOnDispatcher) \
V(CompileFinalizeBackgroundCompileTask) \
V(CompileFinishNowOnDispatcher) \
V(CompileGetFromOptimizedCodeMap) \
V(CompilePublishBackgroundFinalization) \
V(CompileSerialize) \
V(CompileWaitForDispatcher) \
V(ConfigureInstance) \
V(CreateApiFunction) \
V(DeoptimizeCode) \
V(DeserializeContext) \
V(DeserializeIsolate) \
V(FinalizationRegistryCleanupFromTask) \
V(FunctionCallback) \
V(FunctionLengthGetter) \
V(FunctionPrototypeGetter) \
V(FunctionPrototypeSetter) \
V(GC_Custom_AllAvailableGarbage) \
V(GC_Custom_IncrementalMarkingObserver) \
V(GC_Custom_SlowAllocateRaw) \
V(GCEpilogueCallback) \
V(GCPrologueCallback) \
V(Genesis) \
V(GetCompatibleReceiver) \
V(GetMoreDataCallback) \
V(IndexedDefinerCallback) \
V(IndexedDeleterCallback) \
V(IndexedDescriptorCallback) \
V(IndexedEnumeratorCallback) \
V(IndexedGetterCallback) \
V(IndexedQueryCallback) \
V(IndexedSetterCallback) \
V(InstantiateFunction) \
V(InstantiateObject) \
V(Invoke) \
V(InvokeApiFunction) \
V(InvokeApiInterruptCallbacks) \
V(IsCompatibleReceiver) \
V(IsCompatibleReceiverMap) \
V(IsTemplateFor) \
V(JS_Execution) \
V(Map_SetPrototype) \
V(Map_TransitionToAccessorProperty) \
V(Map_TransitionToDataProperty) \
V(MessageListenerCallback) \
V(NamedDefinerCallback) \
V(NamedDeleterCallback) \
V(NamedDescriptorCallback) \
V(NamedEnumeratorCallback) \
V(NamedGetterCallback) \
V(NamedQueryCallback) \
V(NamedSetterCallback) \
V(Object_DeleteProperty) \
V(ObjectVerify) \
V(OptimizeBackgroundDispatcherJob) \
V(OptimizeCode) \
V(OptimizeConcurrentFinalize) \
V(OptimizeConcurrentPrepare) \
V(OptimizeFinalizePipelineJob) \
V(OptimizeHeapBrokerInitialization) \
V(OptimizeNonConcurrent) \
V(OptimizeSerialization) \
V(OptimizeSerializeMetadata) \
V(ParseEval) \
V(ParseFunction) \
V(PropertyCallback) \
V(PrototypeMap_TransitionToAccessorProperty) \
V(PrototypeMap_TransitionToDataProperty) \
V(PrototypeObject_DeleteProperty) \
V(ReconfigureToDataProperty) \
V(UpdateProtector) \
V(StringLengthGetter) \
V(TestCounter1) \
V(TestCounter2) \
V(TestCounter3)
#define FOR_EACH_HANDLER_COUNTER(V) \
V(KeyedLoadIC_KeyedLoadSloppyArgumentsStub) \
V(KeyedLoadIC_LoadElementDH) \
V(KeyedLoadIC_LoadIndexedInterceptorStub) \
V(KeyedLoadIC_LoadIndexedStringDH) \
V(KeyedLoadIC_SlowStub) \
V(KeyedStoreIC_ElementsTransitionAndStoreStub) \
V(KeyedStoreIC_KeyedStoreSloppyArgumentsStub) \
V(KeyedStoreIC_SlowStub) \
V(KeyedStoreIC_StoreElementStub) \
V(KeyedStoreIC_StoreFastElementStub) \
V(LoadGlobalIC_LoadScriptContextField) \
V(LoadGlobalIC_SlowStub) \
V(LoadIC_FunctionPrototypeStub) \
V(LoadIC_HandlerCacheHit_Accessor) \
V(LoadIC_LoadAccessorDH) \
V(LoadIC_LoadAccessorFromPrototypeDH) \
V(LoadIC_LoadApiGetterFromPrototypeDH) \
V(LoadIC_LoadCallback) \
V(LoadIC_LoadConstantDH) \
V(LoadIC_LoadConstantFromPrototypeDH) \
V(LoadIC_LoadFieldDH) \
V(LoadIC_LoadFieldFromPrototypeDH) \
V(LoadIC_LoadGlobalDH) \
V(LoadIC_LoadGlobalFromPrototypeDH) \
V(LoadIC_LoadIntegerIndexedExoticDH) \
V(LoadIC_LoadInterceptorDH) \
V(LoadIC_LoadInterceptorFromPrototypeDH) \
V(LoadIC_LoadNativeDataPropertyDH) \
V(LoadIC_LoadNativeDataPropertyFromPrototypeDH) \
V(LoadIC_LoadNonexistentDH) \
V(LoadIC_LoadNonMaskingInterceptorDH) \
V(LoadIC_LoadNormalDH) \
V(LoadIC_LoadNormalFromPrototypeDH) \
V(LoadIC_NonReceiver) \
V(LoadIC_SlowStub) \
V(LoadIC_StringLength) \
V(LoadIC_StringWrapperLength) \
V(StoreGlobalIC_SlowStub) \
V(StoreGlobalIC_StoreScriptContextField) \
V(StoreIC_HandlerCacheHit_Accessor) \
V(StoreIC_NonReceiver) \
V(StoreIC_SlowStub) \
V(StoreIC_StoreAccessorDH) \
V(StoreIC_StoreAccessorOnPrototypeDH) \
V(StoreIC_StoreApiSetterOnPrototypeDH) \
V(StoreIC_StoreFieldDH) \
V(StoreIC_StoreGlobalDH) \
V(StoreIC_StoreGlobalTransitionDH) \
V(StoreIC_StoreInterceptorStub) \
V(StoreIC_StoreNativeDataPropertyDH) \
V(StoreIC_StoreNativeDataPropertyOnPrototypeDH) \
V(StoreIC_StoreNormalDH) \
V(StoreIC_StoreTransitionDH) \
V(StoreInArrayLiteralIC_SlowStub)
enum RuntimeCallCounterId {
#define CALL_RUNTIME_COUNTER(name) kGC_##name,
FOR_EACH_GC_COUNTER(CALL_RUNTIME_COUNTER) //
#undef CALL_RUNTIME_COUNTER
#define CALL_RUNTIME_COUNTER(name) k##name,
FOR_EACH_MANUAL_COUNTER(CALL_RUNTIME_COUNTER) //
#undef CALL_RUNTIME_COUNTER
#define CALL_RUNTIME_COUNTER(name, nargs, ressize) kRuntime_##name,
FOR_EACH_INTRINSIC(CALL_RUNTIME_COUNTER) //
#undef CALL_RUNTIME_COUNTER
#define CALL_BUILTIN_COUNTER(name) kBuiltin_##name,
BUILTIN_LIST_C(CALL_BUILTIN_COUNTER) //
#undef CALL_BUILTIN_COUNTER
#define CALL_BUILTIN_COUNTER(name) kAPI_##name,
FOR_EACH_API_COUNTER(CALL_BUILTIN_COUNTER) //
#undef CALL_BUILTIN_COUNTER
#define CALL_BUILTIN_COUNTER(name) kHandler_##name,
FOR_EACH_HANDLER_COUNTER(CALL_BUILTIN_COUNTER) //
#undef CALL_BUILTIN_COUNTER
#define THREAD_SPECIFIC_COUNTER(name) k##name,
FOR_EACH_THREAD_SPECIFIC_COUNTER(THREAD_SPECIFIC_COUNTER) //
#undef THREAD_SPECIFIC_COUNTER
kNumberOfCounters,
};
class RuntimeCallStats final {
public:
enum ThreadType { kMainIsolateThread, kWorkerThread };
// If kExact is chosen the counter will be used as given. With kThreadSpecific,
// if the RuntimeCallStats was created for a worker thread, then the
// background-specific version of the counter will be used instead.
enum CounterMode { kExact, kThreadSpecific };
explicit V8_EXPORT_PRIVATE RuntimeCallStats(ThreadType thread_type);
// Starts measuring the time for a function. This will establish the
// connection to the parent counter for properly calculating the own times.
V8_EXPORT_PRIVATE void Enter(RuntimeCallTimer* timer,
RuntimeCallCounterId counter_id);
// Leave a scope for a measured runtime function. This will properly add
// the time delta to the current_counter and subtract the delta from its
// parent.
V8_EXPORT_PRIVATE void Leave(RuntimeCallTimer* timer);
// Set counter id for the innermost measurement. It can be used to refine
// event kind when a runtime entry counter is too generic.
V8_EXPORT_PRIVATE void CorrectCurrentCounterId(
RuntimeCallCounterId counter_id, CounterMode mode = kExact);
V8_EXPORT_PRIVATE void Reset();
// Add all entries from another stats object.
void Add(RuntimeCallStats* other);
V8_EXPORT_PRIVATE void Print(std::ostream& os);
V8_EXPORT_PRIVATE void Print();
V8_NOINLINE void Dump(v8::tracing::TracedValue* value);
V8_EXPORT_PRIVATE void EnumerateCounters(
debug::RuntimeCallCounterCallback callback);
ThreadId thread_id() const { return thread_id_; }
RuntimeCallTimer* current_timer() { return current_timer_.Value(); }
RuntimeCallCounter* current_counter() { return current_counter_.Value(); }
bool InUse() { return in_use_; }
bool IsCalledOnTheSameThread();
V8_EXPORT_PRIVATE bool IsBackgroundThreadSpecificVariant(
RuntimeCallCounterId id);
V8_EXPORT_PRIVATE bool HasThreadSpecificCounterVariants(
RuntimeCallCounterId id);
// This should only be called for counters with a dual Background variant. If
// on the main thread, this just returns the counter. If on a worker thread,
// it returns the Background variant of the counter.
RuntimeCallCounterId CounterIdForThread(RuntimeCallCounterId id) {
DCHECK(HasThreadSpecificCounterVariants(id));
// All thread specific counters are laid out with the main thread variant
// first followed by the background variant.
return thread_type_ == kWorkerThread
? static_cast<RuntimeCallCounterId>(id + 1)
: id;
}
bool IsCounterAppropriateForThread(RuntimeCallCounterId id) {
// TODO(delphick): We should add background-only counters and ensure that
// all counters (not just the thread-specific variants) are only invoked on
// the correct thread.
if (!HasThreadSpecificCounterVariants(id)) return true;
return IsBackgroundThreadSpecificVariant(id) ==
(thread_type_ == kWorkerThread);
}
static const int kNumberOfCounters =
static_cast<int>(RuntimeCallCounterId::kNumberOfCounters);
RuntimeCallCounter* GetCounter(RuntimeCallCounterId counter_id) {
return &counters_[static_cast<int>(counter_id)];
}
RuntimeCallCounter* GetCounter(int counter_id) {
return &counters_[counter_id];
}
private:
// Top of a stack of active timers.
base::AtomicValue<RuntimeCallTimer*> current_timer_;
// Active counter object associated with current timer.
base::AtomicValue<RuntimeCallCounter*> current_counter_;
// Used to track nested tracing scopes.
bool in_use_;
ThreadType thread_type_;
ThreadId thread_id_;
RuntimeCallCounter counters_[kNumberOfCounters];
};
class WorkerThreadRuntimeCallStats final {
public:
WorkerThreadRuntimeCallStats();
~WorkerThreadRuntimeCallStats();
// Returns the TLS key associated with this WorkerThreadRuntimeCallStats.
base::Thread::LocalStorageKey GetKey();
// Returns a new worker thread runtime call stats table managed by this
// WorkerThreadRuntimeCallStats.
RuntimeCallStats* NewTable();
// Adds the counters from the worker thread tables to |main_call_stats|.
void AddToMainTable(RuntimeCallStats* main_call_stats);
private:
base::Mutex mutex_;
std::vector<std::unique_ptr<RuntimeCallStats>> tables_;
base::Optional<base::Thread::LocalStorageKey> tls_key_;
// Since this is for creating worker thread runtime-call stats, record the
// main thread ID to ensure we never create a worker RCS table for the main
// thread.
ThreadId isolate_thread_id_;
};
// Creating a WorkerThreadRuntimeCallStatsScope will provide a thread-local
// runtime call stats table, and will dump the table to an immediate trace event
// when it is destroyed.
class V8_NODISCARD WorkerThreadRuntimeCallStatsScope final {
public:
explicit WorkerThreadRuntimeCallStatsScope(
WorkerThreadRuntimeCallStats* off_thread_stats);
~WorkerThreadRuntimeCallStatsScope();
RuntimeCallStats* Get() const { return table_; }
private:
RuntimeCallStats* table_;
};
#define CHANGE_CURRENT_RUNTIME_COUNTER(runtime_call_stats, counter_id) \
do { \
if (V8_UNLIKELY(TracingFlags::is_runtime_stats_enabled()) && \
runtime_call_stats) { \
runtime_call_stats->CorrectCurrentCounterId(counter_id); \
} \
} while (false)
#define TRACE_HANDLER_STATS(isolate, counter_name) \
CHANGE_CURRENT_RUNTIME_COUNTER( \
isolate->counters()->runtime_call_stats(), \
RuntimeCallCounterId::kHandler_##counter_name)
// A RuntimeCallTimerScope wraps around a RuntimeCallTimer to measure the
// time of a C++ scope.
class V8_NODISCARD RuntimeCallTimerScope {
public:
inline RuntimeCallTimerScope(Isolate* isolate,
RuntimeCallCounterId counter_id);
inline RuntimeCallTimerScope(RuntimeCallStats* stats,
RuntimeCallCounterId counter_id,
RuntimeCallStats::CounterMode mode =
RuntimeCallStats::CounterMode::kExact) {
if (V8_LIKELY(!TracingFlags::is_runtime_stats_enabled() ||
stats == nullptr)) {
return;
}
stats_ = stats;
if (mode == RuntimeCallStats::CounterMode::kThreadSpecific) {
counter_id = stats->CounterIdForThread(counter_id);
}
DCHECK(stats->IsCounterAppropriateForThread(counter_id));
stats_->Enter(&timer_, counter_id);
}
inline ~RuntimeCallTimerScope() {
if (V8_UNLIKELY(stats_ != nullptr)) {
stats_->Leave(&timer_);
}
}
RuntimeCallTimerScope(const RuntimeCallTimerScope&) = delete;
RuntimeCallTimerScope& operator=(const RuntimeCallTimerScope&) = delete;
private:
RuntimeCallStats* stats_ = nullptr;
RuntimeCallTimer timer_;
};
// This file contains all the v8 counters that are in use.
class Counters : public std::enable_shared_from_this<Counters> {
public:
@@ -1410,11 +742,19 @@ class Counters : public std::enable_shared_from_this<Counters> {
};
// clang-format on
#ifdef V8_RUNTIME_CALL_STATS
RuntimeCallStats* runtime_call_stats() { return &runtime_call_stats_; }
WorkerThreadRuntimeCallStats* worker_thread_runtime_call_stats() {
return &worker_thread_runtime_call_stats_;
}
#else // V8_RUNTIME_CALL_STATS
RuntimeCallStats* runtime_call_stats() { return nullptr; }
WorkerThreadRuntimeCallStats* worker_thread_runtime_call_stats() {
return nullptr;
}
#endif // V8_RUNTIME_CALL_STATS
private:
friend class StatsTable;
@@ -1422,9 +762,6 @@ class Counters : public std::enable_shared_from_this<Counters> {
friend class Histogram;
friend class HistogramTimer;
Isolate* isolate_;
StatsTable stats_table_;
int* FindLocation(const char* name) {
return stats_table_.FindLocation(name);
}
@ -1491,8 +828,12 @@ class Counters : public std::enable_shared_from_this<Counters> {
FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(SC)
#undef SC
#ifdef V8_RUNTIME_CALL_STATS
RuntimeCallStats runtime_call_stats_;
WorkerThreadRuntimeCallStats worker_thread_runtime_call_stats_;
#endif
Isolate* isolate_;
StatsTable stats_table_;
DISALLOW_IMPLICIT_CONSTRUCTORS(Counters);
};
@@ -1505,12 +846,14 @@ void HistogramTimer::Stop() {
TimedHistogram::Stop(&timer_, counters()->isolate());
}
#ifdef V8_RUNTIME_CALL_STATS
RuntimeCallTimerScope::RuntimeCallTimerScope(Isolate* isolate,
RuntimeCallCounterId counter_id) {
if (V8_LIKELY(!TracingFlags::is_runtime_stats_enabled())) return;
stats_ = isolate->counters()->runtime_call_stats();
stats_->Enter(&timer_, counter_id);
}
#endif // defined(V8_RUNTIME_CALL_STATS)
} // namespace internal
} // namespace v8
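Because the accessors above return nullptr when V8_RUNTIME_CALL_STATS is undefined, and RCS_SCOPE guards the scope declaration itself, call sites should compile unchanged in both configurations. A hedged sketch of such a call site; DoApiCallback is a hypothetical function, and the counter id is taken from FOR_EACH_MANUAL_COUNTER:

void DoApiCallback(Isolate* isolate) {
  // Declares a RuntimeCallTimerScope only when V8_RUNTIME_CALL_STATS is
  // defined; otherwise the macro is assumed to compile away, and
  // isolate->counters()->runtime_call_stats() would return nullptr anyway.
  RCS_SCOPE(isolate, RuntimeCallCounterId::kFunctionCallback);
  // ... actual callback work ...
}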

View File

@@ -1728,12 +1728,14 @@ bool Logger::EnsureLogScriptSource(Script script) {
}
void Logger::RuntimeCallTimerEvent() {
#ifdef V8_RUNTIME_CALL_STATS
RuntimeCallStats* stats = isolate_->counters()->runtime_call_stats();
RuntimeCallCounter* counter = stats->current_counter();
if (counter == nullptr) return;
MSG_BUILDER();
msg << "active-runtime-timer" << kNext << counter->name();
msg.WriteToLogFile();
#endif // V8_RUNTIME_CALL_STATS
}
void Logger::TickEvent(TickSample* sample, bool overflow) {

View File

@@ -0,0 +1,370 @@
// Copyright 2021 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifdef V8_RUNTIME_CALL_STATS
#include "src/logging/runtime-call-stats.h"
#include <iomanip>
#include "src/tracing/tracing-category-observer.h"
#include "src/utils/ostreams.h"
namespace v8 {
namespace internal {
base::TimeTicks (*RuntimeCallTimer::Now)() =
&base::TimeTicks::HighResolutionNow;
base::TimeTicks RuntimeCallTimer::NowCPUTime() {
base::ThreadTicks ticks = base::ThreadTicks::Now();
return base::TimeTicks::FromInternalValue(ticks.ToInternalValue());
}
class RuntimeCallStatEntries {
public:
void Print(std::ostream& os) {
if (total_call_count == 0) return;
std::sort(entries.rbegin(), entries.rend());
os << std::setw(50) << "Runtime Function/C++ Builtin" << std::setw(12)
<< "Time" << std::setw(18) << "Count" << std::endl
<< std::string(88, '=') << std::endl;
for (Entry& entry : entries) {
entry.SetTotal(total_time, total_call_count);
entry.Print(os);
}
os << std::string(88, '-') << std::endl;
Entry("Total", total_time, total_call_count).Print(os);
}
// By default, the compiler will usually inline this, which results in a large
// binary size increase: std::vector::push_back expands to a large number of
// instructions, and this function is invoked repeatedly by macros.
V8_NOINLINE void Add(RuntimeCallCounter* counter) {
if (counter->count() == 0) return;
entries.push_back(
Entry(counter->name(), counter->time(), counter->count()));
total_time += counter->time();
total_call_count += counter->count();
}
private:
class Entry {
public:
Entry(const char* name, base::TimeDelta time, uint64_t count)
: name_(name),
time_(time.InMicroseconds()),
count_(count),
time_percent_(100),
count_percent_(100) {}
bool operator<(const Entry& other) const {
if (time_ < other.time_) return true;
if (time_ > other.time_) return false;
return count_ < other.count_;
}
V8_NOINLINE void Print(std::ostream& os) {
os.precision(2);
os << std::fixed << std::setprecision(2);
os << std::setw(50) << name_;
os << std::setw(10) << static_cast<double>(time_) / 1000 << "ms ";
os << std::setw(6) << time_percent_ << "%";
os << std::setw(10) << count_ << " ";
os << std::setw(6) << count_percent_ << "%";
os << std::endl;
}
V8_NOINLINE void SetTotal(base::TimeDelta total_time,
uint64_t total_count) {
if (total_time.InMicroseconds() == 0) {
time_percent_ = 0;
} else {
time_percent_ = 100.0 * time_ / total_time.InMicroseconds();
}
count_percent_ = 100.0 * count_ / total_count;
}
private:
const char* name_;
int64_t time_;
uint64_t count_;
double time_percent_;
double count_percent_;
};
uint64_t total_call_count = 0;
base::TimeDelta total_time;
std::vector<Entry> entries;
};
void RuntimeCallCounter::Reset() {
count_ = 0;
time_ = 0;
}
void RuntimeCallCounter::Dump(v8::tracing::TracedValue* value) {
value->BeginArray(name_);
value->AppendDouble(count_);
value->AppendDouble(time_);
value->EndArray();
}
void RuntimeCallCounter::Add(RuntimeCallCounter* other) {
count_ += other->count();
time_ += other->time().InMicroseconds();
}
void RuntimeCallTimer::Snapshot() {
base::TimeTicks now = Now();
// Pause only the topmost timer in the timer stack.
Pause(now);
// Commit all the timer's elapsed time to the counters.
RuntimeCallTimer* timer = this;
while (timer != nullptr) {
timer->CommitTimeToCounter();
timer = timer->parent();
}
Resume(now);
}
RuntimeCallStats::RuntimeCallStats(ThreadType thread_type)
: in_use_(false), thread_type_(thread_type) {
static const char* const kNames[] = {
#define CALL_BUILTIN_COUNTER(name) "GC_" #name,
FOR_EACH_GC_COUNTER(CALL_BUILTIN_COUNTER) //
#undef CALL_BUILTIN_COUNTER
#define CALL_RUNTIME_COUNTER(name) #name,
FOR_EACH_MANUAL_COUNTER(CALL_RUNTIME_COUNTER) //
#undef CALL_RUNTIME_COUNTER
#define CALL_RUNTIME_COUNTER(name, nargs, ressize) #name,
FOR_EACH_INTRINSIC(CALL_RUNTIME_COUNTER) //
#undef CALL_RUNTIME_COUNTER
#define CALL_BUILTIN_COUNTER(name) #name,
BUILTIN_LIST_C(CALL_BUILTIN_COUNTER) //
#undef CALL_BUILTIN_COUNTER
#define CALL_BUILTIN_COUNTER(name) "API_" #name,
FOR_EACH_API_COUNTER(CALL_BUILTIN_COUNTER) //
#undef CALL_BUILTIN_COUNTER
#define CALL_BUILTIN_COUNTER(name) #name,
FOR_EACH_HANDLER_COUNTER(CALL_BUILTIN_COUNTER) //
#undef CALL_BUILTIN_COUNTER
#define THREAD_SPECIFIC_COUNTER(name) #name,
FOR_EACH_THREAD_SPECIFIC_COUNTER(THREAD_SPECIFIC_COUNTER) //
#undef THREAD_SPECIFIC_COUNTER
};
for (int i = 0; i < kNumberOfCounters; i++) {
this->counters_[i] = RuntimeCallCounter(kNames[i]);
}
if (FLAG_rcs_cpu_time) {
CHECK(base::ThreadTicks::IsSupported());
base::ThreadTicks::WaitUntilInitialized();
RuntimeCallTimer::Now = &RuntimeCallTimer::NowCPUTime;
}
}
namespace {
constexpr RuntimeCallCounterId FirstCounter(RuntimeCallCounterId first, ...) {
return first;
}
#define THREAD_SPECIFIC_COUNTER(name) k##name,
constexpr RuntimeCallCounterId kFirstThreadVariantCounter =
FirstCounter(FOR_EACH_THREAD_SPECIFIC_COUNTER(THREAD_SPECIFIC_COUNTER) 0);
#undef THREAD_SPECIFIC_COUNTER
#define THREAD_SPECIFIC_COUNTER(name) +1
constexpr int kThreadVariantCounterCount =
0 FOR_EACH_THREAD_SPECIFIC_COUNTER(THREAD_SPECIFIC_COUNTER);
#undef THREAD_SPECIFIC_COUNTER
constexpr auto kLastThreadVariantCounter = static_cast<RuntimeCallCounterId>(
kFirstThreadVariantCounter + kThreadVariantCounterCount - 1);
} // namespace
bool RuntimeCallStats::HasThreadSpecificCounterVariants(
RuntimeCallCounterId id) {
// Check that it's in the range of the thread-specific variant counters and
// also that it's one of the background counters.
return id >= kFirstThreadVariantCounter && id <= kLastThreadVariantCounter;
}
bool RuntimeCallStats::IsBackgroundThreadSpecificVariant(
RuntimeCallCounterId id) {
return HasThreadSpecificCounterVariants(id) &&
(id - kFirstThreadVariantCounter) % 2 == 1;
}
void RuntimeCallStats::Enter(RuntimeCallTimer* timer,
RuntimeCallCounterId counter_id) {
DCHECK(IsCalledOnTheSameThread());
RuntimeCallCounter* counter = GetCounter(counter_id);
DCHECK_NOT_NULL(counter->name());
timer->Start(counter, current_timer());
current_timer_.SetValue(timer);
current_counter_.SetValue(counter);
}
void RuntimeCallStats::Leave(RuntimeCallTimer* timer) {
DCHECK(IsCalledOnTheSameThread());
RuntimeCallTimer* stack_top = current_timer();
if (stack_top == nullptr) return; // Missing timer is a result of Reset().
CHECK(stack_top == timer);
current_timer_.SetValue(timer->Stop());
RuntimeCallTimer* cur_timer = current_timer();
current_counter_.SetValue(cur_timer ? cur_timer->counter() : nullptr);
}
void RuntimeCallStats::Add(RuntimeCallStats* other) {
for (int i = 0; i < kNumberOfCounters; i++) {
GetCounter(i)->Add(other->GetCounter(i));
}
}
// static
void RuntimeCallStats::CorrectCurrentCounterId(RuntimeCallCounterId counter_id,
CounterMode mode) {
DCHECK(IsCalledOnTheSameThread());
if (mode == RuntimeCallStats::CounterMode::kThreadSpecific) {
counter_id = CounterIdForThread(counter_id);
}
DCHECK(IsCounterAppropriateForThread(counter_id));
RuntimeCallTimer* timer = current_timer();
if (timer == nullptr) return;
RuntimeCallCounter* counter = GetCounter(counter_id);
timer->set_counter(counter);
current_counter_.SetValue(counter);
}
bool RuntimeCallStats::IsCalledOnTheSameThread() {
if (thread_id_.IsValid()) return thread_id_ == ThreadId::Current();
thread_id_ = ThreadId::Current();
return true;
}
void RuntimeCallStats::Print() {
StdoutStream os;
Print(os);
}
void RuntimeCallStats::Print(std::ostream& os) {
RuntimeCallStatEntries entries;
if (current_timer_.Value() != nullptr) {
current_timer_.Value()->Snapshot();
}
for (int i = 0; i < kNumberOfCounters; i++) {
entries.Add(GetCounter(i));
}
entries.Print(os);
}
void RuntimeCallStats::EnumerateCounters(
debug::RuntimeCallCounterCallback callback) {
if (current_timer_.Value() != nullptr) {
current_timer_.Value()->Snapshot();
}
for (int i = 0; i < kNumberOfCounters; i++) {
RuntimeCallCounter* counter = GetCounter(i);
callback(counter->name(), counter->count(), counter->time());
}
}
void RuntimeCallStats::Reset() {
if (V8_LIKELY(!TracingFlags::is_runtime_stats_enabled())) return;
// In tracing, we only want to trace the time spent on top-level trace events.
// If the runtime counter stack is not empty, we should clear the whole runtime
// counter stack, and then reset counters so that we can dump counters into
// top-level trace events accurately.
while (current_timer_.Value()) {
current_timer_.SetValue(current_timer_.Value()->Stop());
}
for (int i = 0; i < kNumberOfCounters; i++) {
GetCounter(i)->Reset();
}
in_use_ = true;
}
void RuntimeCallStats::Dump(v8::tracing::TracedValue* value) {
for (int i = 0; i < kNumberOfCounters; i++) {
if (GetCounter(i)->count() > 0) GetCounter(i)->Dump(value);
}
in_use_ = false;
}
WorkerThreadRuntimeCallStats::WorkerThreadRuntimeCallStats()
: isolate_thread_id_(ThreadId::Current()) {}
WorkerThreadRuntimeCallStats::~WorkerThreadRuntimeCallStats() {
if (tls_key_) base::Thread::DeleteThreadLocalKey(*tls_key_);
}
base::Thread::LocalStorageKey WorkerThreadRuntimeCallStats::GetKey() {
base::MutexGuard lock(&mutex_);
DCHECK(TracingFlags::is_runtime_stats_enabled());
if (!tls_key_) tls_key_ = base::Thread::CreateThreadLocalKey();
return *tls_key_;
}
RuntimeCallStats* WorkerThreadRuntimeCallStats::NewTable() {
DCHECK(TracingFlags::is_runtime_stats_enabled());
// Never create a new worker table on the isolate's main thread.
DCHECK_NE(ThreadId::Current(), isolate_thread_id_);
std::unique_ptr<RuntimeCallStats> new_table =
std::make_unique<RuntimeCallStats>(RuntimeCallStats::kWorkerThread);
RuntimeCallStats* result = new_table.get();
base::MutexGuard lock(&mutex_);
tables_.push_back(std::move(new_table));
return result;
}
void WorkerThreadRuntimeCallStats::AddToMainTable(
RuntimeCallStats* main_call_stats) {
base::MutexGuard lock(&mutex_);
for (auto& worker_stats : tables_) {
DCHECK_NE(main_call_stats, worker_stats.get());
main_call_stats->Add(worker_stats.get());
worker_stats->Reset();
}
}
WorkerThreadRuntimeCallStatsScope::WorkerThreadRuntimeCallStatsScope(
WorkerThreadRuntimeCallStats* worker_stats)
: table_(nullptr) {
if (V8_LIKELY(!TracingFlags::is_runtime_stats_enabled())) return;
table_ = reinterpret_cast<RuntimeCallStats*>(
base::Thread::GetThreadLocal(worker_stats->GetKey()));
if (table_ == nullptr) {
table_ = worker_stats->NewTable();
base::Thread::SetThreadLocal(worker_stats->GetKey(), table_);
}
if ((TracingFlags::runtime_stats.load(std::memory_order_relaxed) &
v8::tracing::TracingCategoryObserver::ENABLED_BY_TRACING)) {
table_->Reset();
}
}
WorkerThreadRuntimeCallStatsScope::~WorkerThreadRuntimeCallStatsScope() {
if (V8_LIKELY(table_ == nullptr)) return;
if ((TracingFlags::runtime_stats.load(std::memory_order_relaxed) &
v8::tracing::TracingCategoryObserver::ENABLED_BY_TRACING)) {
auto value = v8::tracing::TracedValue::Create();
table_->Dump(value.get());
TRACE_EVENT_INSTANT1(TRACE_DISABLED_BY_DEFAULT("v8.runtime_stats"),
"V8.RuntimeStats", TRACE_EVENT_SCOPE_THREAD,
"runtime-call-stats", std::move(value));
}
}
} // namespace internal
} // namespace v8
#endif // V8_RUNTIME_CALL_STATS
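A rough sketch of how a background job is expected to combine the two scopes implemented in this file, so its samples land in a per-thread table that is dumped as a trace event when the scope is destroyed (RunBackgroundCompile and its parameter are hypothetical; the counter comes from FOR_EACH_THREAD_SPECIFIC_COUNTER):

void RunBackgroundCompile(WorkerThreadRuntimeCallStats* worker_rcs) {
  // Lazily creates (or reuses) this worker thread's RuntimeCallStats table.
  WorkerThreadRuntimeCallStatsScope rcs_scope(worker_rcs);
  // kThreadSpecific maps kCompileScript to kCompileBackgroundScript when the
  // table belongs to a worker thread.
  RCS_SCOPE(rcs_scope.Get(), RuntimeCallCounterId::kCompileScript,
            RuntimeCallStats::kThreadSpecific);
  // ... background compilation work ...
}  // If runtime-stats tracing is enabled, the table is dumped here as a
   // "V8.RuntimeStats" trace event.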

View File

@@ -0,0 +1,763 @@
// Copyright 2021 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_LOGGING_RUNTIME_CALL_STATS_H_
#define V8_LOGGING_RUNTIME_CALL_STATS_H_
#include "include/v8.h"
#ifdef V8_RUNTIME_CALL_STATS
#include "src/base/atomic-utils.h"
#include "src/base/optional.h"
#include "src/base/platform/elapsed-timer.h"
#include "src/base/platform/time.h"
#include "src/builtins/builtins-definitions.h"
#include "src/debug/debug-interface.h"
#include "src/execution/thread-id.h"
#include "src/init/heap-symbols.h"
#include "src/logging/tracing-flags.h"
#include "src/runtime/runtime.h"
#include "src/tracing/traced-value.h"
#include "src/tracing/tracing-category-observer.h"
#endif // V8_RUNTIME_CALL_STATS
namespace v8 {
namespace internal {
#ifdef V8_RUNTIME_CALL_STATS
#define RCS_SCOPE(...) \
v8::internal::RuntimeCallTimerScope rcs_timer_scope(__VA_ARGS__)
class RuntimeCallCounter final {
public:
RuntimeCallCounter() : RuntimeCallCounter(nullptr) {}
explicit RuntimeCallCounter(const char* name)
: name_(name), count_(0), time_(0) {}
V8_NOINLINE void Reset();
V8_NOINLINE void Dump(v8::tracing::TracedValue* value);
void Add(RuntimeCallCounter* other);
const char* name() const { return name_; }
int64_t count() const { return count_; }
base::TimeDelta time() const {
return base::TimeDelta::FromMicroseconds(time_);
}
void Increment() { count_++; }
void Add(base::TimeDelta delta) { time_ += delta.InMicroseconds(); }
private:
friend class RuntimeCallStats;
const char* name_;
int64_t count_;
// Stored as int64_t so that its initialization can be deferred.
int64_t time_;
};
// RuntimeCallTimer is used to keep track of the stack of currently active
// timers used for properly measuring the own time of a RuntimeCallCounter.
class RuntimeCallTimer final {
public:
RuntimeCallCounter* counter() { return counter_; }
void set_counter(RuntimeCallCounter* counter) { counter_ = counter; }
RuntimeCallTimer* parent() const { return parent_.Value(); }
void set_parent(RuntimeCallTimer* timer) { parent_.SetValue(timer); }
const char* name() const { return counter_->name(); }
inline bool IsStarted() const { return start_ticks_ != base::TimeTicks(); }
inline void Start(RuntimeCallCounter* counter, RuntimeCallTimer* parent) {
DCHECK(!IsStarted());
counter_ = counter;
parent_.SetValue(parent);
if (TracingFlags::runtime_stats.load(std::memory_order_relaxed) ==
v8::tracing::TracingCategoryObserver::ENABLED_BY_SAMPLING) {
return;
}
base::TimeTicks now = RuntimeCallTimer::Now();
if (parent) parent->Pause(now);
Resume(now);
DCHECK(IsStarted());
}
void Snapshot();
inline RuntimeCallTimer* Stop() {
if (!IsStarted()) return parent();
base::TimeTicks now = RuntimeCallTimer::Now();
Pause(now);
counter_->Increment();
CommitTimeToCounter();
RuntimeCallTimer* parent_timer = parent();
if (parent_timer) {
parent_timer->Resume(now);
}
return parent_timer;
}
// Make the time source configurable for testing purposes.
V8_EXPORT_PRIVATE static base::TimeTicks (*Now)();
// Helper to switch over to CPU time.
static base::TimeTicks NowCPUTime();
private:
inline void Pause(base::TimeTicks now) {
DCHECK(IsStarted());
elapsed_ += (now - start_ticks_);
start_ticks_ = base::TimeTicks();
}
inline void Resume(base::TimeTicks now) {
DCHECK(!IsStarted());
start_ticks_ = now;
}
inline void CommitTimeToCounter() {
counter_->Add(elapsed_);
elapsed_ = base::TimeDelta();
}
RuntimeCallCounter* counter_ = nullptr;
base::AtomicValue<RuntimeCallTimer*> parent_;
base::TimeTicks start_ticks_;
base::TimeDelta elapsed_;
};
#define FOR_EACH_GC_COUNTER(V) \
TRACER_SCOPES(V) \
TRACER_BACKGROUND_SCOPES(V)
#define FOR_EACH_API_COUNTER(V) \
V(AccessorPair_New) \
V(ArrayBuffer_Cast) \
V(ArrayBuffer_Detach) \
V(ArrayBuffer_New) \
V(ArrayBuffer_NewBackingStore) \
V(ArrayBuffer_BackingStore_Reallocate) \
V(Array_CloneElementAt) \
V(Array_New) \
V(BigInt64Array_New) \
V(BigInt_NewFromWords) \
V(BigIntObject_BigIntValue) \
V(BigIntObject_New) \
V(BigUint64Array_New) \
V(BooleanObject_BooleanValue) \
V(BooleanObject_New) \
V(Context_New) \
V(Context_NewRemoteContext) \
V(DataView_New) \
V(Date_New) \
V(Date_NumberValue) \
V(Debug_Call) \
V(debug_GetPrivateMembers) \
V(Error_New) \
V(External_New) \
V(Float32Array_New) \
V(Float64Array_New) \
V(Function_Call) \
V(Function_New) \
V(Function_FunctionProtoToString) \
V(Function_NewInstance) \
V(FunctionTemplate_GetFunction) \
V(FunctionTemplate_New) \
V(FunctionTemplate_NewRemoteInstance) \
V(FunctionTemplate_NewWithCache) \
V(FunctionTemplate_NewWithFastHandler) \
V(Int16Array_New) \
V(Int32Array_New) \
V(Int8Array_New) \
V(Isolate_DateTimeConfigurationChangeNotification) \
V(Isolate_LocaleConfigurationChangeNotification) \
V(JSON_Parse) \
V(JSON_Stringify) \
V(Map_AsArray) \
V(Map_Clear) \
V(Map_Delete) \
V(Map_Get) \
V(Map_Has) \
V(Map_New) \
V(Map_Set) \
V(Message_GetEndColumn) \
V(Message_GetLineNumber) \
V(Message_GetSourceLine) \
V(Message_GetStartColumn) \
V(Module_Evaluate) \
V(Module_InstantiateModule) \
V(Module_SetSyntheticModuleExport) \
V(NumberObject_New) \
V(NumberObject_NumberValue) \
V(Object_CallAsConstructor) \
V(Object_CallAsFunction) \
V(Object_CreateDataProperty) \
V(Object_DefineOwnProperty) \
V(Object_DefineProperty) \
V(Object_Delete) \
V(Object_DeleteProperty) \
V(Object_ForceSet) \
V(Object_Get) \
V(Object_GetOwnPropertyDescriptor) \
V(Object_GetOwnPropertyNames) \
V(Object_GetPropertyAttributes) \
V(Object_GetPropertyNames) \
V(Object_GetRealNamedProperty) \
V(Object_GetRealNamedPropertyAttributes) \
V(Object_GetRealNamedPropertyAttributesInPrototypeChain) \
V(Object_GetRealNamedPropertyInPrototypeChain) \
V(Object_Has) \
V(Object_HasOwnProperty) \
V(Object_HasRealIndexedProperty) \
V(Object_HasRealNamedCallbackProperty) \
V(Object_HasRealNamedProperty) \
V(Object_IsCodeLike) \
V(Object_New) \
V(Object_ObjectProtoToString) \
V(Object_Set) \
V(Object_SetAccessor) \
V(Object_SetIntegrityLevel) \
V(Object_SetPrivate) \
V(Object_SetPrototype) \
V(ObjectTemplate_New) \
V(ObjectTemplate_NewInstance) \
V(Object_ToArrayIndex) \
V(Object_ToBigInt) \
V(Object_ToDetailString) \
V(Object_ToInt32) \
V(Object_ToInteger) \
V(Object_ToNumber) \
V(Object_ToObject) \
V(Object_ToString) \
V(Object_ToUint32) \
V(Persistent_New) \
V(Private_New) \
V(Promise_Catch) \
V(Promise_Chain) \
V(Promise_HasRejectHandler) \
V(Promise_Resolver_New) \
V(Promise_Resolver_Reject) \
V(Promise_Resolver_Resolve) \
V(Promise_Result) \
V(Promise_Status) \
V(Promise_Then) \
V(Proxy_New) \
V(RangeError_New) \
V(ReferenceError_New) \
V(RegExp_Exec) \
V(RegExp_New) \
V(ScriptCompiler_Compile) \
V(ScriptCompiler_CompileFunctionInContext) \
V(ScriptCompiler_CompileUnbound) \
V(Script_Run) \
V(Set_Add) \
V(Set_AsArray) \
V(Set_Clear) \
V(Set_Delete) \
V(Set_Has) \
V(Set_New) \
V(SharedArrayBuffer_New) \
V(SharedArrayBuffer_NewBackingStore) \
V(String_Concat) \
V(String_NewExternalOneByte) \
V(String_NewExternalTwoByte) \
V(String_NewFromOneByte) \
V(String_NewFromTwoByte) \
V(String_NewFromUtf8) \
V(String_NewFromUtf8Literal) \
V(StringObject_New) \
V(StringObject_StringValue) \
V(String_Write) \
V(String_WriteUtf8) \
V(Symbol_New) \
V(SymbolObject_New) \
V(SymbolObject_SymbolValue) \
V(SyntaxError_New) \
V(TracedGlobal_New) \
V(TryCatch_StackTrace) \
V(TypeError_New) \
V(Uint16Array_New) \
V(Uint32Array_New) \
V(Uint8Array_New) \
V(Uint8ClampedArray_New) \
V(UnboundScript_GetId) \
V(UnboundScript_GetLineNumber) \
V(UnboundScript_GetName) \
V(UnboundScript_GetSourceMappingURL) \
V(UnboundScript_GetSourceURL) \
V(ValueDeserializer_ReadHeader) \
V(ValueDeserializer_ReadValue) \
V(ValueSerializer_WriteValue) \
V(Value_Equals) \
V(Value_InstanceOf) \
V(Value_Int32Value) \
V(Value_IntegerValue) \
V(Value_NumberValue) \
V(Value_TypeOf) \
V(Value_Uint32Value) \
V(WasmCompileError_New) \
V(WasmLinkError_New) \
V(WasmRuntimeError_New) \
V(WeakMap_Get) \
V(WeakMap_New) \
V(WeakMap_Set)
#define ADD_THREAD_SPECIFIC_COUNTER(V, Prefix, Suffix) \
V(Prefix##Suffix) \
V(Prefix##Background##Suffix)
#define FOR_EACH_THREAD_SPECIFIC_COUNTER(V) \
ADD_THREAD_SPECIFIC_COUNTER(V, Compile, Analyse) \
ADD_THREAD_SPECIFIC_COUNTER(V, Compile, Eval) \
ADD_THREAD_SPECIFIC_COUNTER(V, Compile, Function) \
ADD_THREAD_SPECIFIC_COUNTER(V, Compile, Ignition) \
ADD_THREAD_SPECIFIC_COUNTER(V, Compile, IgnitionFinalization) \
ADD_THREAD_SPECIFIC_COUNTER(V, Compile, RewriteReturnResult) \
ADD_THREAD_SPECIFIC_COUNTER(V, Compile, ScopeAnalysis) \
ADD_THREAD_SPECIFIC_COUNTER(V, Compile, Script) \
\
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, AllocateFPRegisters) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, AllocateGeneralRegisters) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, AssembleCode) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, AssignSpillSlots) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, BuildLiveRangeBundles) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, BuildLiveRanges) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, BytecodeGraphBuilder) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, CommitAssignment) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, ConnectRanges) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, ControlFlowOptimization) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, CSAEarlyOptimization) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, CSAOptimization) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, DecideSpillingMode) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, DecompressionOptimization) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, EarlyOptimization) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, EarlyTrimming) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, EffectLinearization) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, EscapeAnalysis) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, FinalizeCode) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, FrameElision) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, GenericLowering) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, Inlining) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, JumpThreading) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, MidTierPopulateReferenceMaps) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, MidTierRegisterAllocator) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, MidTierRegisterOutputDefinition) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, MidTierSpillSlotAllocator) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, LateGraphTrimming) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, LateOptimization) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, LoadElimination) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, LocateSpillSlots) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, LoopExitElimination) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, LoopPeeling) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, MachineOperatorOptimization) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, MeetRegisterConstraints) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, MemoryOptimization) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, OptimizeMoves) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, PopulatePointerMaps) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, PrintGraph) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, ResolveControlFlow) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, ResolvePhis) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, \
ScheduledEffectControlLinearization) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, ScheduledMachineLowering) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, Scheduling) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, SelectInstructions) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, SimplifiedLowering) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, StoreStoreElimination) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, TypeAssertions) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, TypedLowering) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, Typer) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, Untyper) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, VerifyGraph) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, WasmBaseOptimization) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, WasmInlining) \
ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, WasmLoopUnrolling) \
\
ADD_THREAD_SPECIFIC_COUNTER(V, Parse, ArrowFunctionLiteral) \
ADD_THREAD_SPECIFIC_COUNTER(V, Parse, FunctionLiteral) \
ADD_THREAD_SPECIFIC_COUNTER(V, Parse, Program) \
ADD_THREAD_SPECIFIC_COUNTER(V, PreParse, ArrowFunctionLiteral) \
ADD_THREAD_SPECIFIC_COUNTER(V, PreParse, WithVariableResolution)
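The ADD_THREAD_SPECIFIC_COUNTER helper above emits two counters per entry, a main-thread one and a Background one. As a standalone illustration (example macro and enum names only, not part of this patch), the expansion and the adjacency invariant it produces look roughly like this:

#define EXAMPLE_ADD_THREAD_SPECIFIC_COUNTER(V, Prefix, Suffix) \
  V(Prefix##Suffix)                                            \
  V(Prefix##Background##Suffix)

enum ExampleCounterId {
#define EXAMPLE_DEFINE(name) kExample##name,
  EXAMPLE_ADD_THREAD_SPECIFIC_COUNTER(EXAMPLE_DEFINE, Compile, Script)
#undef EXAMPLE_DEFINE
};

// The Background variant directly follows its main-thread variant; this is
// the layout RuntimeCallStats::CounterIdForThread() below relies on when it
// selects the worker-thread counter by adding 1 to the id.
static_assert(kExampleCompileBackgroundScript == kExampleCompileScript + 1,
              "Background variant must directly follow the main-thread one");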
#define FOR_EACH_MANUAL_COUNTER(V) \
V(AccessorGetterCallback) \
V(AccessorSetterCallback) \
V(ArrayLengthGetter) \
V(ArrayLengthSetter) \
V(BoundFunctionLengthGetter) \
V(BoundFunctionNameGetter) \
V(CodeGenerationFromStringsCallbacks) \
V(CompileBackgroundCompileTask) \
V(CompileBaseline) \
V(CompileBaselineVisit) \
V(CompileBaselinePreVisit) \
V(CompileCollectSourcePositions) \
V(CompileDeserialize) \
V(CompileEnqueueOnDispatcher) \
V(CompileFinalizeBackgroundCompileTask) \
V(CompileFinishNowOnDispatcher) \
V(CompileGetFromOptimizedCodeMap) \
V(CompilePublishBackgroundFinalization) \
V(CompileSerialize) \
V(CompileWaitForDispatcher) \
V(ConfigureInstance) \
V(CreateApiFunction) \
V(DeoptimizeCode) \
V(DeserializeContext) \
V(DeserializeIsolate) \
V(FinalizationRegistryCleanupFromTask) \
V(FunctionCallback) \
V(FunctionLengthGetter) \
V(FunctionPrototypeGetter) \
V(FunctionPrototypeSetter) \
V(GC_Custom_AllAvailableGarbage) \
V(GC_Custom_IncrementalMarkingObserver) \
V(GC_Custom_SlowAllocateRaw) \
V(GCEpilogueCallback) \
V(GCPrologueCallback) \
V(Genesis) \
V(GetCompatibleReceiver) \
V(GetMoreDataCallback) \
V(IndexedDefinerCallback) \
V(IndexedDeleterCallback) \
V(IndexedDescriptorCallback) \
V(IndexedEnumeratorCallback) \
V(IndexedGetterCallback) \
V(IndexedQueryCallback) \
V(IndexedSetterCallback) \
V(InstantiateFunction) \
V(InstantiateObject) \
V(Invoke) \
V(InvokeApiFunction) \
V(InvokeApiInterruptCallbacks) \
V(IsCompatibleReceiver) \
V(IsCompatibleReceiverMap) \
V(IsTemplateFor) \
V(JS_Execution) \
V(Map_SetPrototype) \
V(Map_TransitionToAccessorProperty) \
V(Map_TransitionToDataProperty) \
V(MessageListenerCallback) \
V(NamedDefinerCallback) \
V(NamedDeleterCallback) \
V(NamedDescriptorCallback) \
V(NamedEnumeratorCallback) \
V(NamedGetterCallback) \
V(NamedQueryCallback) \
V(NamedSetterCallback) \
V(Object_DeleteProperty) \
V(ObjectVerify) \
V(OptimizeBackgroundDispatcherJob) \
V(OptimizeCode) \
V(OptimizeConcurrentFinalize) \
V(OptimizeConcurrentPrepare) \
V(OptimizeFinalizePipelineJob) \
V(OptimizeHeapBrokerInitialization) \
V(OptimizeNonConcurrent) \
V(OptimizeSerialization) \
V(OptimizeSerializeMetadata) \
V(ParseEval) \
V(ParseFunction) \
V(PropertyCallback) \
V(PrototypeMap_TransitionToAccessorProperty) \
V(PrototypeMap_TransitionToDataProperty) \
V(PrototypeObject_DeleteProperty) \
V(ReconfigureToDataProperty) \
V(UpdateProtector) \
V(StringLengthGetter) \
V(TestCounter1) \
V(TestCounter2) \
V(TestCounter3)
#define FOR_EACH_HANDLER_COUNTER(V) \
V(KeyedLoadIC_KeyedLoadSloppyArgumentsStub) \
V(KeyedLoadIC_LoadElementDH) \
V(KeyedLoadIC_LoadIndexedInterceptorStub) \
V(KeyedLoadIC_LoadIndexedStringDH) \
V(KeyedLoadIC_SlowStub) \
V(KeyedStoreIC_ElementsTransitionAndStoreStub) \
V(KeyedStoreIC_KeyedStoreSloppyArgumentsStub) \
V(KeyedStoreIC_SlowStub) \
V(KeyedStoreIC_StoreElementStub) \
V(KeyedStoreIC_StoreFastElementStub) \
V(LoadGlobalIC_LoadScriptContextField) \
V(LoadGlobalIC_SlowStub) \
V(LoadIC_FunctionPrototypeStub) \
V(LoadIC_HandlerCacheHit_Accessor) \
V(LoadIC_LoadAccessorDH) \
V(LoadIC_LoadAccessorFromPrototypeDH) \
V(LoadIC_LoadApiGetterFromPrototypeDH) \
V(LoadIC_LoadCallback) \
V(LoadIC_LoadConstantDH) \
V(LoadIC_LoadConstantFromPrototypeDH) \
V(LoadIC_LoadFieldDH) \
V(LoadIC_LoadFieldFromPrototypeDH) \
V(LoadIC_LoadGlobalDH) \
V(LoadIC_LoadGlobalFromPrototypeDH) \
V(LoadIC_LoadIntegerIndexedExoticDH) \
V(LoadIC_LoadInterceptorDH) \
V(LoadIC_LoadInterceptorFromPrototypeDH) \
V(LoadIC_LoadNativeDataPropertyDH) \
V(LoadIC_LoadNativeDataPropertyFromPrototypeDH) \
V(LoadIC_LoadNonexistentDH) \
V(LoadIC_LoadNonMaskingInterceptorDH) \
V(LoadIC_LoadNormalDH) \
V(LoadIC_LoadNormalFromPrototypeDH) \
V(LoadIC_NonReceiver) \
V(LoadIC_SlowStub) \
V(LoadIC_StringLength) \
V(LoadIC_StringWrapperLength) \
V(StoreGlobalIC_SlowStub) \
V(StoreGlobalIC_StoreScriptContextField) \
V(StoreIC_HandlerCacheHit_Accessor) \
V(StoreIC_NonReceiver) \
V(StoreIC_SlowStub) \
V(StoreIC_StoreAccessorDH) \
V(StoreIC_StoreAccessorOnPrototypeDH) \
V(StoreIC_StoreApiSetterOnPrototypeDH) \
V(StoreIC_StoreFieldDH) \
V(StoreIC_StoreGlobalDH) \
V(StoreIC_StoreGlobalTransitionDH) \
V(StoreIC_StoreInterceptorStub) \
V(StoreIC_StoreNativeDataPropertyDH) \
V(StoreIC_StoreNativeDataPropertyOnPrototypeDH) \
V(StoreIC_StoreNormalDH) \
V(StoreIC_StoreTransitionDH) \
V(StoreInArrayLiteralIC_SlowStub)
enum RuntimeCallCounterId {
#define CALL_RUNTIME_COUNTER(name) kGC_##name,
FOR_EACH_GC_COUNTER(CALL_RUNTIME_COUNTER)
#undef CALL_RUNTIME_COUNTER
#define CALL_RUNTIME_COUNTER(name) k##name,
FOR_EACH_MANUAL_COUNTER(CALL_RUNTIME_COUNTER)
#undef CALL_RUNTIME_COUNTER
#define CALL_RUNTIME_COUNTER(name, nargs, ressize) kRuntime_##name,
FOR_EACH_INTRINSIC(CALL_RUNTIME_COUNTER)
#undef CALL_RUNTIME_COUNTER
#define CALL_BUILTIN_COUNTER(name) kBuiltin_##name,
BUILTIN_LIST_C(CALL_BUILTIN_COUNTER)
#undef CALL_BUILTIN_COUNTER
#define CALL_BUILTIN_COUNTER(name) kAPI_##name,
FOR_EACH_API_COUNTER(CALL_BUILTIN_COUNTER)
#undef CALL_BUILTIN_COUNTER
#define CALL_BUILTIN_COUNTER(name) kHandler_##name,
FOR_EACH_HANDLER_COUNTER(CALL_BUILTIN_COUNTER)
#undef CALL_BUILTIN_COUNTER
#define THREAD_SPECIFIC_COUNTER(name) k##name,
FOR_EACH_THREAD_SPECIFIC_COUNTER(
THREAD_SPECIFIC_COUNTER)
#undef THREAD_SPECIFIC_COUNTER
kNumberOfCounters,
};
class RuntimeCallStats final {
public:
enum ThreadType { kMainIsolateThread, kWorkerThread };
// If kExact is chosen the counter will be used as given. With kThreadSpecific,
// if the RuntimeCallStats was created for a worker thread, then the
// background-specific version of the counter will be used instead.
enum CounterMode { kExact, kThreadSpecific };
explicit V8_EXPORT_PRIVATE RuntimeCallStats(ThreadType thread_type);
// Start measuring the time for a function. This will establish the
// connection to the parent counter for properly calculating own times.
V8_EXPORT_PRIVATE void Enter(RuntimeCallTimer* timer,
RuntimeCallCounterId counter_id);
// Leave a scope for a measured runtime function. This will properly add
// the time delta to the current_counter and subtract the delta from its
// parent.
V8_EXPORT_PRIVATE void Leave(RuntimeCallTimer* timer);
// Set the counter id for the innermost measurement. It can be used to refine
// the event kind when a runtime entry counter is too generic.
V8_EXPORT_PRIVATE void CorrectCurrentCounterId(
RuntimeCallCounterId counter_id, CounterMode mode = kExact);
V8_EXPORT_PRIVATE void Reset();
// Add all entries from another stats object.
void Add(RuntimeCallStats* other);
V8_EXPORT_PRIVATE void Print(std::ostream& os);
V8_EXPORT_PRIVATE void Print();
V8_NOINLINE void Dump(v8::tracing::TracedValue* value);
V8_EXPORT_PRIVATE void EnumerateCounters(
debug::RuntimeCallCounterCallback callback);
ThreadId thread_id() const { return thread_id_; }
RuntimeCallTimer* current_timer() { return current_timer_.Value(); }
RuntimeCallCounter* current_counter() { return current_counter_.Value(); }
bool InUse() { return in_use_; }
bool IsCalledOnTheSameThread();
V8_EXPORT_PRIVATE bool IsBackgroundThreadSpecificVariant(
RuntimeCallCounterId id);
V8_EXPORT_PRIVATE bool HasThreadSpecificCounterVariants(
RuntimeCallCounterId id);
// This should only be called for counters with a dual Background variant. If
// on the main thread, this just returns the counter. If on a worker thread,
// it returns the Background variant of the counter.
RuntimeCallCounterId CounterIdForThread(RuntimeCallCounterId id) {
DCHECK(HasThreadSpecificCounterVariants(id));
// All thread-specific counters are laid out with the main thread variant
// first, followed by the background variant.
return thread_type_ == kWorkerThread
? static_cast<RuntimeCallCounterId>(id + 1)
: id;
}
bool IsCounterAppropriateForThread(RuntimeCallCounterId id) {
// TODO(delphick): We should add background-only counters and ensure that
// all counters (not just the thread-specific variants) are only invoked on
// the correct thread.
if (!HasThreadSpecificCounterVariants(id)) return true;
return IsBackgroundThreadSpecificVariant(id) ==
(thread_type_ == kWorkerThread);
}
static const int kNumberOfCounters =
static_cast<int>(RuntimeCallCounterId::kNumberOfCounters);
RuntimeCallCounter* GetCounter(RuntimeCallCounterId counter_id) {
return &counters_[static_cast<int>(counter_id)];
}
RuntimeCallCounter* GetCounter(int counter_id) {
return &counters_[counter_id];
}
private:
// Top of a stack of active timers.
base::AtomicValue<RuntimeCallTimer*> current_timer_;
// Active counter object associated with current timer.
base::AtomicValue<RuntimeCallCounter*> current_counter_;
// Used to track nested tracing scopes.
bool in_use_;
ThreadType thread_type_;
ThreadId thread_id_;
RuntimeCallCounter counters_[kNumberOfCounters];
};
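A minimal usage sketch of the Enter/Leave protocol described above; the surrounding function is hypothetical, and it assumes `stats` is a valid RuntimeCallStats* with runtime stats tracing enabled. RCS_SCOPE/RuntimeCallTimerScope below wrap exactly this pairing:

void MeasureSomething(RuntimeCallStats* stats) {
  RuntimeCallTimer timer;
  // Enter() links the timer to the current top of the timer stack so that
  // nested measurements subtract their time and only "own" time is counted.
  stats->Enter(&timer, RuntimeCallCounterId::kTestCounter1);
  // ... work being measured ...
  stats->Leave(&timer);
}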
class WorkerThreadRuntimeCallStats final {
public:
WorkerThreadRuntimeCallStats();
~WorkerThreadRuntimeCallStats();
// Returns the TLS key associated with this WorkerThreadRuntimeCallStats.
base::Thread::LocalStorageKey GetKey();
// Returns a new worker thread runtime call stats table managed by this
// WorkerThreadRuntimeCallStats.
RuntimeCallStats* NewTable();
// Adds the counters from the worker thread tables to |main_call_stats|.
void AddToMainTable(RuntimeCallStats* main_call_stats);
private:
base::Mutex mutex_;
std::vector<std::unique_ptr<RuntimeCallStats>> tables_;
base::Optional<base::Thread::LocalStorageKey> tls_key_;
// Since this is for creating worker thread runtime-call stats, record the
// main thread ID to ensure we never create a worker RCS table for the main
// thread.
ThreadId isolate_thread_id_;
};
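A sketch of the aggregation flow; the function is hypothetical, and the accessor names are taken from call sites later in this patch. Worker tables handed out by NewTable() remain owned by this object and are folded into the main-isolate table before reporting:

void DumpAggregatedStats(Isolate* isolate) {
  RuntimeCallStats* main_stats = isolate->counters()->runtime_call_stats();
  // Merge all per-worker tables into the main table, then print it, the same
  // way Runtime_GetAndResetRuntimeCallStats does further below.
  isolate->counters()->worker_thread_runtime_call_stats()->AddToMainTable(
      main_stats);
  main_stats->Print();
}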
// Creating a WorkerThreadRuntimeCallStatsScope will provide a thread-local
// runtime call stats table, and will dump the table to an immediate trace event
// when it is destroyed.
class V8_NODISCARD WorkerThreadRuntimeCallStatsScope final {
public:
explicit WorkerThreadRuntimeCallStatsScope(
WorkerThreadRuntimeCallStats* off_thread_stats);
~WorkerThreadRuntimeCallStatsScope();
RuntimeCallStats* Get() const { return table_; }
private:
RuntimeCallStats* table_;
};
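A sketch of a background job picking up its thread-local table; the function is hypothetical and assumes `worker_stats` comes from isolate->counters()->worker_thread_runtime_call_stats():

void RunBackgroundJob(WorkerThreadRuntimeCallStats* worker_stats) {
  WorkerThreadRuntimeCallStatsScope rcs_scope(worker_stats);
  // Attribute the work to the Background variant of the compile counter; the
  // per-thread table is dumped to a trace event when rcs_scope is destroyed.
  RCS_SCOPE(rcs_scope.Get(), RuntimeCallCounterId::kCompileBackgroundScript);
  // ... background work ...
}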
#define CHANGE_CURRENT_RUNTIME_COUNTER(runtime_call_stats, counter_id) \
do { \
if (V8_UNLIKELY(TracingFlags::is_runtime_stats_enabled()) && \
runtime_call_stats) { \
runtime_call_stats->CorrectCurrentCounterId(counter_id); \
} \
} while (false)
#define TRACE_HANDLER_STATS(isolate, counter_name) \
CHANGE_CURRENT_RUNTIME_COUNTER( \
isolate->counters()->runtime_call_stats(), \
RuntimeCallCounterId::kHandler_##counter_name)
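A typical call site, sketched with a hypothetical function: an IC slow path refines the generic counter already on top of the timer stack to a specific handler counter. The macro is a runtime no-op unless runtime stats tracing is enabled:

void OnStoreICSlowPath(Isolate* isolate) {
  // Re-labels the innermost active timer as kHandler_StoreIC_SlowStub.
  TRACE_HANDLER_STATS(isolate, StoreIC_SlowStub);
  // ... continue with the slow-path store ...
}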
// A RuntimeCallTimerScope wraps around a RuntimeCallTimer to measure the
// time of a C++ scope.
class V8_NODISCARD RuntimeCallTimerScope {
public:
inline RuntimeCallTimerScope(Isolate* isolate,
RuntimeCallCounterId counter_id);
inline RuntimeCallTimerScope(RuntimeCallStats* stats,
RuntimeCallCounterId counter_id,
RuntimeCallStats::CounterMode mode =
RuntimeCallStats::CounterMode::kExact) {
if (V8_LIKELY(!TracingFlags::is_runtime_stats_enabled() ||
stats == nullptr)) {
return;
}
stats_ = stats;
if (mode == RuntimeCallStats::CounterMode::kThreadSpecific) {
counter_id = stats->CounterIdForThread(counter_id);
}
DCHECK(stats->IsCounterAppropriateForThread(counter_id));
stats_->Enter(&timer_, counter_id);
}
inline ~RuntimeCallTimerScope() {
if (V8_UNLIKELY(stats_ != nullptr)) {
stats_->Leave(&timer_);
}
}
RuntimeCallTimerScope(const RuntimeCallTimerScope&) = delete;
RuntimeCallTimerScope& operator=(const RuntimeCallTimerScope&) = delete;
private:
RuntimeCallStats* stats_ = nullptr;
RuntimeCallTimer timer_;
};
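A sketch of the kThreadSpecific mode used by the parser changes further below; the function is hypothetical, and `stats` is assumed to be the RuntimeCallStats table of the current thread. The same call site attributes time to kParseProgram on the main thread and to kParseBackgroundProgram on a worker thread:

void ParseSomething(RuntimeCallStats* stats) {
  RCS_SCOPE(stats, RuntimeCallCounterId::kParseProgram,
            RuntimeCallStats::kThreadSpecific);
  // ... parsing work ...
}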
#else // RUNTIME_CALL_STATS
#define RCS_SCOPE(...)
#define TRACE_HANDLER_STATS(...)
#define CHANGE_CURRENT_RUNTIME_COUNTER(...)
// Create dummy types to limit code changes when runtime call stats are
// disabled.
class WorkerThreadRuntimeCallStats {};
class RuntimeCallStats {
public:
enum ThreadType { kMainIsolateThread, kWorkerThread };
explicit V8_EXPORT_PRIVATE RuntimeCallStats(ThreadType thread_type) {}
};
class WorkerThreadRuntimeCallStatsScope {
public:
explicit WorkerThreadRuntimeCallStatsScope(
WorkerThreadRuntimeCallStats* off_thread_stats) {}
RuntimeCallStats* Get() const { return nullptr; }
};
#endif // RUNTIME_CALL_STATS
} // namespace internal
} // namespace v8
#endif // V8_LOGGING_RUNTIME_CALL_STATS_H_
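For completeness, a sketch of what a call site reduces to when V8_RUNTIME_CALL_STATS is not defined: the macros above expand to nothing, the argument tokens are discarded by the preprocessor, and no RCS code or data is emitted (the function and counter choices below are illustrative only).

void Example(Isolate* isolate) {
  RCS_SCOPE(isolate, RuntimeCallCounterId::kInvoke);  // expands to nothing
  TRACE_HANDLER_STATS(isolate, LoadIC_SlowStub);      // expands to nothing
  // ... only the actual work remains ...
}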

View File

@ -209,7 +209,7 @@ bool LookupIterator::IsCacheableTransition() {
// static
void LookupIterator::UpdateProtector(Isolate* isolate, Handle<Object> receiver,
Handle<Name> name) {
RuntimeCallTimerScope scope(isolate, RuntimeCallCounterId::kUpdateProtector);
RCS_SCOPE(isolate, RuntimeCallCounterId::kUpdateProtector);
// This list must be kept in sync with
// CodeStubAssembler::CheckForAssociatedProtector!

View File

@ -689,10 +689,10 @@ void LookupIterator::Delete() {
} else {
DCHECK(!name()->IsPrivateName(isolate_));
bool is_prototype_map = holder->map(isolate_).is_prototype_map();
RuntimeCallTimerScope stats_scope(
isolate_, is_prototype_map
? RuntimeCallCounterId::kPrototypeObject_DeleteProperty
: RuntimeCallCounterId::kObject_DeleteProperty);
RCS_SCOPE(isolate_,
is_prototype_map
? RuntimeCallCounterId::kPrototypeObject_DeleteProperty
: RuntimeCallCounterId::kObject_DeleteProperty);
PropertyNormalizationMode mode =
is_prototype_map ? KEEP_INOBJECT_PROPERTIES : CLEAR_INOBJECT_PROPERTIES;

View File

@ -10,7 +10,6 @@
#include "src/handles/maybe-handles.h"
#include "src/heap/heap-write-barrier-inl.h"
#include "src/init/bootstrapper.h"
#include "src/logging/counters-inl.h"
#include "src/logging/log.h"
#include "src/objects/arguments-inl.h"
#include "src/objects/descriptor-array.h"
@ -1868,11 +1867,10 @@ Handle<Map> Map::TransitionToDataProperty(Isolate* isolate, Handle<Map> map,
PropertyAttributes attributes,
PropertyConstness constness,
StoreOrigin store_origin) {
RuntimeCallTimerScope stats_scope(
isolate,
map->IsDetached(isolate)
? RuntimeCallCounterId::kPrototypeMap_TransitionToDataProperty
: RuntimeCallCounterId::kMap_TransitionToDataProperty);
RCS_SCOPE(isolate,
map->IsDetached(isolate)
? RuntimeCallCounterId::kPrototypeMap_TransitionToDataProperty
: RuntimeCallCounterId::kMap_TransitionToDataProperty);
DCHECK(name->IsUniqueName());
DCHECK(!map->is_dictionary_map());
@ -1955,7 +1953,7 @@ Handle<Map> Map::TransitionToAccessorProperty(Isolate* isolate, Handle<Map> map,
Handle<Object> getter,
Handle<Object> setter,
PropertyAttributes attributes) {
RuntimeCallTimerScope stats_scope(
RCS_SCOPE(
isolate,
map->IsDetached(isolate)
? RuntimeCallCounterId::kPrototypeMap_TransitionToAccessorProperty
@ -2349,8 +2347,7 @@ bool Map::IsPrototypeChainInvalidated(Map map) {
void Map::SetPrototype(Isolate* isolate, Handle<Map> map,
Handle<HeapObject> prototype,
bool enable_prototype_setup_mode) {
RuntimeCallTimerScope stats_scope(isolate,
RuntimeCallCounterId::kMap_SetPrototype);
RCS_SCOPE(isolate, RuntimeCallCounterId::kMap_SetPrototype);
if (prototype->IsJSObject()) {
Handle<JSObject> prototype_jsobj = Handle<JSObject>::cast(prototype);

View File

@ -41,7 +41,6 @@
#include "src/heap/read-only-heap.h"
#include "src/ic/ic.h"
#include "src/init/bootstrapper.h"
#include "src/logging/counters-inl.h"
#include "src/logging/counters.h"
#include "src/logging/log.h"
#include "src/objects/allocation-site-inl.h"
@ -1321,7 +1320,7 @@ Handle<SharedFunctionInfo> FunctionTemplateInfo::GetOrCreateSharedFunctionInfo(
}
bool FunctionTemplateInfo::IsTemplateFor(Map map) {
RuntimeCallTimerScope timer(
RCS_SCOPE(
LocalHeap::Current() == nullptr
? GetIsolate()->counters()->runtime_call_stats()
: LocalIsolate::FromHeap(LocalHeap::Current())->runtime_call_stats(),

View File

@ -4436,12 +4436,11 @@ template <typename Impl>
typename ParserBase<Impl>::ExpressionT
ParserBase<Impl>::ParseArrowFunctionLiteral(
const FormalParametersT& formal_parameters) {
const RuntimeCallCounterId counters[2] = {
RuntimeCallCounterId::kParseArrowFunctionLiteral,
RuntimeCallCounterId::kPreParseArrowFunctionLiteral};
RuntimeCallTimerScope runtime_timer(runtime_call_stats_,
counters[Impl::IsPreParser()],
RuntimeCallStats::kThreadSpecific);
RCS_SCOPE(runtime_call_stats_,
Impl::IsPreParser()
? RuntimeCallCounterId::kPreParseArrowFunctionLiteral
: RuntimeCallCounterId::kParseArrowFunctionLiteral,
RuntimeCallStats::kThreadSpecific);
base::ElapsedTimer timer;
if (V8_UNLIKELY(FLAG_log_function_events)) timer.Start();

View File

@ -522,10 +522,9 @@ void Parser::ParseProgram(Isolate* isolate, Handle<Script> script,
// It's OK to use the Isolate & counters here, since this function is only
// called in the main thread.
DCHECK(parsing_on_main_thread_);
RuntimeCallTimerScope runtime_timer(
runtime_call_stats_, flags().is_eval()
? RuntimeCallCounterId::kParseEval
: RuntimeCallCounterId::kParseProgram);
RCS_SCOPE(runtime_call_stats_, flags().is_eval()
? RuntimeCallCounterId::kParseEval
: RuntimeCallCounterId::kParseProgram);
TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"), "V8.ParseProgram");
base::ElapsedTimer timer;
if (V8_UNLIKELY(FLAG_log_function_events)) timer.Start();
@ -704,9 +703,8 @@ void Parser::PostProcessParseResult(Isolate* isolate, ParseInfo* info,
if (isolate) info->ast_value_factory()->Internalize(isolate);
{
RuntimeCallTimerScope runtimeTimer(info->runtime_call_stats(),
RuntimeCallCounterId::kCompileAnalyse,
RuntimeCallStats::kThreadSpecific);
RCS_SCOPE(info->runtime_call_stats(), RuntimeCallCounterId::kCompileAnalyse,
RuntimeCallStats::kThreadSpecific);
if (!Rewriter::Rewrite(info) || !DeclarationScope::Analyze(info)) {
// Null out the literal to indicate that something failed.
info->set_literal(nullptr);
@ -819,8 +817,7 @@ void Parser::ParseFunction(Isolate* isolate, ParseInfo* info,
// It's OK to use the Isolate & counters here, since this function is only
// called in the main thread.
DCHECK(parsing_on_main_thread_);
RuntimeCallTimerScope runtime_timer(runtime_call_stats_,
RuntimeCallCounterId::kParseFunction);
RCS_SCOPE(runtime_call_stats_, RuntimeCallCounterId::kParseFunction);
TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"), "V8.ParseFunction");
base::ElapsedTimer timer;
if (V8_UNLIKELY(FLAG_log_function_events)) timer.Start();
@ -2569,9 +2566,8 @@ FunctionLiteral* Parser::ParseFunctionLiteral(
const bool is_lazy_top_level_function = is_lazy && is_top_level;
const bool is_lazy_inner_function = is_lazy && !is_top_level;
RuntimeCallTimerScope runtime_timer(
runtime_call_stats_, RuntimeCallCounterId::kParseFunctionLiteral,
RuntimeCallStats::kThreadSpecific);
RCS_SCOPE(runtime_call_stats_, RuntimeCallCounterId::kParseFunctionLiteral,
RuntimeCallStats::kThreadSpecific);
base::ElapsedTimer timer;
if (V8_UNLIKELY(FLAG_log_function_events)) timer.Start();
@ -2662,14 +2658,14 @@ FunctionLiteral* Parser::ParseFunctionLiteral(
reinterpret_cast<const char*>(function_name->raw_data()),
function_name->byte_length(), function_name->is_one_byte());
}
if (V8_UNLIKELY(TracingFlags::is_runtime_stats_enabled()) &&
did_preparse_successfully) {
if (runtime_call_stats_) {
runtime_call_stats_->CorrectCurrentCounterId(
RuntimeCallCounterId::kPreParseWithVariableResolution,
RuntimeCallStats::kThreadSpecific);
}
#ifdef V8_RUNTIME_CALL_STATS
if (did_preparse_successfully && runtime_call_stats_ &&
V8_UNLIKELY(TracingFlags::is_runtime_stats_enabled())) {
runtime_call_stats_->CorrectCurrentCounterId(
RuntimeCallCounterId::kPreParseWithVariableResolution,
RuntimeCallStats::kThreadSpecific);
}
#endif // V8_RUNTIME_CALL_STATS
// Validate function name. We can do this only after parsing the function,
// since the function can declare itself strict.
@ -3274,8 +3270,7 @@ void Parser::UpdateStatistics(Isolate* isolate, Handle<Script> script) {
void Parser::ParseOnBackground(ParseInfo* info, int start_position,
int end_position, int function_literal_id) {
RuntimeCallTimerScope runtimeTimer(
runtime_call_stats_, RuntimeCallCounterId::kParseBackgroundProgram);
RCS_SCOPE(runtime_call_stats_, RuntimeCallCounterId::kParseBackgroundProgram);
parsing_on_main_thread_ = false;
DCHECK_NULL(info->literal());

View File

@ -272,10 +272,9 @@ PreParser::Expression PreParser::ParseFunctionLiteral(
DCHECK_NE(FunctionSyntaxKind::kWrapped, function_syntax_kind);
// Function ::
// '(' FormalParameterList? ')' '{' FunctionBody '}'
RuntimeCallTimerScope runtime_timer(
runtime_call_stats_,
RuntimeCallCounterId::kPreParseWithVariableResolution,
RuntimeCallStats::kThreadSpecific);
RCS_SCOPE(runtime_call_stats_,
RuntimeCallCounterId::kPreParseWithVariableResolution,
RuntimeCallStats::kThreadSpecific);
base::ElapsedTimer timer;
if (V8_UNLIKELY(FLAG_log_function_events)) timer.Start();

View File

@ -382,10 +382,9 @@ DECLARATION_NODE_LIST(DEF_VISIT)
// Assumes code has been parsed. Mutates the AST, so the AST should not
// continue to be used in the case of failure.
bool Rewriter::Rewrite(ParseInfo* info) {
RuntimeCallTimerScope runtimeTimer(
info->runtime_call_stats(),
RuntimeCallCounterId::kCompileRewriteReturnResult,
RuntimeCallStats::kThreadSpecific);
RCS_SCOPE(info->runtime_call_stats(),
RuntimeCallCounterId::kCompileRewriteReturnResult,
RuntimeCallStats::kThreadSpecific);
FunctionLiteral* function = info->literal();
DCHECK_NOT_NULL(function);

View File

@ -215,8 +215,7 @@ class ChunkedStream {
const uint8_t* data = nullptr;
size_t length;
{
RuntimeCallTimerScope scope(stats,
RuntimeCallCounterId::kGetMoreDataCallback);
RCS_SCOPE(stats, RuntimeCallCounterId::kGetMoreDataCallback);
length = source_->GetMoreData(&data);
}
ProcessChunk(data, position, length);
@ -721,8 +720,7 @@ void Utf8ExternalStreamingStream::FillBufferFromCurrentChunk() {
}
bool Utf8ExternalStreamingStream::FetchChunk() {
RuntimeCallTimerScope scope(runtime_call_stats(),
RuntimeCallCounterId::kGetMoreDataCallback);
RCS_SCOPE(runtime_call_stats(), RuntimeCallCounterId::kGetMoreDataCallback);
DCHECK_EQ(current_.chunk_no, chunks_.size());
DCHECK(chunks_.empty() || chunks_.back().length != 0);

View File

@ -371,6 +371,7 @@ void ProfilerCodeObserver::CodeEventHandlerInternal(
}
void ProfilerCodeObserver::CreateEntriesForRuntimeCallStats() {
#ifdef V8_RUNTIME_CALL_STATS
RuntimeCallStats* rcs = isolate_->counters()->runtime_call_stats();
for (int i = 0; i < RuntimeCallStats::kNumberOfCounters; ++i) {
RuntimeCallCounter* counter = rcs->GetCounter(i);
@ -379,6 +380,7 @@ void ProfilerCodeObserver::CreateEntriesForRuntimeCallStats() {
"native V8Runtime");
code_map_.AddCode(reinterpret_cast<Address>(counter), entry, 1);
}
#endif // V8_RUNTIME_CALL_STATS
}
void ProfilerCodeObserver::LogBuiltins() {

View File

@ -287,15 +287,18 @@ bool TickSample::GetStackSample(Isolate* v8_isolate, RegisterState* regs,
frames[i] = reinterpret_cast<void*>(isolate->c_function());
i++;
}
#ifdef V8_RUNTIME_CALL_STATS
i::RuntimeCallTimer* timer =
isolate->counters()->runtime_call_stats()->current_timer();
#endif // V8_RUNTIME_CALL_STATS
for (; !it.done() && i < frames_limit; it.Advance()) {
#ifdef V8_RUNTIME_CALL_STATS
while (timer && reinterpret_cast<i::Address>(timer) < it.frame()->fp() &&
i < frames_limit) {
frames[i++] = reinterpret_cast<void*>(timer->counter());
timer = timer->parent();
}
#endif // V8_RUNTIME_CALL_STATS
if (i == frames_limit) break;
if (it.frame()->is_interpreted()) {

View File

@ -528,6 +528,7 @@ RUNTIME_FUNCTION(Runtime_IncrementUseCounter) {
RUNTIME_FUNCTION(Runtime_GetAndResetRuntimeCallStats) {
HandleScope scope(isolate);
DCHECK_LE(args.length(), 2);
#ifdef V8_RUNTIME_CALL_STATS
// Append any worker thread runtime call stats to the main table before
// printing.
isolate->counters()->worker_thread_runtime_call_stats()->AddToMainTable(
@ -570,6 +571,7 @@ RUNTIME_FUNCTION(Runtime_GetAndResetRuntimeCallStats) {
} else {
std::fflush(f);
}
#endif // V8_RUNTIME_CALL_STATS
return ReadOnlyRoots(isolate).undefined_value();
}

View File

@ -44,8 +44,7 @@ ScriptCompiler::CachedData* CodeSerializer::Serialize(
Isolate* isolate = info->GetIsolate();
TRACE_EVENT_CALL_STATS_SCOPED(isolate, "v8", "V8.Execute");
HistogramTimerScope histogram_timer(isolate->counters()->compile_serialize());
RuntimeCallTimerScope runtimeTimer(isolate,
RuntimeCallCounterId::kCompileSerialize);
RCS_SCOPE(isolate, RuntimeCallCounterId::kCompileSerialize);
TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"), "V8.CompileSerialize");
base::ElapsedTimer timer;

View File

@ -144,8 +144,7 @@ bool Snapshot::VersionIsValid(const v8::StartupData* data) {
bool Snapshot::Initialize(Isolate* isolate) {
if (!isolate->snapshot_available()) return false;
RuntimeCallTimerScope rcs_timer(isolate,
RuntimeCallCounterId::kDeserializeIsolate);
RCS_SCOPE(isolate, RuntimeCallCounterId::kDeserializeIsolate);
base::ElapsedTimer timer;
if (FLAG_profile_deserialization) timer.Start();
@ -173,8 +172,7 @@ MaybeHandle<Context> Snapshot::NewContextFromSnapshot(
Isolate* isolate, Handle<JSGlobalProxy> global_proxy, size_t context_index,
v8::DeserializeEmbedderFieldsCallback embedder_fields_deserializer) {
if (!isolate->snapshot_available()) return Handle<Context>();
RuntimeCallTimerScope rcs_timer(isolate,
RuntimeCallCounterId::kDeserializeContext);
RCS_SCOPE(isolate, RuntimeCallCounterId::kDeserializeContext);
base::ElapsedTimer timer;
if (FLAG_profile_deserialization) timer.Start();

View File

@ -20,6 +20,8 @@ v8::TracingController* TraceEventHelper::GetTracingController() {
return v8::internal::V8::GetCurrentPlatform()->GetTracingController();
}
#ifdef V8_RUNTIME_CALL_STATS
void CallStatsScopedTracer::AddEndTraceEvent() {
if (!has_parent_scope_ && p_data_->isolate) {
auto value = v8::tracing::TracedValue::Create();
@ -52,6 +54,8 @@ void CallStatsScopedTracer::Initialize(v8::internal::Isolate* isolate,
v8::internal::tracing::kGlobalScope, v8::internal::tracing::kNoId,
TRACE_EVENT_FLAG_NONE, v8::internal::tracing::kNoId);
}
#endif // defined(V8_RUNTIME_CALL_STATS)
#endif // !defined(V8_USE_PERFETTO)
} // namespace tracing

View File

@ -279,6 +279,7 @@ enum CategoryGroupEnabledFlags {
#define TRACE_EVENT_CALL_STATS_SCOPED(isolate, category_group, name) \
INTERNAL_TRACE_EVENT_CALL_STATS_SCOPED(isolate, category_group, name)
#ifdef V8_RUNTIME_CALL_STATS
#define INTERNAL_TRACE_EVENT_CALL_STATS_SCOPED(isolate, category_group, name) \
INTERNAL_TRACE_EVENT_GET_CATEGORY_INFO(category_group); \
v8::internal::tracing::CallStatsScopedTracer INTERNAL_TRACE_EVENT_UID( \
@ -288,6 +289,9 @@ enum CategoryGroupEnabledFlags {
.Initialize(isolate, INTERNAL_TRACE_EVENT_UID(category_group_enabled), \
name); \
}
#else // V8_RUNTIME_CALL_STATS
#define INTERNAL_TRACE_EVENT_CALL_STATS_SCOPED(isolate, category_group, name)
#endif // V8_RUNTIME_CALL_STATS
namespace v8 {
namespace internal {
@ -588,6 +592,7 @@ class ScopedTracer {
Data data_;
};
#ifdef V8_RUNTIME_CALL_STATS
// Do not use directly.
class CallStatsScopedTracer {
public:
@ -612,6 +617,7 @@ class CallStatsScopedTracer {
Data* p_data_;
Data data_;
};
#endif // defined(V8_RUNTIME_CALL_STATS)
} // namespace tracing
} // namespace internal
@ -619,6 +625,8 @@ class CallStatsScopedTracer {
#else // defined(V8_USE_PERFETTO)
#ifdef V8_RUNTIME_CALL_STATS
#define TRACE_EVENT_CALL_STATS_SCOPED(isolate, category, name) \
struct PERFETTO_UID(ScopedEvent) { \
struct ScopedStats { \
@ -650,6 +658,7 @@ class CallStatsScopedTracer {
{ isolate, 0 } \
}
#endif // defined(V8_RUNTIME_CALL_STATS)
#endif // defined(V8_USE_PERFETTO)
#endif // V8_TRACING_TRACE_EVENT_H_

View File

@ -64,6 +64,7 @@
#include "src/objects/transitions.h"
#include "src/regexp/regexp.h"
#include "src/snapshot/snapshot.h"
#include "src/tracing/tracing-category-observer.h"
#include "src/utils/ostreams.h"
#include "test/cctest/cctest.h"
#include "test/cctest/heap/heap-tester.h"

View File

@ -387,6 +387,10 @@ v8_source_set("unittests_sources") {
"zone/zone-unittest.cc",
]
if (v8_enable_runtime_call_stats) {
sources += [ "logging/runtime-call-stats-unittest.cc" ]
}
if (v8_enable_webassembly) {
sources += [
"../../test/common/wasm/wasm-macro-gen.h",

View File

@ -8,7 +8,6 @@
#include "src/base/atomic-utils.h"
#include "src/base/platform/time.h"
#include "src/handles/handles-inl.h"
#include "src/logging/counters-inl.h"
#include "src/logging/counters.h"
#include "src/objects/objects-inl.h"
#include "src/tracing/tracing-category-observer.h"
@ -46,104 +45,6 @@ class AggregatedMemoryHistogramTest : public ::testing::Test {
MockHistogram mock_;
};
static base::TimeTicks runtime_call_stats_test_time_ = base::TimeTicks();
// Time source used for the RuntimeCallTimer during tests. We cannot rely on
// the native timer since it's too unpredictable on the build bots.
static base::TimeTicks RuntimeCallStatsTestNow() {
return runtime_call_stats_test_time_;
}
class RuntimeCallStatsTest : public TestWithNativeContext {
public:
RuntimeCallStatsTest() {
TracingFlags::runtime_stats.store(
v8::tracing::TracingCategoryObserver::ENABLED_BY_NATIVE,
std::memory_order_relaxed);
// We need to set {time_} to a non-zero value since it would otherwise
// cause runtime call timers to think they are uninitialized.
Sleep(1);
stats()->Reset();
}
~RuntimeCallStatsTest() override {
// Disable RuntimeCallStats before tearing down the isolate to prevent
// printing the tests table. Comment the following line for debugging
// purposes.
TracingFlags::runtime_stats.store(0, std::memory_order_relaxed);
}
static void SetUpTestCase() {
TestWithIsolate::SetUpTestCase();
// Use a custom time source to precisely emulate system time.
RuntimeCallTimer::Now = &RuntimeCallStatsTestNow;
}
static void TearDownTestCase() {
TestWithIsolate::TearDownTestCase();
// Restore the original time source.
RuntimeCallTimer::Now = &base::TimeTicks::HighResolutionNow;
}
RuntimeCallStats* stats() {
return isolate()->counters()->runtime_call_stats();
}
RuntimeCallCounterId counter_id() {
return RuntimeCallCounterId::kTestCounter1;
}
RuntimeCallCounterId counter_id2() {
return RuntimeCallCounterId::kTestCounter2;
}
RuntimeCallCounterId counter_id3() {
return RuntimeCallCounterId::kTestCounter3;
}
RuntimeCallCounter* js_counter() {
return stats()->GetCounter(RuntimeCallCounterId::kJS_Execution);
}
RuntimeCallCounter* counter() { return stats()->GetCounter(counter_id()); }
RuntimeCallCounter* counter2() { return stats()->GetCounter(counter_id2()); }
RuntimeCallCounter* counter3() { return stats()->GetCounter(counter_id3()); }
void Sleep(int64_t microseconds) {
base::TimeDelta delta = base::TimeDelta::FromMicroseconds(microseconds);
time_ += delta;
runtime_call_stats_test_time_ =
base::TimeTicks::FromInternalValue(time_.InMicroseconds());
}
private:
base::TimeDelta time_;
};
// Temporarily use the native time to modify the test time.
class V8_NODISCARD ElapsedTimeScope {
public:
explicit ElapsedTimeScope(RuntimeCallStatsTest* test) : test_(test) {
timer_.Start();
}
~ElapsedTimeScope() { test_->Sleep(timer_.Elapsed().InMicroseconds()); }
private:
base::ElapsedTimer timer_;
RuntimeCallStatsTest* test_;
};
// Temporarily use the default time source.
class V8_NODISCARD NativeTimeScope {
public:
NativeTimeScope() {
CHECK_EQ(RuntimeCallTimer::Now, &RuntimeCallStatsTestNow);
RuntimeCallTimer::Now = &base::TimeTicks::HighResolutionNow;
}
~NativeTimeScope() {
CHECK_EQ(RuntimeCallTimer::Now, &base::TimeTicks::HighResolutionNow);
RuntimeCallTimer::Now = &RuntimeCallStatsTestNow;
}
};
class SnapshotNativeCounterTest : public TestWithNativeContextAndCounters {
public:
SnapshotNativeCounterTest() {}
@ -310,523 +211,6 @@ TEST_F(AggregatedMemoryHistogramTest, ManySamples2) {
}
}
TEST_F(RuntimeCallStatsTest, RuntimeCallTimer) {
RuntimeCallTimer timer;
Sleep(50);
stats()->Enter(&timer, counter_id());
EXPECT_EQ(counter(), timer.counter());
EXPECT_EQ(nullptr, timer.parent());
EXPECT_TRUE(timer.IsStarted());
EXPECT_EQ(&timer, stats()->current_timer());
Sleep(100);
stats()->Leave(&timer);
Sleep(50);
EXPECT_FALSE(timer.IsStarted());
EXPECT_EQ(1, counter()->count());
EXPECT_EQ(100, counter()->time().InMicroseconds());
}
TEST_F(RuntimeCallStatsTest, RuntimeCallTimerSubTimer) {
RuntimeCallTimer timer;
RuntimeCallTimer timer2;
stats()->Enter(&timer, counter_id());
EXPECT_TRUE(timer.IsStarted());
EXPECT_FALSE(timer2.IsStarted());
EXPECT_EQ(counter(), timer.counter());
EXPECT_EQ(nullptr, timer.parent());
EXPECT_EQ(&timer, stats()->current_timer());
Sleep(50);
stats()->Enter(&timer2, counter_id2());
// timer 1 is paused, while timer 2 is active.
EXPECT_TRUE(timer2.IsStarted());
EXPECT_EQ(counter(), timer.counter());
EXPECT_EQ(counter2(), timer2.counter());
EXPECT_EQ(nullptr, timer.parent());
EXPECT_EQ(&timer, timer2.parent());
EXPECT_EQ(&timer2, stats()->current_timer());
Sleep(100);
stats()->Leave(&timer2);
// The subtimer subtracts its time from the parent timer.
EXPECT_TRUE(timer.IsStarted());
EXPECT_FALSE(timer2.IsStarted());
EXPECT_EQ(0, counter()->count());
EXPECT_EQ(1, counter2()->count());
EXPECT_EQ(0, counter()->time().InMicroseconds());
EXPECT_EQ(100, counter2()->time().InMicroseconds());
EXPECT_EQ(&timer, stats()->current_timer());
Sleep(100);
stats()->Leave(&timer);
EXPECT_FALSE(timer.IsStarted());
EXPECT_EQ(1, counter()->count());
EXPECT_EQ(1, counter2()->count());
EXPECT_EQ(150, counter()->time().InMicroseconds());
EXPECT_EQ(100, counter2()->time().InMicroseconds());
EXPECT_EQ(nullptr, stats()->current_timer());
}
TEST_F(RuntimeCallStatsTest, RuntimeCallTimerRecursive) {
RuntimeCallTimer timer;
RuntimeCallTimer timer2;
stats()->Enter(&timer, counter_id());
EXPECT_EQ(counter(), timer.counter());
EXPECT_EQ(nullptr, timer.parent());
EXPECT_TRUE(timer.IsStarted());
EXPECT_EQ(&timer, stats()->current_timer());
stats()->Enter(&timer2, counter_id());
EXPECT_EQ(counter(), timer2.counter());
EXPECT_EQ(nullptr, timer.parent());
EXPECT_EQ(&timer, timer2.parent());
EXPECT_TRUE(timer2.IsStarted());
EXPECT_EQ(&timer2, stats()->current_timer());
Sleep(50);
stats()->Leave(&timer2);
EXPECT_EQ(nullptr, timer.parent());
EXPECT_FALSE(timer2.IsStarted());
EXPECT_TRUE(timer.IsStarted());
EXPECT_EQ(1, counter()->count());
EXPECT_EQ(50, counter()->time().InMicroseconds());
Sleep(100);
stats()->Leave(&timer);
EXPECT_FALSE(timer.IsStarted());
EXPECT_EQ(2, counter()->count());
EXPECT_EQ(150, counter()->time().InMicroseconds());
}
TEST_F(RuntimeCallStatsTest, RuntimeCallTimerScope) {
{
RuntimeCallTimerScope scope(stats(), counter_id());
Sleep(50);
}
Sleep(100);
EXPECT_EQ(1, counter()->count());
EXPECT_EQ(50, counter()->time().InMicroseconds());
{
RuntimeCallTimerScope scope(stats(), counter_id());
Sleep(50);
}
EXPECT_EQ(2, counter()->count());
EXPECT_EQ(100, counter()->time().InMicroseconds());
}
TEST_F(RuntimeCallStatsTest, RuntimeCallTimerScopeRecursive) {
{
RuntimeCallTimerScope scope(stats(), counter_id());
Sleep(50);
EXPECT_EQ(0, counter()->count());
EXPECT_EQ(0, counter()->time().InMicroseconds());
{
RuntimeCallTimerScope scope(stats(), counter_id());
Sleep(50);
}
EXPECT_EQ(1, counter()->count());
EXPECT_EQ(50, counter()->time().InMicroseconds());
}
EXPECT_EQ(2, counter()->count());
EXPECT_EQ(100, counter()->time().InMicroseconds());
}
TEST_F(RuntimeCallStatsTest, RenameTimer) {
{
RuntimeCallTimerScope scope(stats(), counter_id());
Sleep(50);
EXPECT_EQ(0, counter()->count());
EXPECT_EQ(0, counter2()->count());
EXPECT_EQ(0, counter()->time().InMicroseconds());
EXPECT_EQ(0, counter2()->time().InMicroseconds());
{
RuntimeCallTimerScope scope(stats(), counter_id());
Sleep(100);
}
CHANGE_CURRENT_RUNTIME_COUNTER(stats(),
RuntimeCallCounterId::kTestCounter2);
EXPECT_EQ(1, counter()->count());
EXPECT_EQ(0, counter2()->count());
EXPECT_EQ(100, counter()->time().InMicroseconds());
EXPECT_EQ(0, counter2()->time().InMicroseconds());
}
EXPECT_EQ(1, counter()->count());
EXPECT_EQ(1, counter2()->count());
EXPECT_EQ(100, counter()->time().InMicroseconds());
EXPECT_EQ(50, counter2()->time().InMicroseconds());
}
TEST_F(RuntimeCallStatsTest, BasicPrintAndSnapshot) {
std::ostringstream out;
stats()->Print(out);
EXPECT_EQ(0, counter()->count());
EXPECT_EQ(0, counter2()->count());
EXPECT_EQ(0, counter3()->count());
EXPECT_EQ(0, counter()->time().InMicroseconds());
EXPECT_EQ(0, counter2()->time().InMicroseconds());
EXPECT_EQ(0, counter3()->time().InMicroseconds());
{
RuntimeCallTimerScope scope(stats(), counter_id());
Sleep(50);
stats()->Print(out);
}
stats()->Print(out);
EXPECT_EQ(1, counter()->count());
EXPECT_EQ(0, counter2()->count());
EXPECT_EQ(0, counter3()->count());
EXPECT_EQ(50, counter()->time().InMicroseconds());
EXPECT_EQ(0, counter2()->time().InMicroseconds());
EXPECT_EQ(0, counter3()->time().InMicroseconds());
}
TEST_F(RuntimeCallStatsTest, PrintAndSnapshot) {
{
RuntimeCallTimerScope scope(stats(), counter_id());
Sleep(100);
EXPECT_EQ(0, counter()->count());
EXPECT_EQ(0, counter()->time().InMicroseconds());
{
RuntimeCallTimerScope scope(stats(), counter_id2());
EXPECT_EQ(0, counter2()->count());
EXPECT_EQ(0, counter2()->time().InMicroseconds());
Sleep(50);
// This calls Snapshot on the currently active timer and synchronizes and
// commits the whole timer stack.
std::ostringstream out;
stats()->Print(out);
EXPECT_EQ(0, counter()->count());
EXPECT_EQ(0, counter2()->count());
EXPECT_EQ(100, counter()->time().InMicroseconds());
EXPECT_EQ(50, counter2()->time().InMicroseconds());
// Calling Print several times shouldn't have a (big) impact on the
// measured times.
stats()->Print(out);
EXPECT_EQ(0, counter()->count());
EXPECT_EQ(0, counter2()->count());
EXPECT_EQ(100, counter()->time().InMicroseconds());
EXPECT_EQ(50, counter2()->time().InMicroseconds());
Sleep(50);
stats()->Print(out);
EXPECT_EQ(0, counter()->count());
EXPECT_EQ(0, counter2()->count());
EXPECT_EQ(100, counter()->time().InMicroseconds());
EXPECT_EQ(100, counter2()->time().InMicroseconds());
Sleep(50);
}
Sleep(50);
EXPECT_EQ(0, counter()->count());
EXPECT_EQ(1, counter2()->count());
EXPECT_EQ(100, counter()->time().InMicroseconds());
EXPECT_EQ(150, counter2()->time().InMicroseconds());
Sleep(50);
}
EXPECT_EQ(1, counter()->count());
EXPECT_EQ(1, counter2()->count());
EXPECT_EQ(200, counter()->time().InMicroseconds());
EXPECT_EQ(150, counter2()->time().InMicroseconds());
}
TEST_F(RuntimeCallStatsTest, NestedScopes) {
{
RuntimeCallTimerScope scope(stats(), counter_id());
Sleep(100);
{
RuntimeCallTimerScope scope(stats(), counter_id2());
Sleep(100);
{
RuntimeCallTimerScope scope(stats(), counter_id3());
Sleep(50);
}
Sleep(50);
{
RuntimeCallTimerScope scope(stats(), counter_id3());
Sleep(50);
}
Sleep(50);
}
Sleep(100);
{
RuntimeCallTimerScope scope(stats(), counter_id2());
Sleep(100);
}
Sleep(50);
}
EXPECT_EQ(1, counter()->count());
EXPECT_EQ(2, counter2()->count());
EXPECT_EQ(2, counter3()->count());
EXPECT_EQ(250, counter()->time().InMicroseconds());
EXPECT_EQ(300, counter2()->time().InMicroseconds());
EXPECT_EQ(100, counter3()->time().InMicroseconds());
}
TEST_F(RuntimeCallStatsTest, BasicJavaScript) {
RuntimeCallCounter* counter =
stats()->GetCounter(RuntimeCallCounterId::kJS_Execution);
EXPECT_EQ(0, counter->count());
EXPECT_EQ(0, counter->time().InMicroseconds());
{
NativeTimeScope native_timer_scope;
RunJS("function f() { return 1; };");
}
EXPECT_EQ(1, counter->count());
int64_t time = counter->time().InMicroseconds();
EXPECT_LT(0, time);
{
NativeTimeScope native_timer_scope;
RunJS("f();");
}
EXPECT_EQ(2, counter->count());
EXPECT_LE(time, counter->time().InMicroseconds());
}
TEST_F(RuntimeCallStatsTest, FunctionLengthGetter) {
RuntimeCallCounter* getter_counter =
stats()->GetCounter(RuntimeCallCounterId::kFunctionLengthGetter);
EXPECT_EQ(0, getter_counter->count());
EXPECT_EQ(0, js_counter()->count());
EXPECT_EQ(0, getter_counter->time().InMicroseconds());
EXPECT_EQ(0, js_counter()->time().InMicroseconds());
{
NativeTimeScope native_timer_scope;
RunJS("function f(array) { return array.length; };");
}
EXPECT_EQ(0, getter_counter->count());
EXPECT_EQ(1, js_counter()->count());
EXPECT_EQ(0, getter_counter->time().InMicroseconds());
int64_t js_time = js_counter()->time().InMicroseconds();
EXPECT_LT(0, js_time);
{
NativeTimeScope native_timer_scope;
RunJS("f.length;");
}
EXPECT_EQ(1, getter_counter->count());
EXPECT_EQ(2, js_counter()->count());
EXPECT_LE(0, getter_counter->time().InMicroseconds());
EXPECT_LE(js_time, js_counter()->time().InMicroseconds());
{
NativeTimeScope native_timer_scope;
RunJS("for (let i = 0; i < 50; i++) { f.length };");
}
EXPECT_EQ(51, getter_counter->count());
EXPECT_EQ(3, js_counter()->count());
{
NativeTimeScope native_timer_scope;
RunJS("for (let i = 0; i < 1000; i++) { f.length; };");
}
EXPECT_EQ(1051, getter_counter->count());
EXPECT_EQ(4, js_counter()->count());
}
namespace {
static RuntimeCallStatsTest* current_test;
static const int kCustomCallbackTime = 1234;
static void CustomCallback(const v8::FunctionCallbackInfo<v8::Value>& info) {
RuntimeCallTimerScope scope(current_test->stats(),
current_test->counter_id2());
current_test->Sleep(kCustomCallbackTime);
}
} // namespace
TEST_F(RuntimeCallStatsTest, CallbackFunction) {
FLAG_allow_natives_syntax = true;
FLAG_incremental_marking = false;
RuntimeCallCounter* callback_counter =
stats()->GetCounter(RuntimeCallCounterId::kFunctionCallback);
current_test = this;
// Set up a function template with a custom callback.
v8::Isolate* isolate = v8_isolate();
v8::HandleScope scope(isolate);
v8::Local<v8::ObjectTemplate> object_template =
v8::ObjectTemplate::New(isolate);
object_template->Set(isolate, "callback",
v8::FunctionTemplate::New(isolate, CustomCallback));
v8::Local<v8::Object> object =
object_template->NewInstance(v8_context()).ToLocalChecked();
SetGlobalProperty("custom_object", object);
EXPECT_EQ(0, js_counter()->count());
EXPECT_EQ(0, counter()->count());
EXPECT_EQ(0, callback_counter->count());
EXPECT_EQ(0, counter2()->count());
{
RuntimeCallTimerScope scope(stats(), counter_id());
Sleep(100);
RunJS("custom_object.callback();");
}
EXPECT_EQ(1, js_counter()->count());
EXPECT_EQ(1, counter()->count());
EXPECT_EQ(1, callback_counter->count());
EXPECT_EQ(1, counter2()->count());
// Given that no native timers are used, only the two scopes explicitly
// mentioned above will track the time.
EXPECT_EQ(0, js_counter()->time().InMicroseconds());
EXPECT_EQ(0, callback_counter->time().InMicroseconds());
EXPECT_EQ(100, counter()->time().InMicroseconds());
EXPECT_EQ(kCustomCallbackTime, counter2()->time().InMicroseconds());
RunJS("for (let i = 0; i < 9; i++) { custom_object.callback(); };");
EXPECT_EQ(2, js_counter()->count());
EXPECT_EQ(1, counter()->count());
EXPECT_EQ(10, callback_counter->count());
EXPECT_EQ(10, counter2()->count());
EXPECT_EQ(0, js_counter()->time().InMicroseconds());
EXPECT_EQ(0, callback_counter->time().InMicroseconds());
EXPECT_EQ(100, counter()->time().InMicroseconds());
EXPECT_EQ(kCustomCallbackTime * 10, counter2()->time().InMicroseconds());
RunJS("for (let i = 0; i < 4000; i++) { custom_object.callback(); };");
EXPECT_EQ(3, js_counter()->count());
EXPECT_EQ(1, counter()->count());
EXPECT_EQ(4010, callback_counter->count());
EXPECT_EQ(4010, counter2()->count());
EXPECT_EQ(0, js_counter()->time().InMicroseconds());
EXPECT_EQ(0, callback_counter->time().InMicroseconds());
EXPECT_EQ(100, counter()->time().InMicroseconds());
EXPECT_EQ(kCustomCallbackTime * 4010, counter2()->time().InMicroseconds());
// Check that the FunctionCallback tracing also works properly
// when the `callback` is called from optimized code.
RunJS(
"function wrap(o) { return o.callback(); };\n"
"%PrepareFunctionForOptimization(wrap);\n"
"wrap(custom_object);\n"
"wrap(custom_object);\n"
"%OptimizeFunctionOnNextCall(wrap);\n"
"wrap(custom_object);\n");
EXPECT_EQ(4, js_counter()->count());
EXPECT_EQ(1, counter()->count());
EXPECT_EQ(4013, callback_counter->count());
EXPECT_EQ(4013, counter2()->count());
EXPECT_EQ(0, js_counter()->time().InMicroseconds());
EXPECT_EQ(0, callback_counter->time().InMicroseconds());
EXPECT_EQ(100, counter()->time().InMicroseconds());
EXPECT_EQ(kCustomCallbackTime * 4013, counter2()->time().InMicroseconds());
}
TEST_F(RuntimeCallStatsTest, ApiGetter) {
FLAG_allow_natives_syntax = true;
FLAG_incremental_marking = false;
RuntimeCallCounter* callback_counter =
stats()->GetCounter(RuntimeCallCounterId::kFunctionCallback);
current_test = this;
// Set up a function template with an api accessor.
v8::Isolate* isolate = v8_isolate();
v8::HandleScope scope(isolate);
v8::Local<v8::ObjectTemplate> object_template =
v8::ObjectTemplate::New(isolate);
object_template->SetAccessorProperty(
NewString("apiGetter"),
v8::FunctionTemplate::New(isolate, CustomCallback));
v8::Local<v8::Object> object =
object_template->NewInstance(v8_context()).ToLocalChecked();
SetGlobalProperty("custom_object", object);
// TODO(cbruni): Check api accessor timer (one above the custom callback).
EXPECT_EQ(0, js_counter()->count());
EXPECT_EQ(0, counter()->count());
EXPECT_EQ(0, callback_counter->count());
EXPECT_EQ(0, counter2()->count());
{
RuntimeCallTimerScope scope(stats(), counter_id());
Sleep(100);
RunJS("custom_object.apiGetter;");
}
EXPECT_EQ(1, js_counter()->count());
EXPECT_EQ(1, counter()->count());
EXPECT_EQ(1, callback_counter->count());
EXPECT_EQ(1, counter2()->count());
// Given that no native timers are used, only the two scopes explicitly
// mentioned above will track the time.
EXPECT_EQ(0, js_counter()->time().InMicroseconds());
EXPECT_EQ(100, counter()->time().InMicroseconds());
EXPECT_EQ(0, callback_counter->time().InMicroseconds());
EXPECT_EQ(kCustomCallbackTime, counter2()->time().InMicroseconds());
RunJS("for (let i = 0; i < 9; i++) { custom_object.apiGetter };");
EXPECT_EQ(2, js_counter()->count());
EXPECT_EQ(1, counter()->count());
EXPECT_EQ(10, callback_counter->count());
EXPECT_EQ(10, counter2()->count());
EXPECT_EQ(0, js_counter()->time().InMicroseconds());
EXPECT_EQ(100, counter()->time().InMicroseconds());
EXPECT_EQ(0, callback_counter->time().InMicroseconds());
EXPECT_EQ(kCustomCallbackTime * 10, counter2()->time().InMicroseconds());
RunJS("for (let i = 0; i < 4000; i++) { custom_object.apiGetter };");
EXPECT_EQ(3, js_counter()->count());
EXPECT_EQ(1, counter()->count());
EXPECT_EQ(4010, callback_counter->count());
EXPECT_EQ(4010, counter2()->count());
EXPECT_EQ(0, js_counter()->time().InMicroseconds());
EXPECT_EQ(100, counter()->time().InMicroseconds());
EXPECT_EQ(0, callback_counter->time().InMicroseconds());
EXPECT_EQ(kCustomCallbackTime * 4010, counter2()->time().InMicroseconds());
// Check that the FunctionCallback tracing also works properly
// when the `apiGetter` is called from optimized code.
RunJS(
"function wrap(o) { return o.apiGetter; };\n"
"%PrepareFunctionForOptimization(wrap);\n"
"wrap(custom_object);\n"
"wrap(custom_object);\n"
"%OptimizeFunctionOnNextCall(wrap);\n"
"wrap(custom_object);\n");
EXPECT_EQ(4, js_counter()->count());
EXPECT_EQ(1, counter()->count());
EXPECT_EQ(4013, callback_counter->count());
EXPECT_EQ(4013, counter2()->count());
EXPECT_EQ(0, js_counter()->time().InMicroseconds());
EXPECT_EQ(100, counter()->time().InMicroseconds());
EXPECT_EQ(0, callback_counter->time().InMicroseconds());
EXPECT_EQ(kCustomCallbackTime * 4013, counter2()->time().InMicroseconds());
}
TEST_F(RuntimeCallStatsTest, GarbageCollection) {
FLAG_expose_gc = true;
v8::Isolate* isolate = v8_isolate();
RunJS(
"let root = [];"
"for (let i = 0; i < 10; i++) root.push((new Array(1000)).fill(0));"
"root.push((new Array(1000000)).fill(0));"
"((new Array(1000000)).fill(0));");
isolate->RequestGarbageCollectionForTesting(
v8::Isolate::kFullGarbageCollection);
isolate->RequestGarbageCollectionForTesting(
v8::Isolate::kFullGarbageCollection);
}
TEST_F(SnapshotNativeCounterTest, StringAddNative) {
RunJS("let s = 'hello, ' + 'world!'");

View File

@ -0,0 +1,639 @@
// Copyright 2021 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/logging/runtime-call-stats.h"
#include "src/api/api-inl.h"
#include "src/base/atomic-utils.h"
#include "src/base/platform/time.h"
#include "src/handles/handles-inl.h"
#include "src/logging/counters.h"
#include "src/objects/objects-inl.h"
#include "src/tracing/tracing-category-observer.h"
#include "test/unittests/test-utils.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace v8 {
namespace internal {
namespace {
static base::TimeTicks runtime_call_stats_test_time_ = base::TimeTicks();
// Time source used for the RuntimeCallTimer during tests. We cannot rely on
// the native timer since it's too unpredictable on the build bots.
static base::TimeTicks RuntimeCallStatsTestNow() {
return runtime_call_stats_test_time_;
}
class RuntimeCallStatsTest : public TestWithNativeContext {
public:
RuntimeCallStatsTest() {
TracingFlags::runtime_stats.store(
v8::tracing::TracingCategoryObserver::ENABLED_BY_NATIVE,
std::memory_order_relaxed);
// We need to set {time_} to a non-zero value since it would otherwise
// cause runtime call timers to think they are uninitialized.
Sleep(1);
stats()->Reset();
}
~RuntimeCallStatsTest() override {
// Disable RuntimeCallStats before tearing down the isolate to prevent
// printing the tests table. Comment the following line for debugging
// purposes.
TracingFlags::runtime_stats.store(0, std::memory_order_relaxed);
}
static void SetUpTestCase() {
TestWithIsolate::SetUpTestCase();
// Use a custom time source to precisely emulate system time.
RuntimeCallTimer::Now = &RuntimeCallStatsTestNow;
}
static void TearDownTestCase() {
TestWithIsolate::TearDownTestCase();
// Restore the original time source.
RuntimeCallTimer::Now = &base::TimeTicks::HighResolutionNow;
}
RuntimeCallStats* stats() {
return isolate()->counters()->runtime_call_stats();
}
RuntimeCallCounterId counter_id() {
return RuntimeCallCounterId::kTestCounter1;
}
RuntimeCallCounterId counter_id2() {
return RuntimeCallCounterId::kTestCounter2;
}
RuntimeCallCounterId counter_id3() {
return RuntimeCallCounterId::kTestCounter3;
}
RuntimeCallCounter* js_counter() {
return stats()->GetCounter(RuntimeCallCounterId::kJS_Execution);
}
RuntimeCallCounter* counter() { return stats()->GetCounter(counter_id()); }
RuntimeCallCounter* counter2() { return stats()->GetCounter(counter_id2()); }
RuntimeCallCounter* counter3() { return stats()->GetCounter(counter_id3()); }
void Sleep(int64_t microseconds) {
base::TimeDelta delta = base::TimeDelta::FromMicroseconds(microseconds);
time_ += delta;
runtime_call_stats_test_time_ =
base::TimeTicks::FromInternalValue(time_.InMicroseconds());
}
private:
base::TimeDelta time_;
};
// Temporarily use the native time to modify the test time.
class V8_NODISCARD ElapsedTimeScope {
public:
explicit ElapsedTimeScope(RuntimeCallStatsTest* test) : test_(test) {
timer_.Start();
}
~ElapsedTimeScope() { test_->Sleep(timer_.Elapsed().InMicroseconds()); }
private:
base::ElapsedTimer timer_;
RuntimeCallStatsTest* test_;
};
// Temporarily use the default time source.
class V8_NODISCARD NativeTimeScope {
public:
NativeTimeScope() {
CHECK_EQ(RuntimeCallTimer::Now, &RuntimeCallStatsTestNow);
RuntimeCallTimer::Now = &base::TimeTicks::HighResolutionNow;
}
~NativeTimeScope() {
CHECK_EQ(RuntimeCallTimer::Now, &base::TimeTicks::HighResolutionNow);
RuntimeCallTimer::Now = &RuntimeCallStatsTestNow;
}
};
} // namespace
TEST_F(RuntimeCallStatsTest, RuntimeCallTimer) {
RuntimeCallTimer timer;
Sleep(50);
stats()->Enter(&timer, counter_id());
EXPECT_EQ(counter(), timer.counter());
EXPECT_EQ(nullptr, timer.parent());
EXPECT_TRUE(timer.IsStarted());
EXPECT_EQ(&timer, stats()->current_timer());
Sleep(100);
stats()->Leave(&timer);
Sleep(50);
EXPECT_FALSE(timer.IsStarted());
EXPECT_EQ(1, counter()->count());
EXPECT_EQ(100, counter()->time().InMicroseconds());
}
TEST_F(RuntimeCallStatsTest, RuntimeCallTimerSubTimer) {
RuntimeCallTimer timer;
RuntimeCallTimer timer2;
stats()->Enter(&timer, counter_id());
EXPECT_TRUE(timer.IsStarted());
EXPECT_FALSE(timer2.IsStarted());
EXPECT_EQ(counter(), timer.counter());
EXPECT_EQ(nullptr, timer.parent());
EXPECT_EQ(&timer, stats()->current_timer());
Sleep(50);
stats()->Enter(&timer2, counter_id2());
// timer 1 is paused, while timer 2 is active.
EXPECT_TRUE(timer2.IsStarted());
EXPECT_EQ(counter(), timer.counter());
EXPECT_EQ(counter2(), timer2.counter());
EXPECT_EQ(nullptr, timer.parent());
EXPECT_EQ(&timer, timer2.parent());
EXPECT_EQ(&timer2, stats()->current_timer());
Sleep(100);
stats()->Leave(&timer2);
// The subtimer subtracts its time from the parent timer.
EXPECT_TRUE(timer.IsStarted());
EXPECT_FALSE(timer2.IsStarted());
EXPECT_EQ(0, counter()->count());
EXPECT_EQ(1, counter2()->count());
EXPECT_EQ(0, counter()->time().InMicroseconds());
EXPECT_EQ(100, counter2()->time().InMicroseconds());
EXPECT_EQ(&timer, stats()->current_timer());
Sleep(100);
stats()->Leave(&timer);
EXPECT_FALSE(timer.IsStarted());
EXPECT_EQ(1, counter()->count());
EXPECT_EQ(1, counter2()->count());
EXPECT_EQ(150, counter()->time().InMicroseconds());
EXPECT_EQ(100, counter2()->time().InMicroseconds());
EXPECT_EQ(nullptr, stats()->current_timer());
}
TEST_F(RuntimeCallStatsTest, RuntimeCallTimerRecursive) {
RuntimeCallTimer timer;
RuntimeCallTimer timer2;
stats()->Enter(&timer, counter_id());
EXPECT_EQ(counter(), timer.counter());
EXPECT_EQ(nullptr, timer.parent());
EXPECT_TRUE(timer.IsStarted());
EXPECT_EQ(&timer, stats()->current_timer());
stats()->Enter(&timer2, counter_id());
EXPECT_EQ(counter(), timer2.counter());
EXPECT_EQ(nullptr, timer.parent());
EXPECT_EQ(&timer, timer2.parent());
EXPECT_TRUE(timer2.IsStarted());
EXPECT_EQ(&timer2, stats()->current_timer());
Sleep(50);
stats()->Leave(&timer2);
EXPECT_EQ(nullptr, timer.parent());
EXPECT_FALSE(timer2.IsStarted());
EXPECT_TRUE(timer.IsStarted());
EXPECT_EQ(1, counter()->count());
EXPECT_EQ(50, counter()->time().InMicroseconds());
Sleep(100);
stats()->Leave(&timer);
EXPECT_FALSE(timer.IsStarted());
EXPECT_EQ(2, counter()->count());
EXPECT_EQ(150, counter()->time().InMicroseconds());
}
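// RCS_SCOPE enters a timer when constructed and leaves it when destructed,
// so only time slept inside the braces is attributed to the counter.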
TEST_F(RuntimeCallStatsTest, RuntimeCallTimerScope) {
{
RCS_SCOPE(stats(), counter_id());
Sleep(50);
}
Sleep(100);
EXPECT_EQ(1, counter()->count());
EXPECT_EQ(50, counter()->time().InMicroseconds());
{
RCS_SCOPE(stats(), counter_id());
Sleep(50);
}
EXPECT_EQ(2, counter()->count());
EXPECT_EQ(100, counter()->time().InMicroseconds());
}
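// With nested RCS_SCOPEs on the same counter, counts and times only become
// visible once the corresponding scope has been destructed.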
TEST_F(RuntimeCallStatsTest, RuntimeCallTimerScopeRecursive) {
{
RCS_SCOPE(stats(), counter_id());
Sleep(50);
EXPECT_EQ(0, counter()->count());
EXPECT_EQ(0, counter()->time().InMicroseconds());
{
RCS_SCOPE(stats(), counter_id());
Sleep(50);
}
EXPECT_EQ(1, counter()->count());
EXPECT_EQ(50, counter()->time().InMicroseconds());
}
EXPECT_EQ(2, counter()->count());
EXPECT_EQ(100, counter()->time().InMicroseconds());
}
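// CHANGE_CURRENT_RUNTIME_COUNTER retargets the currently running timer, so
// the 50us accrued in the outer scope is attributed to kTestCounter2 once
// that scope ends.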
TEST_F(RuntimeCallStatsTest, RenameTimer) {
{
RCS_SCOPE(stats(), counter_id());
Sleep(50);
EXPECT_EQ(0, counter()->count());
EXPECT_EQ(0, counter2()->count());
EXPECT_EQ(0, counter()->time().InMicroseconds());
EXPECT_EQ(0, counter2()->time().InMicroseconds());
{
RCS_SCOPE(stats(), counter_id());
Sleep(100);
}
CHANGE_CURRENT_RUNTIME_COUNTER(stats(),
RuntimeCallCounterId::kTestCounter2);
EXPECT_EQ(1, counter()->count());
EXPECT_EQ(0, counter2()->count());
EXPECT_EQ(100, counter()->time().InMicroseconds());
EXPECT_EQ(0, counter2()->time().InMicroseconds());
}
EXPECT_EQ(1, counter()->count());
EXPECT_EQ(1, counter2()->count());
EXPECT_EQ(100, counter()->time().InMicroseconds());
EXPECT_EQ(50, counter2()->time().InMicroseconds());
}
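// Printing with no timer running, and again while one is active, must not
// disturb the final numbers: only the 50us slept inside the scope counts.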
TEST_F(RuntimeCallStatsTest, BasicPrintAndSnapshot) {
std::ostringstream out;
stats()->Print(out);
EXPECT_EQ(0, counter()->count());
EXPECT_EQ(0, counter2()->count());
EXPECT_EQ(0, counter3()->count());
EXPECT_EQ(0, counter()->time().InMicroseconds());
EXPECT_EQ(0, counter2()->time().InMicroseconds());
EXPECT_EQ(0, counter3()->time().InMicroseconds());
{
RCS_SCOPE(stats(), counter_id());
Sleep(50);
stats()->Print(out);
}
stats()->Print(out);
EXPECT_EQ(1, counter()->count());
EXPECT_EQ(0, counter2()->count());
EXPECT_EQ(0, counter3()->count());
EXPECT_EQ(50, counter()->time().InMicroseconds());
EXPECT_EQ(0, counter2()->time().InMicroseconds());
EXPECT_EQ(0, counter3()->time().InMicroseconds());
}
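// Print() snapshots the active timer stack: elapsed time is committed to
// the counters right away, while the call counts are only bumped once the
// corresponding scope ends, as the expectations below show.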
TEST_F(RuntimeCallStatsTest, PrintAndSnapshot) {
{
RCS_SCOPE(stats(), counter_id());
Sleep(100);
EXPECT_EQ(0, counter()->count());
EXPECT_EQ(0, counter()->time().InMicroseconds());
{
RCS_SCOPE(stats(), counter_id2());
EXPECT_EQ(0, counter2()->count());
EXPECT_EQ(0, counter2()->time().InMicroseconds());
Sleep(50);
// This calls Snapshot on the currently active timer, synchronizing and
// committing the whole timer stack.
std::ostringstream out;
stats()->Print(out);
EXPECT_EQ(0, counter()->count());
EXPECT_EQ(0, counter2()->count());
EXPECT_EQ(100, counter()->time().InMicroseconds());
EXPECT_EQ(50, counter2()->time().InMicroseconds());
// Calling Print several times shouldn't have a (big) impact on the
// measured times.
stats()->Print(out);
EXPECT_EQ(0, counter()->count());
EXPECT_EQ(0, counter2()->count());
EXPECT_EQ(100, counter()->time().InMicroseconds());
EXPECT_EQ(50, counter2()->time().InMicroseconds());
Sleep(50);
stats()->Print(out);
EXPECT_EQ(0, counter()->count());
EXPECT_EQ(0, counter2()->count());
EXPECT_EQ(100, counter()->time().InMicroseconds());
EXPECT_EQ(100, counter2()->time().InMicroseconds());
Sleep(50);
}
Sleep(50);
EXPECT_EQ(0, counter()->count());
EXPECT_EQ(1, counter2()->count());
EXPECT_EQ(100, counter()->time().InMicroseconds());
EXPECT_EQ(150, counter2()->time().InMicroseconds());
Sleep(50);
}
EXPECT_EQ(1, counter()->count());
EXPECT_EQ(1, counter2()->count());
EXPECT_EQ(200, counter()->time().InMicroseconds());
EXPECT_EQ(150, counter2()->time().InMicroseconds());
}
TEST_F(RuntimeCallStatsTest, NestedScopes) {
{
RCS_SCOPE(stats(), counter_id());
Sleep(100);
{
RCS_SCOPE(stats(), counter_id2());
Sleep(100);
{
RCS_SCOPE(stats(), counter_id3());
Sleep(50);
}
Sleep(50);
{
RCS_SCOPE(stats(), counter_id3());
Sleep(50);
}
Sleep(50);
}
Sleep(100);
{
RCS_SCOPE(stats(), counter_id2());
Sleep(100);
}
Sleep(50);
}
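// Exclusive times: counter = 100 + 100 + 50 = 250us,
// counter2 = (100 + 50 + 50) + 100 = 300us, counter3 = 50 + 50 = 100us.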
EXPECT_EQ(1, counter()->count());
EXPECT_EQ(2, counter2()->count());
EXPECT_EQ(2, counter3()->count());
EXPECT_EQ(250, counter()->time().InMicroseconds());
EXPECT_EQ(300, counter2()->time().InMicroseconds());
EXPECT_EQ(100, counter3()->time().InMicroseconds());
}
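// With NativeTimeScope the default high-resolution clock is used, so real
// JS execution shows up as a non-zero, growing time on kJS_Execution.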
TEST_F(RuntimeCallStatsTest, BasicJavaScript) {
RuntimeCallCounter* counter =
stats()->GetCounter(RuntimeCallCounterId::kJS_Execution);
EXPECT_EQ(0, counter->count());
EXPECT_EQ(0, counter->time().InMicroseconds());
{
NativeTimeScope native_timer_scope;
RunJS("function f() { return 1; };");
}
EXPECT_EQ(1, counter->count());
int64_t time = counter->time().InMicroseconds();
EXPECT_LT(0, time);
{
NativeTimeScope native_timer_scope;
RunJS("f();");
}
EXPECT_EQ(2, counter->count());
EXPECT_LE(time, counter->time().InMicroseconds());
}
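// Reading f.length invokes the FunctionLengthGetter callback exactly once
// per access, while compiling and running the scripts is attributed to
// kJS_Execution.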
TEST_F(RuntimeCallStatsTest, FunctionLengthGetter) {
RuntimeCallCounter* getter_counter =
stats()->GetCounter(RuntimeCallCounterId::kFunctionLengthGetter);
EXPECT_EQ(0, getter_counter->count());
EXPECT_EQ(0, js_counter()->count());
EXPECT_EQ(0, getter_counter->time().InMicroseconds());
EXPECT_EQ(0, js_counter()->time().InMicroseconds());
{
NativeTimeScope native_timer_scope;
RunJS("function f(array) { return array.length; };");
}
EXPECT_EQ(0, getter_counter->count());
EXPECT_EQ(1, js_counter()->count());
EXPECT_EQ(0, getter_counter->time().InMicroseconds());
int64_t js_time = js_counter()->time().InMicroseconds();
EXPECT_LT(0, js_time);
{
NativeTimeScope native_timer_scope;
RunJS("f.length;");
}
EXPECT_EQ(1, getter_counter->count());
EXPECT_EQ(2, js_counter()->count());
EXPECT_LE(0, getter_counter->time().InMicroseconds());
EXPECT_LE(js_time, js_counter()->time().InMicroseconds());
{
NativeTimeScope native_timer_scope;
RunJS("for (let i = 0; i < 50; i++) { f.length };");
}
EXPECT_EQ(51, getter_counter->count());
EXPECT_EQ(3, js_counter()->count());
{
NativeTimeScope native_timer_scope;
RunJS("for (let i = 0; i < 1000; i++) { f.length; };");
}
EXPECT_EQ(1051, getter_counter->count());
EXPECT_EQ(4, js_counter()->count());
}
namespace {
static RuntimeCallStatsTest* current_test;
static const int kCustomCallbackTime = 1234;
static void CustomCallback(const v8::FunctionCallbackInfo<v8::Value>& info) {
RCS_SCOPE(current_test->stats(), current_test->counter_id2());
current_test->Sleep(kCustomCallbackTime);
}
} // namespace
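// CustomCallback runs under kTestCounter2 and advances the mock clock by
// kCustomCallbackTime on every call, so counter2 accumulates exactly
// kCustomCallbackTime per invocation.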
TEST_F(RuntimeCallStatsTest, CallbackFunction) {
FLAG_allow_natives_syntax = true;
FLAG_incremental_marking = false;
RuntimeCallCounter* callback_counter =
stats()->GetCounter(RuntimeCallCounterId::kFunctionCallback);
current_test = this;
// Set up a function template with a custom callback.
v8::Isolate* isolate = v8_isolate();
v8::HandleScope scope(isolate);
v8::Local<v8::ObjectTemplate> object_template =
v8::ObjectTemplate::New(isolate);
object_template->Set(isolate, "callback",
v8::FunctionTemplate::New(isolate, CustomCallback));
v8::Local<v8::Object> object =
object_template->NewInstance(v8_context()).ToLocalChecked();
SetGlobalProperty("custom_object", object);
EXPECT_EQ(0, js_counter()->count());
EXPECT_EQ(0, counter()->count());
EXPECT_EQ(0, callback_counter->count());
EXPECT_EQ(0, counter2()->count());
{
RCS_SCOPE(stats(), counter_id());
Sleep(100);
RunJS("custom_object.callback();");
}
EXPECT_EQ(1, js_counter()->count());
EXPECT_EQ(1, counter()->count());
EXPECT_EQ(1, callback_counter->count());
EXPECT_EQ(1, counter2()->count());
// Given that no native timers are used, only the two scopes explicitly
// mentioned above will track the time.
EXPECT_EQ(0, js_counter()->time().InMicroseconds());
EXPECT_EQ(0, callback_counter->time().InMicroseconds());
EXPECT_EQ(100, counter()->time().InMicroseconds());
EXPECT_EQ(kCustomCallbackTime, counter2()->time().InMicroseconds());
RunJS("for (let i = 0; i < 9; i++) { custom_object.callback(); };");
EXPECT_EQ(2, js_counter()->count());
EXPECT_EQ(1, counter()->count());
EXPECT_EQ(10, callback_counter->count());
EXPECT_EQ(10, counter2()->count());
EXPECT_EQ(0, js_counter()->time().InMicroseconds());
EXPECT_EQ(0, callback_counter->time().InMicroseconds());
EXPECT_EQ(100, counter()->time().InMicroseconds());
EXPECT_EQ(kCustomCallbackTime * 10, counter2()->time().InMicroseconds());
RunJS("for (let i = 0; i < 4000; i++) { custom_object.callback(); };");
EXPECT_EQ(3, js_counter()->count());
EXPECT_EQ(1, counter()->count());
EXPECT_EQ(4010, callback_counter->count());
EXPECT_EQ(4010, counter2()->count());
EXPECT_EQ(0, js_counter()->time().InMicroseconds());
EXPECT_EQ(0, callback_counter->time().InMicroseconds());
EXPECT_EQ(100, counter()->time().InMicroseconds());
EXPECT_EQ(kCustomCallbackTime * 4010, counter2()->time().InMicroseconds());
// Check that the FunctionCallback tracing also works properly
// when the `callback` is called from optimized code.
RunJS(
"function wrap(o) { return o.callback(); };\n"
"%PrepareFunctionForOptimization(wrap);\n"
"wrap(custom_object);\n"
"wrap(custom_object);\n"
"%OptimizeFunctionOnNextCall(wrap);\n"
"wrap(custom_object);\n");
EXPECT_EQ(4, js_counter()->count());
EXPECT_EQ(1, counter()->count());
EXPECT_EQ(4013, callback_counter->count());
EXPECT_EQ(4013, counter2()->count());
EXPECT_EQ(0, js_counter()->time().InMicroseconds());
EXPECT_EQ(0, callback_counter->time().InMicroseconds());
EXPECT_EQ(100, counter()->time().InMicroseconds());
EXPECT_EQ(kCustomCallbackTime * 4013, counter2()->time().InMicroseconds());
}
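// Same as above, but the callback is installed as an API accessor, so every
// read of custom_object.apiGetter hits the FunctionCallback counter once.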
TEST_F(RuntimeCallStatsTest, ApiGetter) {
FLAG_allow_natives_syntax = true;
FLAG_incremental_marking = false;
RuntimeCallCounter* callback_counter =
stats()->GetCounter(RuntimeCallCounterId::kFunctionCallback);
current_test = this;
// Set up a function template with an api accessor.
v8::Isolate* isolate = v8_isolate();
v8::HandleScope scope(isolate);
v8::Local<v8::ObjectTemplate> object_template =
v8::ObjectTemplate::New(isolate);
object_template->SetAccessorProperty(
NewString("apiGetter"),
v8::FunctionTemplate::New(isolate, CustomCallback));
v8::Local<v8::Object> object =
object_template->NewInstance(v8_context()).ToLocalChecked();
SetGlobalProperty("custom_object", object);
// TODO(cbruni): Check api accessor timer (one above the custom callback).
EXPECT_EQ(0, js_counter()->count());
EXPECT_EQ(0, counter()->count());
EXPECT_EQ(0, callback_counter->count());
EXPECT_EQ(0, counter2()->count());
{
RCS_SCOPE(stats(), counter_id());
Sleep(100);
RunJS("custom_object.apiGetter;");
}
EXPECT_EQ(1, js_counter()->count());
EXPECT_EQ(1, counter()->count());
EXPECT_EQ(1, callback_counter->count());
EXPECT_EQ(1, counter2()->count());
// Given that no native timers are used, only the two scopes explicitly
// mentioned above will track the time.
EXPECT_EQ(0, js_counter()->time().InMicroseconds());
EXPECT_EQ(100, counter()->time().InMicroseconds());
EXPECT_EQ(0, callback_counter->time().InMicroseconds());
EXPECT_EQ(kCustomCallbackTime, counter2()->time().InMicroseconds());
RunJS("for (let i = 0; i < 9; i++) { custom_object.apiGetter };");
EXPECT_EQ(2, js_counter()->count());
EXPECT_EQ(1, counter()->count());
EXPECT_EQ(10, callback_counter->count());
EXPECT_EQ(10, counter2()->count());
EXPECT_EQ(0, js_counter()->time().InMicroseconds());
EXPECT_EQ(100, counter()->time().InMicroseconds());
EXPECT_EQ(0, callback_counter->time().InMicroseconds());
EXPECT_EQ(kCustomCallbackTime * 10, counter2()->time().InMicroseconds());
RunJS("for (let i = 0; i < 4000; i++) { custom_object.apiGetter };");
EXPECT_EQ(3, js_counter()->count());
EXPECT_EQ(1, counter()->count());
EXPECT_EQ(4010, callback_counter->count());
EXPECT_EQ(4010, counter2()->count());
EXPECT_EQ(0, js_counter()->time().InMicroseconds());
EXPECT_EQ(100, counter()->time().InMicroseconds());
EXPECT_EQ(0, callback_counter->time().InMicroseconds());
EXPECT_EQ(kCustomCallbackTime * 4010, counter2()->time().InMicroseconds());
// Check that the FunctionCallback tracing also works properly
// when the `apiGetter` is called from optimized code.
RunJS(
"function wrap(o) { return o.apiGetter; };\n"
"%PrepareFunctionForOptimization(wrap);\n"
"wrap(custom_object);\n"
"wrap(custom_object);\n"
"%OptimizeFunctionOnNextCall(wrap);\n"
"wrap(custom_object);\n");
EXPECT_EQ(4, js_counter()->count());
EXPECT_EQ(1, counter()->count());
EXPECT_EQ(4013, callback_counter->count());
EXPECT_EQ(4013, counter2()->count());
EXPECT_EQ(0, js_counter()->time().InMicroseconds());
EXPECT_EQ(100, counter()->time().InMicroseconds());
EXPECT_EQ(0, callback_counter->time().InMicroseconds());
EXPECT_EQ(kCustomCallbackTime * 4013, counter2()->time().InMicroseconds());
}
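// Smoke test: allocate enough to leave live and dead objects behind, then
// force two full GCs with runtime call stats enabled; no counter values are
// checked.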
TEST_F(RuntimeCallStatsTest, GarbageCollection) {
FLAG_expose_gc = true;
v8::Isolate* isolate = v8_isolate();
RunJS(
"let root = [];"
"for (let i = 0; i < 10; i++) root.push((new Array(1000)).fill(0));"
"root.push((new Array(1000000)).fill(0));"
"((new Array(1000000)).fill(0));");
isolate->RequestGarbageCollectionForTesting(
v8::Isolate::kFullGarbageCollection);
isolate->RequestGarbageCollectionForTesting(
v8::Isolate::kFullGarbageCollection);
}
} // namespace internal
} // namespace v8