[sparkplug] Rename bytecode tracing functions

Change the interpreter tracing functions to be generic unoptimized code
tracing functions. The type of the code is now inferred from the frame,
rather than passed in.
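To illustrate the shape of the change, here is a standalone C++ sketch (not V8 code; every name in it is hypothetical): the tracing entry point derives the code kind from the frame it finds, rather than trusting a caller-supplied flag.

// Standalone sketch, not V8 code: all names here are hypothetical.
// It shows the pattern this change adopts: the tracing function infers
// the code kind from the current frame instead of taking a flag.
#include <iostream>

enum class CodeKind { kInterpreted, kBaseline };

struct Frame {
  CodeKind kind;
  bool is_interpreter() const { return kind == CodeKind::kInterpreted; }
  bool is_baseline() const { return kind == CodeKind::kBaseline; }
};

// Stand-ins for V8's --trace-ignition / --trace-baseline runtime flags.
bool trace_ignition = true;
bool trace_baseline = false;

void TraceBytecodeEntry(const Frame& frame) {
  // The per-tier flag check happens here, once, using the inferred kind.
  if (frame.is_interpreter() && !trace_ignition) return;
  if (frame.is_baseline() && !trace_baseline) return;
  std::cout << (frame.is_baseline() ? "B-> " : " -> ") << "bytecode entry\n";
}

int main() {
  TraceBytecodeEntry(Frame{CodeKind::kInterpreted});  // traced: " -> ..."
  TraceBytecodeEntry(Frame{CodeKind::kBaseline});     // suppressed: flag off
}

This is why the runtime functions in the diff below drop their fourth argument and walk a frame iterator instead.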

Also expand the set of gn flags: an explicit flag to enable unoptimized
tracing, helper gn flags that enable it for Ignition and for baseline
(both just turn on unoptimized tracing for now, though we could split
this up in the future), and separate V8 runtime flags for tracing
Ignition and tracing baseline.
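A minimal C++ model of the new flag plumbing (a sketch only; the struct and function names are invented, and it assumes the gn default for v8_enable_trace_unoptimized is the empty string, as in the BUILD.gn hunk below):

// Standalone sketch, not the gn code: models the BUILD.gn logic below,
// where v8_enable_trace_unoptimized defaults to "" (auto) and is
// resolved from the two tier-specific flags, then sanity-checked.
#include <cassert>
#include <optional>

struct TraceArgs {
  std::optional<bool> trace_unoptimized;  // empty == gn's "" default
  bool trace_ignition = false;
  bool trace_baseline = false;
};

bool ResolveTraceUnoptimized(TraceArgs args) {
  if (!args.trace_unoptimized.has_value()) {
    // Mirrors: v8_enable_trace_unoptimized = ignition || baseline.
    args.trace_unoptimized = args.trace_ignition || args.trace_baseline;
  }
  // Mirrors the gn asserts: tier tracing requires the umbrella define.
  assert(!args.trace_ignition || *args.trace_unoptimized);
  assert(!args.trace_baseline || *args.trace_unoptimized);
  return *args.trace_unoptimized;
}

int main() {
  assert(ResolveTraceUnoptimized({std::nullopt, true, false}));    // implied on
  assert(!ResolveTraceUnoptimized({std::nullopt, false, false}));  // stays off
}

Note the direction flips at runtime: --trace-unoptimized weakly implies both tier flags, as the flag-definitions.h hunk shows, while at build time either tier flag turns on the umbrella define.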

Bug: v8:11420, v8:11429
Change-Id: I040c2628fe5744dcb38ef8623df3e34f9c86a5b8
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2692817
Commit-Queue: Leszek Swirski <leszeks@chromium.org>
Reviewed-by: Ross McIlroy <rmcilroy@chromium.org>
Auto-Submit: Leszek Swirski <leszeks@chromium.org>
Cr-Commit-Position: refs/heads/master@{#72767}
Leszek Swirski <leszeks@chromium.org>, 2021-02-16 09:33:34 +01:00, committed by Commit Bot
commit 0067fbb1ac, parent f5cd26c8bf
9 changed files with 90 additions and 57 deletions

--- a/BUILD.gn
+++ b/BUILD.gn

@@ -124,8 +124,10 @@ declare_args() {
   # Sets -dV8_ENABLE_CHECKS.
   v8_enable_v8_checks = ""
-  # Sets -dV8_TRACE_IGNITION.
+  # Sets -dV8_TRACE_UNOPTIMIZED.
+  v8_enable_trace_unoptimized = ""
   v8_enable_trace_ignition = false
+  v8_enable_trace_baseline = false
   # Sets -dV8_TRACE_FEEDBACK_UPDATES.
   v8_enable_trace_feedback_updates = false
@@ -354,6 +356,14 @@ assert(!v8_enable_concurrent_marking || v8_enable_atomic_object_field_writes,
        "Concurrent marking requires atomic object field writes.")
 assert(!v8_enable_concurrent_marking || v8_enable_atomic_marking_state,
        "Concurrent marking requires atomic marking state.")
+if (v8_enable_trace_unoptimized == "") {
+  v8_enable_trace_unoptimized =
+      v8_enable_trace_ignition || v8_enable_trace_baseline
+}
+assert(!v8_enable_trace_ignition || v8_enable_trace_unoptimized,
+       "Ignition tracing requires unoptimized tracing to be enabled.")
+assert(!v8_enable_trace_baseline || v8_enable_trace_unoptimized,
+       "Baseline tracing requires unoptimized tracing to be enabled.")
 # Toggle pointer compression for correctness fuzzing when building the
 # clang_x64_pointer_compression toolchain. We'll correctness-compare the
@@ -680,8 +690,8 @@ config("features") {
   if (v8_enable_trace_maps) {
     defines += [ "V8_TRACE_MAPS" ]
   }
-  if (v8_enable_trace_ignition) {
-    defines += [ "V8_TRACE_IGNITION" ]
+  if (v8_enable_trace_unoptimized) {
+    defines += [ "V8_TRACE_UNOPTIMIZED" ]
   }
   if (v8_enable_trace_feedback_updates) {
     defines += [ "V8_TRACE_FEEDBACK_UPDATES" ]
@@ -3415,7 +3425,6 @@ v8_source_set("v8_base_without_compiler") {
     "src/runtime/runtime-futex.cc",
     "src/runtime/runtime-generator.cc",
     "src/runtime/runtime-internal.cc",
-    "src/runtime/runtime-interpreter.cc",
     "src/runtime/runtime-intl.cc",
     "src/runtime/runtime-literals.cc",
     "src/runtime/runtime-module.cc",
@@ -3429,6 +3438,7 @@ v8_source_set("v8_base_without_compiler") {
     "src/runtime/runtime-strings.cc",
     "src/runtime/runtime-symbol.cc",
     "src/runtime/runtime-test.cc",
+    "src/runtime/runtime-trace.cc",
     "src/runtime/runtime-typedarray.cc",
     "src/runtime/runtime-utils.h",
     "src/runtime/runtime-wasm.cc",

--- a/src/baseline/baseline-compiler.cc
+++ b/src/baseline/baseline-compiler.cc

@@ -506,8 +506,8 @@ void BaselineCompiler::VisitSingleBytecode() {
   VerifyFrame();
-#ifdef V8_TRACE_IGNITION
-  TraceBytecode(Runtime::kInterpreterTraceBytecodeEntry);
+#ifdef V8_TRACE_UNOPTIMIZED
+  TraceBytecode(Runtime::kTraceUnoptimizedBytecodeEntry);
 #endif
   switch (accessor().current_bytecode()) {
@@ -520,8 +520,8 @@ void BaselineCompiler::VisitSingleBytecode() {
   }
   __ RecordComment("]");
-#ifdef V8_TRACE_IGNITION
-  TraceBytecode(Runtime::kInterpreterTraceBytecodeExit);
+#ifdef V8_TRACE_UNOPTIMIZED
+  TraceBytecode(Runtime::kTraceUnoptimizedBytecodeExit);
 #endif
 }
@@ -551,18 +551,18 @@ void BaselineCompiler::VerifyFrame() {
   }
 }
-#ifdef V8_TRACE_IGNITION
+#ifdef V8_TRACE_UNOPTIMIZED
 void BaselineCompiler::TraceBytecode(Runtime::FunctionId function_id) {
-  if (!FLAG_trace_ignition) return;
+  if (!FLAG_trace_baseline) return;
-  __ RecordComment(function_id == Runtime::kInterpreterTraceBytecodeEntry
+  __ RecordComment(function_id == Runtime::kTraceUnoptimizedBytecodeEntry
                        ? "[ Trace bytecode entry"
                        : "[ Trace bytecode exit");
   SaveAccumulatorScope accumulator_scope(&basm_);
   CallRuntime(function_id, bytecode_,
               Smi::FromInt(BytecodeArray::kHeaderSize - kHeapObjectTag +
                            accessor().current_offset()),
-              kInterpreterAccumulatorRegister, RootIndex::kTrueValue);
+              kInterpreterAccumulatorRegister);
   __ RecordComment("]");
 }
 #endif

--- a/src/baseline/baseline-compiler.h
+++ b/src/baseline/baseline-compiler.h

@@ -315,7 +315,7 @@ class BaselineCompiler {
   void BuildCall(ConvertReceiverMode mode, uint32_t slot, uint32_t arg_count,
                  Args... args);
-#ifdef V8_TRACE_IGNITION
+#ifdef V8_TRACE_UNOPTIMIZED
   void TraceBytecode(Runtime::FunctionId function_id);
 #endif

--- a/src/codegen/code-stub-assembler.cc
+++ b/src/codegen/code-stub-assembler.cc

@@ -9755,7 +9755,7 @@ void CodeStubAssembler::ReportFeedbackUpdate(
 #ifdef V8_TRACE_FEEDBACK_UPDATES
   // Trace the update.
-  CallRuntime(Runtime::kInterpreterTraceUpdateFeedback, NoContextConstant(),
+  CallRuntime(Runtime::kTraceUpdateFeedback, NoContextConstant(),
               LoadFromParentFrame(StandardFrameConstants::kFunctionOffset),
               SmiTag(Signed(slot_id)), StringConstant(reason));
 #endif  // V8_TRACE_FEEDBACK_UPDATES

--- a/src/common/globals.h
+++ b/src/common/globals.h

@@ -122,7 +122,7 @@ STATIC_ASSERT(V8_DEFAULT_STACK_SIZE_KB* KB +
 #endif
 // Some types of tracing require the SFI to store a unique ID.
-#if defined(V8_TRACE_MAPS) || defined(V8_TRACE_IGNITION)
+#if defined(V8_TRACE_MAPS) || defined(V8_TRACE_UNOPTIMIZED)
 #define V8_SFI_HAS_UNIQUE_ID true
 #else
 #define V8_SFI_HAS_UNIQUE_ID false

--- a/src/flags/flag-definitions.h
+++ b/src/flags/flag-definitions.h

@@ -533,9 +533,15 @@ DEFINE_BOOL(stress_lazy_source_positions, false,
             "collect lazy source positions immediately after lazy compile")
 DEFINE_STRING(print_bytecode_filter, "*",
               "filter for selecting which functions to print bytecode")
-#ifdef V8_TRACE_IGNITION
+#ifdef V8_TRACE_UNOPTIMIZED
+DEFINE_BOOL(trace_unoptimized, false,
+            "trace the bytecodes executed by all unoptimized execution")
 DEFINE_BOOL(trace_ignition, false,
             "trace the bytecodes executed by the ignition interpreter")
+DEFINE_BOOL(trace_baseline, false,
+            "trace the bytecodes executed by the baseline code")
+DEFINE_WEAK_IMPLICATION(trace_unoptimized, trace_ignition)
+DEFINE_WEAK_IMPLICATION(trace_unoptimized, trace_baseline)
 #endif
 #ifdef V8_TRACE_FEEDBACK_UPDATES
 DEFINE_BOOL(

--- a/src/interpreter/interpreter-assembler.cc
+++ b/src/interpreter/interpreter-assembler.cc

@@ -46,8 +46,8 @@ InterpreterAssembler::InterpreterAssembler(CodeAssemblerState* state,
       made_call_(false),
       reloaded_frame_ptr_(false),
       bytecode_array_valid_(true) {
-#ifdef V8_TRACE_IGNITION
-  TraceBytecode(Runtime::kInterpreterTraceBytecodeEntry);
+#ifdef V8_TRACE_UNOPTIMIZED
+  TraceBytecode(Runtime::kTraceUnoptimizedBytecodeEntry);
 #endif
   RegisterCallGenerationCallbacks([this] { CallPrologue(); },
                                   [this] { CallEpilogue(); });
@@ -1048,8 +1048,8 @@ TNode<IntPtrT> InterpreterAssembler::Advance(int delta) {
 TNode<IntPtrT> InterpreterAssembler::Advance(TNode<IntPtrT> delta,
                                              bool backward) {
-#ifdef V8_TRACE_IGNITION
-  TraceBytecode(Runtime::kInterpreterTraceBytecodeExit);
+#ifdef V8_TRACE_UNOPTIMIZED
+  TraceBytecode(Runtime::kTraceUnoptimizedBytecodeExit);
 #endif
   TNode<IntPtrT> next_offset = backward ? IntPtrSub(BytecodeOffset(), delta)
                                         : IntPtrAdd(BytecodeOffset(), delta);
@@ -1134,8 +1134,8 @@ void InterpreterAssembler::InlineStar() {
   bytecode_ = Bytecode::kStar;
   implicit_register_use_ = ImplicitRegisterUse::kNone;
-#ifdef V8_TRACE_IGNITION
-  TraceBytecode(Runtime::kInterpreterTraceBytecodeEntry);
+#ifdef V8_TRACE_UNOPTIMIZED
+  TraceBytecode(Runtime::kTraceUnoptimizedBytecodeEntry);
 #endif
   StoreRegister(GetAccumulator(),
                 BytecodeOperandReg(0, LoadSensitivity::kSafe));
@@ -1288,8 +1288,7 @@ void InterpreterAssembler::MaybeDropFrames(TNode<Context> context) {
 void InterpreterAssembler::TraceBytecode(Runtime::FunctionId function_id) {
   CallRuntime(function_id, GetContext(), BytecodeArrayTaggedPointer(),
-              SmiTag(BytecodeOffset()), GetAccumulatorUnchecked(),
-              FalseConstant());
+              SmiTag(BytecodeOffset()), GetAccumulatorUnchecked());
 }
 void InterpreterAssembler::TraceBytecodeDispatch(TNode<WordT> target_bytecode) {

--- a/src/runtime/runtime-interpreter.cc
+++ b/src/runtime/runtime-trace.cc

@@ -21,7 +21,7 @@
 namespace v8 {
 namespace internal {
-#ifdef V8_TRACE_IGNITION
+#ifdef V8_TRACE_UNOPTIMIZED
 namespace {
@@ -40,9 +40,9 @@ void AdvanceToOffsetForTracing(
                    interpreter::OperandScale::kSingle));
 }
-void PrintRegisters(Isolate* isolate, std::ostream& os, bool is_input,
-                    interpreter::BytecodeArrayIterator&
-                        bytecode_iterator,  // NOLINT(runtime/references)
+void PrintRegisters(UnoptimizedFrame* frame, std::ostream& os, bool is_input,
+                    interpreter::BytecodeArrayAccessor&
+                        bytecode_accessor,  // NOLINT(runtime/references)
                     Handle<Object> accumulator) {
   static const char kAccumulator[] = "accumulator";
   static const int kRegFieldWidth = static_cast<int>(sizeof(kAccumulator) - 1);
@@ -54,7 +54,7 @@ void PrintRegisters(Isolate* isolate, std::ostream& os, bool is_input,
     os << (is_input ? kInputColourCode : kOutputColourCode);
   }
-  interpreter::Bytecode bytecode = bytecode_iterator.current_bytecode();
+  interpreter::Bytecode bytecode = bytecode_accessor.current_bytecode();
   // Print accumulator.
   if ((is_input && interpreter::Bytecodes::ReadsAccumulator(bytecode)) ||
@@ -65,8 +65,6 @@ void PrintRegisters(Isolate* isolate, std::ostream& os, bool is_input,
   }
   // Print the registers.
-  JavaScriptFrameIterator frame_iterator(isolate);
-  UnoptimizedFrame* frame = UnoptimizedFrame::cast(frame_iterator.frame());
   int operand_count = interpreter::Bytecodes::NumberOfOperands(bytecode);
   for (int operand_index = 0; operand_index < operand_count; operand_index++) {
     interpreter::OperandType operand_type =
@@ -77,14 +75,14 @@ void PrintRegisters(Isolate* isolate, std::ostream& os, bool is_input,
             : interpreter::Bytecodes::IsRegisterOutputOperandType(operand_type);
     if (should_print) {
       interpreter::Register first_reg =
-          bytecode_iterator.GetRegisterOperand(operand_index);
-      int range = bytecode_iterator.GetRegisterOperandRange(operand_index);
+          bytecode_accessor.GetRegisterOperand(operand_index);
+      int range = bytecode_accessor.GetRegisterOperandRange(operand_index);
       for (int reg_index = first_reg.index();
            reg_index < first_reg.index() + range; reg_index++) {
         Object reg_object = frame->ReadInterpreterRegister(reg_index);
         os << " [ " << std::setw(kRegFieldWidth)
            << interpreter::Register(reg_index).ToString(
-                  bytecode_iterator.bytecode_array()->parameter_count())
+                  bytecode_accessor.bytecode_array()->parameter_count())
            << kArrowDirection;
         reg_object.ShortPrint(os);
         os << " ]" << std::endl;
@@ -98,10 +96,19 @@ void PrintRegisters(Isolate* isolate, std::ostream& os, bool is_input,
 }  // namespace
-// TODO(v8:11429): Consider either renaming to not just be "Interpreter", or
-// copying for Baseline.
-RUNTIME_FUNCTION(Runtime_InterpreterTraceBytecodeEntry) {
-  if (!FLAG_trace_ignition) {
+RUNTIME_FUNCTION(Runtime_TraceUnoptimizedBytecodeEntry) {
+  if (!FLAG_trace_ignition && !FLAG_trace_baseline) {
     return ReadOnlyRoots(isolate).undefined_value();
   }
+  JavaScriptFrameIterator frame_iterator(isolate);
+  UnoptimizedFrame* frame =
+      reinterpret_cast<UnoptimizedFrame*>(frame_iterator.frame());
+  if (frame->is_interpreter() && !FLAG_trace_ignition) {
+    return ReadOnlyRoots(isolate).undefined_value();
+  }
+  if (frame->is_baseline() && !FLAG_trace_baseline) {
+    return ReadOnlyRoots(isolate).undefined_value();
+  }
@@ -110,7 +117,6 @@ RUNTIME_FUNCTION(Runtime_InterpreterTraceBytecodeEntry) {
   CONVERT_ARG_HANDLE_CHECKED(BytecodeArray, bytecode_array, 0);
   CONVERT_SMI_ARG_CHECKED(bytecode_offset, 1);
   CONVERT_ARG_HANDLE_CHECKED(Object, accumulator, 2);
-  CONVERT_ARG_HANDLE_CHECKED(Object, is_baseline, 3);
   int offset = bytecode_offset - BytecodeArray::kHeaderSize + kHeapObjectTag;
   interpreter::BytecodeArrayIterator bytecode_iterator(bytecode_array);
@@ -122,7 +128,8 @@ RUNTIME_FUNCTION(Runtime_InterpreterTraceBytecodeEntry) {
     const uint8_t* base_address = reinterpret_cast<const uint8_t*>(
         bytecode_array->GetFirstBytecodeAddress());
     const uint8_t* bytecode_address = base_address + offset;
-    if (is_baseline->BooleanValue(isolate)) {
+    if (frame->is_baseline()) {
       os << "B-> ";
     } else {
       os << " -> ";
@@ -133,15 +140,26 @@ RUNTIME_FUNCTION(Runtime_InterpreterTraceBytecodeEntry) {
                      bytecode_array->parameter_count());
     os << std::endl;
     // Print all input registers and accumulator.
-    PrintRegisters(isolate, os, true, bytecode_iterator, accumulator);
+    PrintRegisters(frame, os, true, bytecode_iterator, accumulator);
     os << std::flush;
   }
   return ReadOnlyRoots(isolate).undefined_value();
 }
-RUNTIME_FUNCTION(Runtime_InterpreterTraceBytecodeExit) {
-  if (!FLAG_trace_ignition) {
+RUNTIME_FUNCTION(Runtime_TraceUnoptimizedBytecodeExit) {
+  if (!FLAG_trace_ignition && !FLAG_trace_baseline) {
     return ReadOnlyRoots(isolate).undefined_value();
   }
+  JavaScriptFrameIterator frame_iterator(isolate);
+  UnoptimizedFrame* frame =
+      reinterpret_cast<UnoptimizedFrame*>(frame_iterator.frame());
+  if (frame->is_interpreter() && !FLAG_trace_ignition) {
+    return ReadOnlyRoots(isolate).undefined_value();
+  }
+  if (frame->is_baseline() && !FLAG_trace_baseline) {
+    return ReadOnlyRoots(isolate).undefined_value();
+  }
@@ -161,8 +179,9 @@ RUNTIME_FUNCTION(Runtime_InterpreterTraceBytecodeExit) {
           interpreter::OperandScale::kSingle ||
       offset > bytecode_iterator.current_offset()) {
     StdoutStream os;
     // Print all output registers and accumulator.
-    PrintRegisters(isolate, os, false, bytecode_iterator, accumulator);
+    PrintRegisters(frame, os, false, bytecode_iterator, accumulator);
     os << std::flush;
   }
   return ReadOnlyRoots(isolate).undefined_value();
@@ -172,7 +191,7 @@ RUNTIME_FUNCTION(Runtime_InterpreterTraceBytecodeExit) {
 #ifdef V8_TRACE_FEEDBACK_UPDATES
-RUNTIME_FUNCTION(Runtime_InterpreterTraceUpdateFeedback) {
+RUNTIME_FUNCTION(Runtime_TraceUpdateFeedback) {
   if (!FLAG_trace_feedback_updates) {
     return ReadOnlyRoots(isolate).undefined_value();
   }

--- a/src/runtime/runtime.h
+++ b/src/runtime/runtime.h

@@ -152,24 +152,23 @@ namespace internal {
   F(ForInEnumerate, 1, 1) \
   F(ForInHasProperty, 2, 1)
-#ifdef V8_TRACE_IGNITION
-#define FOR_EACH_INTRINSIC_INTERPRETER_TRACE(F, I) \
-  F(InterpreterTraceBytecodeEntry, 4, 1)           \
-  F(InterpreterTraceBytecodeExit, 4, 1)
+#ifdef V8_TRACE_UNOPTIMIZED
+#define FOR_EACH_INTRINSIC_TRACE_UNOPTIMIZED(F, I) \
+  F(TraceUnoptimizedBytecodeEntry, 3, 1)           \
+  F(TraceUnoptimizedBytecodeExit, 3, 1)
 #else
-#define FOR_EACH_INTRINSIC_INTERPRETER_TRACE(F, I)
+#define FOR_EACH_INTRINSIC_TRACE_UNOPTIMIZED(F, I)
 #endif
 #ifdef V8_TRACE_FEEDBACK_UPDATES
-#define FOR_EACH_INTRINSIC_INTERPRETER_TRACE_FEEDBACK(F, I) \
-  F(InterpreterTraceUpdateFeedback, 3, 1)
+#define FOR_EACH_INTRINSIC_TRACE_FEEDBACK(F, I) F(TraceUpdateFeedback, 3, 1)
 #else
-#define FOR_EACH_INTRINSIC_INTERPRETER_TRACE_FEEDBACK(F, I)
+#define FOR_EACH_INTRINSIC_TRACE_FEEDBACK(F, I)
 #endif
-#define FOR_EACH_INTRINSIC_INTERPRETER(F, I) \
-  FOR_EACH_INTRINSIC_INTERPRETER_TRACE(F, I) \
-  FOR_EACH_INTRINSIC_INTERPRETER_TRACE_FEEDBACK(F, I)
+#define FOR_EACH_INTRINSIC_TRACE(F, I)       \
+  FOR_EACH_INTRINSIC_TRACE_UNOPTIMIZED(F, I) \
+  FOR_EACH_INTRINSIC_TRACE_FEEDBACK(F, I)
 #define FOR_EACH_INTRINSIC_FUNCTION(F, I) \
   I(Call, -1 /* >= 2 */, 1) \
@@ -648,7 +647,7 @@ namespace internal {
   FOR_EACH_INTRINSIC_GENERATOR(F, I) \
   FOR_EACH_INTRINSIC_IC(F, I) \
   FOR_EACH_INTRINSIC_INTERNAL(F, I) \
-  FOR_EACH_INTRINSIC_INTERPRETER(F, I) \
+  FOR_EACH_INTRINSIC_TRACE(F, I) \
   FOR_EACH_INTRINSIC_INTL(F, I) \
   FOR_EACH_INTRINSIC_LITERALS(F, I) \
   FOR_EACH_INTRINSIC_MODULE(F, I) \