Reland "[sparkplug] Change bytecode offset mapping and introduce iterator."
This is a reland of a8b61ef521
The main reason for the revert was not related to this CL and was fixed
with https://crrev.com/c/2739646
In addition, debug output in d8.test.verifySourcePositions was removed
due to TSAN complaints.
Original change's description:
> [sparkplug] Change bytecode offset mapping and introduce iterator.
>
> Previously, we recorded pairs of (bytecode offset, sparkplug pc) to
> create a mapping of bytecode offset <-> sparkplug pc.
> These pairs were only recorded after builtin/runtime calls.
> In preparation for deoptimizing to Sparkplug, we need a more precise
> mapping.
> With this CL, we record positions for every bytecode. Instead of storing
> a pair of (bytecode offset, sparkplug pc), we store only the pc,
> calculating the bytecode offset from the index in the mapping table.
> For easier use, an iterator to access the mapping is introduced.
>
> Drive-by: Reduce sampling interval in cpu-profiler cctest to get rid of
> flaky failures.
>
> Bug: v8:11420, v8:11429
> Change-Id: I36a9171f43a574eb67880cbca6cf9ff7ab291e60
> Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2720189
> Reviewed-by: Victor Gomes <victorgomes@chromium.org>
> Reviewed-by: Camillo Bruni <cbruni@chromium.org>
> Auto-Submit: Patrick Thier <pthier@chromium.org>
> Commit-Queue: Patrick Thier <pthier@chromium.org>
> Cr-Commit-Position: refs/heads/master@{#73186}
>
Change-Id: I9ab4cb60da002ef130f8a21ad10ba69e2826a7b6
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2745335
Reviewed-by: Victor Gomes <victorgomes@chromium.org>
Reviewed-by: Camillo Bruni <cbruni@chromium.org>
Commit-Queue: Patrick Thier <pthier@chromium.org>
Cr-Commit-Position: refs/heads/master@{#73293}
commit 2966c8967a
parent 6cf84e9ab8
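To make the new scheme easier to follow, here is a small self-contained sketch of the idea (this is not V8's actual BytecodeOffsetTableBuilder/BytecodeOffsetIterator; the VLQ helpers, pc offsets and bytecode sizes below are made up for illustration): the table stores one VLQ-encoded pc delta per recorded position, and the bytecode offset is never stored, it is recovered by walking the table and the bytecode array in lockstep.

#include <cstdint>
#include <cstdio>
#include <vector>

// Append |value| as an unsigned VLQ: 7 data bits per byte, high bit set on
// all but the last byte. (Illustrative only, not V8's base::VLQEncodeUnsigned.)
void VLQEncode(std::vector<uint8_t>* out, uint32_t value) {
  do {
    uint8_t byte = value & 0x7f;
    value >>= 7;
    out->push_back(value ? (byte | 0x80) : byte);
  } while (value != 0);
}

// Decode one unsigned VLQ value starting at |*index|, advancing |*index|.
uint32_t VLQDecode(const uint8_t* data, size_t* index) {
  uint32_t result = 0;
  int shift = 0;
  uint8_t byte;
  do {
    byte = data[(*index)++];
    result |= static_cast<uint32_t>(byte & 0x7f) << shift;
    shift += 7;
  } while (byte & 0x80);
  return result;
}

int main() {
  // Builder side: record one pc offset per position (prologue end, then the
  // end of the code emitted for each bytecode) as a delta to the previous pc.
  // The bytecode offset is *not* stored; it is implied by the entry index.
  const std::vector<uint32_t> pcs = {8, 20, 28, 52, 68};  // made-up pc offsets
  std::vector<uint8_t> table;
  uint32_t previous_pc = 0;
  for (uint32_t pc : pcs) {
    VLQEncode(&table, pc - previous_pc);
    previous_pc = pc;
  }

  // Iterator side: walk the table and the bytecode sizes in lockstep; each
  // decoded delta closes the pc range belonging to one bytecode.
  const std::vector<uint32_t> bytecode_sizes = {1, 3, 2, 4};  // made-up sizes
  size_t index = 0;
  uint32_t pc_start = VLQDecode(table.data(), &index);
  uint32_t bytecode_offset = 0;
  for (uint32_t size : bytecode_sizes) {
    uint32_t pc_end = pc_start + VLQDecode(table.data(), &index);
    std::printf("bytecode offset %u -> pc [%u, %u]\n", bytecode_offset,
                pc_start, pc_end);
    bytecode_offset += size;
    pc_start = pc_end;
  }
  return 0;
}

In the actual CL, the first position is recorded right after Prologue() and one more after every VisitSingleBytecode(), so N bytecodes produce N+1 recorded pcs; the iterator pairs consecutive pcs into [start, end] ranges while stepping a BytecodeArrayIterator alongside to know the corresponding bytecode offset.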
BUILD.gn | 2

@@ -2252,6 +2252,7 @@ v8_header_set("v8_internal_headers") {
     "src/baseline/baseline-assembler.h",
     "src/baseline/baseline-compiler.h",
     "src/baseline/baseline.h",
+    "src/baseline/bytecode-offset-iterator.h",
     "src/builtins/accessors.h",
     "src/builtins/builtins-constructor.h",
     "src/builtins/builtins-definitions.h",
@@ -3507,6 +3508,7 @@ v8_source_set("v8_base_without_compiler") {
     "src/ast/variables.cc",
     "src/baseline/baseline-compiler.cc",
     "src/baseline/baseline.cc",
+    "src/baseline/bytecode-offset-iterator.cc",
     "src/builtins/accessors.cc",
     "src/builtins/builtins-api.cc",
     "src/builtins/builtins-array.cc",
src/DEPS | 1

@@ -5,6 +5,7 @@ include_rules = [
   "+src/asmjs/asm-js.h",
   "-src/baseline",
   "+src/baseline/baseline.h",
+  "+src/baseline/bytecode-offset-iterator.h",
   "-src/compiler",
   "+src/compiler/pipeline.h",
   "+src/compiler/code-assembler.h",
src/baseline/baseline-compiler.cc

@@ -275,8 +275,10 @@ void BaselineCompiler::GenerateCode() {
     RuntimeCallTimerScope runtimeTimer(
         stats_, RuntimeCallCounterId::kCompileBaselineVisit);
     Prologue();
+    AddPosition();
     for (; !iterator_.done(); iterator_.Advance()) {
       VisitSingleBytecode();
+      AddPosition();
     }
   }
 }
@@ -382,8 +384,7 @@ void BaselineCompiler::SelectBooleanConstant(
 }
 
 void BaselineCompiler::AddPosition() {
-  bytecode_offset_table_builder_.AddPosition(__ pc_offset(),
-                                             iterator().current_offset());
+  bytecode_offset_table_builder_.AddPosition(__ pc_offset());
 }
 
 void BaselineCompiler::PreVisitSingleBytecode() {
@@ -408,7 +409,6 @@ void BaselineCompiler::VisitSingleBytecode() {
 
   // Record positions of exception handlers.
   if (iterator().current_offset() == *next_handler_offset_) {
-    AddPosition();
     __ ExceptionHandler();
     next_handler_offset_++;
   }
@@ -552,7 +552,6 @@ void BaselineCompiler::CallBuiltin(Builtins::Name builtin, Args... args) {
     __ LoadContext(descriptor.ContextRegister());
   }
   __ CallBuiltin(builtin);
-  AddPosition();
   __ RecordComment("]");
 }
 
@@ -572,7 +571,6 @@ void BaselineCompiler::CallRuntime(Runtime::FunctionId function, Args... args) {
   __ LoadContext(kContextRegister);
   int nargs = __ Push(args...);
   __ CallRuntime(function, nargs);
-  AddPosition();
 }
 
 // Returns into kInterpreterAccumulatorRegister
src/baseline/baseline-compiler.h

@@ -30,17 +30,12 @@ namespace baseline {
 
 class BytecodeOffsetTableBuilder {
  public:
-  void AddPosition(size_t pc_offset, size_t bytecode_offset) {
+  void AddPosition(size_t pc_offset) {
     size_t pc_diff = pc_offset - previous_pc_;
-    size_t bytecode_diff = bytecode_offset - previous_bytecode_;
     DCHECK_GE(pc_diff, 0);
     DCHECK_LE(pc_diff, std::numeric_limits<uint32_t>::max());
-    DCHECK_GE(bytecode_diff, 0);
-    DCHECK_LE(bytecode_diff, std::numeric_limits<uint32_t>::max());
     base::VLQEncodeUnsigned(&bytes_, static_cast<uint32_t>(pc_diff));
-    base::VLQEncodeUnsigned(&bytes_, static_cast<uint32_t>(bytecode_diff));
     previous_pc_ = pc_offset;
-    previous_bytecode_ = bytecode_offset;
   }
 
   template <typename LocalIsolate>
@@ -48,7 +43,6 @@ class BytecodeOffsetTableBuilder {
 
  private:
   size_t previous_pc_ = 0;
-  size_t previous_bytecode_ = 0;
   std::vector<byte> bytes_;
 };
 
src/baseline/bytecode-offset-iterator.cc | 40 (new file)

@@ -0,0 +1,40 @@
+// Copyright 2021 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "src/baseline/bytecode-offset-iterator.h"
+
+#include "src/objects/code-inl.h"
+
+namespace v8 {
+namespace internal {
+namespace baseline {
+
+BytecodeOffsetIterator::BytecodeOffsetIterator(Handle<ByteArray> mapping_table,
+                                               Handle<BytecodeArray> bytecodes)
+    : mapping_table_(mapping_table),
+      data_start_address_(mapping_table_->GetDataStartAddress()),
+      data_length_(mapping_table_->length()),
+      current_index_(0),
+      bytecode_iterator_(bytecodes),
+      local_heap_(LocalHeap::Current()
+                      ? LocalHeap::Current()
+                      : Isolate::Current()->main_thread_local_heap()) {
+  local_heap_->AddGCEpilogueCallback(UpdatePointersCallback, this);
+  current_pc_start_offset_ = ReadPosition();
+  current_pc_end_offset_ = current_pc_start_offset_ + ReadPosition();
+}
+
+BytecodeOffsetIterator::~BytecodeOffsetIterator() {
+  local_heap_->RemoveGCEpilogueCallback(UpdatePointersCallback, this);
+}
+
+void BytecodeOffsetIterator::UpdatePointers() {
+  DisallowGarbageCollection no_gc;
+  DCHECK(!mapping_table_.is_null());
+  data_start_address_ = mapping_table_->GetDataStartAddress();
+}
+
+}  // namespace baseline
+}  // namespace internal
+}  // namespace v8
src/baseline/bytecode-offset-iterator.h | 97 (new file)

@@ -0,0 +1,97 @@
+// Copyright 2021 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef V8_BASELINE_BYTECODE_OFFSET_ITERATOR_H_
+#define V8_BASELINE_BYTECODE_OFFSET_ITERATOR_H_
+
+#include "src/base/vlq.h"
+#include "src/common/globals.h"
+#include "src/interpreter/bytecode-array-iterator.h"
+#include "src/objects/code.h"
+#include "src/objects/fixed-array.h"
+
+namespace v8 {
+namespace internal {
+
+class BytecodeArray;
+
+namespace baseline {
+
+class V8_EXPORT_PRIVATE BytecodeOffsetIterator {
+ public:
+  // TODO(pthier): Create un-handlified version.
+  BytecodeOffsetIterator(Handle<ByteArray> mapping_table,
+                         Handle<BytecodeArray> bytecodes);
+  ~BytecodeOffsetIterator();
+
+  inline void Advance() {
+    DCHECK(!done());
+    current_pc_start_offset_ = current_pc_end_offset_;
+    current_pc_end_offset_ += ReadPosition();
+    bytecode_iterator_.Advance();
+  }
+
+  inline void AdvanceToBytecodeOffset(int bytecode_offset) {
+    while (current_bytecode_offset() < bytecode_offset) {
+      Advance();
+    }
+    DCHECK(bytecode_offset == current_bytecode_offset() ||
+           // If kFunctionEntryBytecodeOffset is passed as bytecode_offset, we
+           // want to return the PC for the first real bytecode.
+           bytecode_offset == kFunctionEntryBytecodeOffset);
+  }
+
+  inline void AdvanceToPCOffset(Address pc_offset) {
+    while (current_pc_end_offset() < pc_offset) {
+      Advance();
+    }
+    // pc could be inside the baseline prologue, which means we didn't record any
+    // position for it.
+    DCHECK(pc_offset > current_pc_start_offset() ||
+           current_bytecode_offset() == 0);
+    DCHECK_LE(pc_offset, current_pc_end_offset());
+  }
+
+  // For this iterator, done() means that it is not safe to advance().
+  // Values are cached, so reads are always allowed.
+  inline bool done() const { return current_index_ >= data_length_; }
+
+  inline Address current_pc_start_offset() const {
+    return current_pc_start_offset_;
+  }
+
+  inline Address current_pc_end_offset() const {
+    return current_pc_end_offset_;
+  }
+
+  inline int current_bytecode_offset() const {
+    return bytecode_iterator_.current_offset();
+  }
+
+  static void UpdatePointersCallback(void* iterator) {
+    reinterpret_cast<BytecodeOffsetIterator*>(iterator)->UpdatePointers();
+  }
+
+  void UpdatePointers();
+
+ private:
+  inline int ReadPosition() {
+    return base::VLQDecodeUnsigned(data_start_address_, &current_index_);
+  }
+
+  Handle<ByteArray> mapping_table_;
+  byte* data_start_address_;
+  int data_length_;
+  int current_index_;
+  Address current_pc_start_offset_;
+  Address current_pc_end_offset_;
+  interpreter::BytecodeArrayIterator bytecode_iterator_;
+  LocalHeap* local_heap_;
+};
+
+}  // namespace baseline
+}  // namespace internal
+}  // namespace v8
+
+#endif  // V8_BASELINE_BYTECODE_OFFSET_ITERATOR_H_
src/d8/d8.cc | 72

@@ -1763,6 +1763,71 @@ void Shell::LogGetAndStop(const v8::FunctionCallbackInfo<v8::Value>& args) {
   args.GetReturnValue().Set(result);
 }
 
+void Shell::TestVerifySourcePositions(
+    const v8::FunctionCallbackInfo<v8::Value>& args) {
+  Isolate* isolate = args.GetIsolate();
+  if (args.Length() != 1 || !args[0]->IsFunction()) {
+    Throw(isolate, "Expected function as single argument.");
+    return;
+  }
+  i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
+  HandleScope handle_scope(isolate);
+  i::Handle<i::JSFunction> function =
+      i::Handle<i::JSFunction>::cast(Utils::OpenHandle(*args[0]));
+  if (!function->shared().HasBytecodeArray()) {
+    Throw(isolate, "Function has no BytecodeArray attached.");
+    return;
+  }
+  i::Handle<i::BytecodeArray> bytecodes =
+      handle(function->shared().GetBytecodeArray(i_isolate), i_isolate);
+  i::interpreter::BytecodeArrayIterator bytecode_iterator(bytecodes);
+  bool has_baseline = function->shared().HasBaselineData();
+  i::Handle<i::ByteArray> bytecode_offsets;
+  std::unique_ptr<i::baseline::BytecodeOffsetIterator> offset_iterator;
+  if (has_baseline) {
+    bytecode_offsets =
+        handle(i::ByteArray::cast(
+                   function->shared().GetCode().bytecode_offset_table()),
+               i_isolate);
+    offset_iterator = std::make_unique<i::baseline::BytecodeOffsetIterator>(
+        bytecode_offsets, bytecodes);
+  }
+  while (!bytecode_iterator.done()) {
+    if (has_baseline) {
+      if (offset_iterator->current_bytecode_offset() !=
+          bytecode_iterator.current_offset()) {
+        Throw(isolate, "Baseline bytecode offset mismatch.");
+        return;
+      }
+      // Check that we map every address to this bytecode correctly.
+      // The start address is exclusive and the end address inclusive.
+      for (i::Address pc = offset_iterator->current_pc_start_offset() + 1;
+           pc <= offset_iterator->current_pc_end_offset(); ++pc) {
+        i::baseline::BytecodeOffsetIterator pc_lookup(bytecode_offsets,
+                                                      bytecodes);
+        pc_lookup.AdvanceToPCOffset(pc);
+        if (pc_lookup.current_bytecode_offset() !=
+            bytecode_iterator.current_offset()) {
+          Throw(isolate, "Baseline bytecode offset mismatch for PC lookup.");
+          return;
+        }
+      }
+    }
+    bytecode_iterator.Advance();
+    if (has_baseline && !bytecode_iterator.done()) {
+      if (offset_iterator->done()) {
+        Throw(isolate, "Missing bytecode(s) in baseline offset mapping.");
+        return;
+      }
+      offset_iterator->Advance();
+    }
+  }
+  if (has_baseline && !offset_iterator->done()) {
+    Throw(isolate, "Excess offsets in baseline offset mapping.");
+    return;
+  }
+}
+
 // async_hooks.createHook() registers functions to be called for different
 // lifetime events of each async operation.
 void Shell::AsyncHooksCreateHook(
@@ -2582,6 +2647,13 @@ Local<ObjectTemplate> Shell::CreateD8Template(Isolate* isolate) {
 
     d8_template->Set(isolate, "log", log_template);
   }
+  {
+    Local<ObjectTemplate> test_template = ObjectTemplate::New(isolate);
+    test_template->Set(
+        isolate, "verifySourcePositions",
+        FunctionTemplate::New(isolate, TestVerifySourcePositions));
+    d8_template->Set(isolate, "test", test_template);
+  }
   return d8_template;
 }
 
src/d8/d8.h

@@ -470,6 +470,8 @@ class Shell : public i::AllStatic {
                              const PropertyCallbackInfo<void>& info);
 
   static void LogGetAndStop(const v8::FunctionCallbackInfo<v8::Value>& args);
+  static void TestVerifySourcePositions(
+      const v8::FunctionCallbackInfo<v8::Value>& args);
 
   static void AsyncHooksCreateHook(
       const v8::FunctionCallbackInfo<v8::Value>& args);
src/execution/frames.cc

@@ -1794,11 +1794,13 @@ void InterpretedFrame::PatchBytecodeArray(BytecodeArray bytecode_array) {
 }
 
 int BaselineFrame::GetBytecodeOffset() const {
-  return LookupCode().GetBytecodeOffsetForBaselinePC(this->pc());
+  return LookupCode().GetBytecodeOffsetForBaselinePC(this->pc(),
+                                                     GetBytecodeArray());
 }
 
 intptr_t BaselineFrame::GetPCForBytecodeOffset(int bytecode_offset) const {
-  return LookupCode().GetBaselinePCForBytecodeOffset(bytecode_offset);
+  return LookupCode().GetBaselinePCForBytecodeOffset(bytecode_offset,
+                                                     GetBytecodeArray());
 }
 
 void BaselineFrame::PatchContext(Context value) {
src/objects/code-inl.h

@@ -6,7 +6,7 @@
 #define V8_OBJECTS_CODE_INL_H_
 
 #include "src/base/memory.h"
-#include "src/base/vlq.h"
+#include "src/baseline/bytecode-offset-iterator.h"
 #include "src/codegen/code-desc.h"
 #include "src/common/assert-scope.h"
 #include "src/execution/isolate.h"
@@ -348,51 +348,37 @@ CodeKind Code::kind() const {
   return KindField::decode(flags);
 }
 
-int Code::GetBytecodeOffsetForBaselinePC(Address baseline_pc) {
+int Code::GetBytecodeOffsetForBaselinePC(Address baseline_pc,
+                                         BytecodeArray bytecodes) {
   DisallowGarbageCollection no_gc;
   CHECK(!is_baseline_prologue_builtin());
   if (is_baseline_leave_frame_builtin()) return kFunctionExitBytecodeOffset;
   CHECK_EQ(kind(), CodeKind::BASELINE);
-  ByteArray data = ByteArray::cast(bytecode_offset_table());
-  Address lookup_pc = 0;
+  // TODO(pthier): We should have an un-handlified version of
+  // BytecodeOffsetIterator for uses like here, where no GC can happen.
+  Isolate* isolate = GetIsolate();
+  HandleScope scope(isolate);
+  baseline::BytecodeOffsetIterator offset_iterator(
+      handle(ByteArray::cast(bytecode_offset_table()), isolate),
+      handle(bytecodes, isolate));
   Address pc = baseline_pc - InstructionStart();
-  int index = 0;
-  int offset = 0;
-  byte* data_start = data.GetDataStartAddress();
-  while (pc > lookup_pc) {
-    lookup_pc += base::VLQDecodeUnsigned(data_start, &index);
-    offset += base::VLQDecodeUnsigned(data_start, &index);
-  }
-  DCHECK_LE(index, data.Size());
-  CHECK_EQ(pc, lookup_pc);
-  return offset;
+  offset_iterator.AdvanceToPCOffset(pc);
+  return offset_iterator.current_bytecode_offset();
 }
 
 uintptr_t Code::GetBaselinePCForBytecodeOffset(int bytecode_offset,
-                                               bool precise) {
+                                               BytecodeArray bytecodes) {
   DisallowGarbageCollection no_gc;
   CHECK_EQ(kind(), CodeKind::BASELINE);
-  ByteArray data = ByteArray::cast(bytecode_offset_table());
-  intptr_t pc = 0;
-  int index = 0;
-  int offset = 0;
-  // TODO(v8:11429,cbruni): clean up
-  // Return the offset for the last bytecode that matches
-  byte* data_start = data.GetDataStartAddress();
-  while (offset < bytecode_offset && index < data.length()) {
-    int delta_pc = base::VLQDecodeUnsigned(data_start, &index);
-    int delta_offset = base::VLQDecodeUnsigned(data_start, &index);
-    if (!precise && (bytecode_offset < offset + delta_offset)) break;
-    pc += delta_pc;
-    offset += delta_offset;
-  }
-  DCHECK_LE(index, data.length());
-  if (precise) {
-    CHECK_EQ(offset, bytecode_offset);
-  } else {
-    CHECK_LE(offset, bytecode_offset);
-  }
-  return pc;
+  // TODO(pthier): We should have an un-handlified version of
+  // BytecodeOffsetIterator for uses like here, where no GC can happen.
+  Isolate* isolate = GetIsolate();
+  HandleScope scope(isolate);
+  baseline::BytecodeOffsetIterator offset_iterator(
+      handle(ByteArray::cast(bytecode_offset_table()), isolate),
+      handle(bytecodes, isolate));
+  offset_iterator.AdvanceToBytecodeOffset(bytecode_offset);
+  return offset_iterator.current_pc_start_offset();
 }
 
 void Code::initialize_flags(CodeKind kind, bool is_turbofanned, int stack_slots,
src/objects/code.h

@@ -385,8 +385,9 @@ class Code : public HeapObject {
                              const CodeDesc& desc);
 
   inline uintptr_t GetBaselinePCForBytecodeOffset(int bytecode_offset,
-                                                  bool precise = true);
-  inline int GetBytecodeOffsetForBaselinePC(Address baseline_pc);
+                                                  BytecodeArray bytecodes);
+  inline int GetBytecodeOffsetForBaselinePC(Address baseline_pc,
+                                            BytecodeArray bytecodes);
 
   // Flushes the instruction cache for the executable instructions of this code
   // object. Make sure to call this while the code is still writable.
src/profiler/profiler-listener.cc

@@ -116,10 +116,19 @@ void ProfilerListener::CodeCreateEvent(LogEventsAndTags tag,
 
     is_shared_cross_origin = script->origin_options().IsSharedCrossOrigin();
 
-  // TODO(v8:11429,cbruni): improve iteration for baseline code
   bool is_baseline = abstract_code->kind() == CodeKind::BASELINE;
   Handle<ByteArray> source_position_table(
       abstract_code->SourcePositionTable(*shared), isolate_);
+  std::unique_ptr<baseline::BytecodeOffsetIterator> baseline_iterator =
+      nullptr;
+  if (is_baseline) {
+    Handle<BytecodeArray> bytecodes(shared->GetBytecodeArray(isolate_),
+                                    isolate_);
+    Handle<ByteArray> bytecode_offsets(
+        abstract_code->GetCode().bytecode_offset_table(), isolate_);
+    baseline_iterator = std::make_unique<baseline::BytecodeOffsetIterator>(
+        bytecode_offsets, bytecodes);
+  }
   // Add each position to the source position table and store inlining stacks
   // for inline positions. We store almost the same information in the
   // profiler as is stored on the code object, except that we transform source
@@ -132,10 +141,9 @@ void ProfilerListener::CodeCreateEvent(LogEventsAndTags tag,
     int code_offset = it.code_offset();
     if (is_baseline) {
       // Use the bytecode offset to calculate pc offset for baseline code.
-      // TODO(v8:11429,cbruni): Speed this up.
-      code_offset = static_cast<int>(
-          abstract_code->GetCode().GetBaselinePCForBytecodeOffset(code_offset,
-                                                                   false));
+      baseline_iterator->AdvanceToBytecodeOffset(code_offset);
+      code_offset =
+          static_cast<int>(baseline_iterator->current_pc_start_offset());
     }
 
     if (inlining_id == SourcePosition::kNotInlined) {
test/cctest/test-cpu-profiler.cc

@@ -558,7 +558,7 @@ v8::CpuProfile* ProfilerHelper::Run(v8::Local<v8::Function> function,
                                     ProfilingMode mode, unsigned max_samples) {
   v8::Local<v8::String> profile_name = v8_str("my_profile");
 
-  profiler_->SetSamplingInterval(100);
+  profiler_->SetSamplingInterval(50);
   profiler_->StartProfiling(profile_name, {mode, max_samples, 0});
 
   v8::internal::CpuProfiler* iprofiler =
test/mjsunit/baseline/verify-bytecode-offsets.js | 31 (new file)

@@ -0,0 +1,31 @@
+// Copyright 2021 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Flags: --always-sparkplug
+
+// This test mainly exists to make ClusterFuzz aware of
+// d8.test.verifySourcePositions.
+
+globalValue = false;
+
+function foo(param1, ...param2) {
+  try {
+    for (let key in param1) { param2.push(key); }
+    for (let a of param1) { param2.push(a); }
+    let [a, b] = param2;
+    let copy = [{literal:1}, {}, [], [1], 1, ...param2];
+    return a + b + copy.length;
+  } catch (e) {
+    return e.toString().match(/[a-zA-Z]+/g);
+  } finally {
+    globalValue = new String(23);
+  }
+  return Math.min(Math.random(), 0.5);
+}
+
+var obj = [...Array(10).keys()];
+obj.foo = 'bar';
+foo(obj, obj);
+
+d8.test.verifySourcePositions(foo);