[compiler] Skip creating unneeded objects for lazy source positions

This changes Compiler::CollectSourcePositions to skip finalization of
the BytecodeArray, constant table, handler table and ScopeInfos, as
well as internalization of AST values, since only the source position
table is needed and the rest would be collected by the GC soon after.

It also now avoids recompiling inner functions that would otherwise be
eagerly compiled.

BytecodeArrayWriter::ToBytecodeArray has been changed to never populate
the source_position_table.
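
In outline, the lazy collection path after this change works roughly as
sketched below. This is a simplified outline assembled from the compiler.cc
hunk further down, not the literal V8 code; the ParseInfo setup and some
error paths are elided and a few names are approximations.

// Sketch of Compiler::CollectSourcePositions after this change. Only the
// source position table is produced: no new BytecodeArray, constant pool,
// handler table or ScopeInfos are finalized, and no AST values are
// internalized on the heap.
bool CollectSourcePositions(Isolate* isolate,
                            Handle<SharedFunctionInfo> shared_info) {
  Handle<BytecodeArray> bytecode(shared_info->GetBytecodeArray(), isolate);
  ParseInfo parse_info(/* set up for a lazy re-parse of shared_info */);

  std::unique_ptr<UnoptimizedCompilationJob> job;
  if (Compiler::Analyze(&parse_info)) {
    // The job reuses |bytecode| and only rebuilds its source position table.
    job = interpreter::Interpreter::NewSourcePositionCollectionJob(
        &parse_info, parse_info.literal(), bytecode, isolate->allocator());
  }
  if (!job || job->ExecuteJob() != CompilationJob::SUCCEEDED ||
      job->FinalizeJob(shared_info, isolate) != CompilationJob::SUCCEEDED) {
    // Most likely stack exhaustion while re-parsing or recompiling.
    bytecode->SetSourcePositionsFailedToCollect();
    return false;
  }
  // FinalizeJob has attached the new table to the existing bytecode; the
  // few temporaries the job allocated become garbage shortly afterwards.
  return true;
}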

Bug: v8:8510
Change-Id: I2db2f2da6b48fde11f17a20d017c1a54c0a34fc2
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1763538
Commit-Queue: Dan Elphick <delphick@chromium.org>
Reviewed-by: Ross McIlroy <rmcilroy@chromium.org>
Cr-Commit-Position: refs/heads/master@{#63365}
Author: Dan Elphick 2019-08-23 13:11:12 +01:00, committed by Commit Bot
Parent: 450128c79e
Commit: 735f3a689b
12 changed files with 149 additions and 77 deletions

View File

@@ -535,20 +535,16 @@ std::unique_ptr<UnoptimizedCompilationJob> GenerateUnoptimizedCode(
DisallowHeapAccess no_heap_access;
DCHECK(inner_function_jobs->empty());
if (!Compiler::Analyze(parse_info)) {
return std::unique_ptr<UnoptimizedCompilationJob>();
std::unique_ptr<UnoptimizedCompilationJob> job;
if (Compiler::Analyze(parse_info)) {
job = ExecuteUnoptimizedCompileJobs(parse_info, parse_info->literal(),
allocator, inner_function_jobs);
}
// Prepare and execute compilation of the outer-most function.
std::unique_ptr<UnoptimizedCompilationJob> outer_function_job(
ExecuteUnoptimizedCompileJobs(parse_info, parse_info->literal(),
allocator, inner_function_jobs));
if (!outer_function_job) return std::unique_ptr<UnoptimizedCompilationJob>();
// Character stream shouldn't be used again.
parse_info->ResetCharacterStream();
return outer_function_job;
return job;
}
MaybeHandle<SharedFunctionInfo> GenerateUnoptimizedCodeForToplevel(
@@ -1234,51 +1230,41 @@ bool Compiler::CollectSourcePositions(Isolate* isolate,
isolate, &parse_info, Compiler::ClearExceptionFlag::CLEAR_EXCEPTION);
}
// Character stream shouldn't be used again.
parse_info.ResetCharacterStream();
// Generate the unoptimized bytecode.
// TODO(v8:8510): Consider forcing preparsing of inner functions to avoid
// wasting time fully parsing them when they won't ever be used.
UnoptimizedCompilationJobList inner_function_jobs;
std::unique_ptr<UnoptimizedCompilationJob> outer_function_job(
GenerateUnoptimizedCode(&parse_info, isolate->allocator(),
&inner_function_jobs));
if (!outer_function_job) {
std::unique_ptr<UnoptimizedCompilationJob> job;
{
if (!Compiler::Analyze(&parse_info)) {
// Recompiling failed probably as a result of stack exhaustion.
bytecode->SetSourcePositionsFailedToCollect();
return FailWithPendingException(
isolate, &parse_info, Compiler::ClearExceptionFlag::CLEAR_EXCEPTION);
}
DCHECK(outer_function_job->compilation_info()->collect_source_positions());
job = interpreter::Interpreter::NewSourcePositionCollectionJob(
&parse_info, parse_info.literal(), bytecode, isolate->allocator());
// TODO(v8:8510) Avoid re-allocating bytecode array/constant pool and
// re-internalizing the ast values. Maybe we could use the
// unoptimized_compilation_flag to signal that all we need is the source
// position table (and we could do the DCHECK that the bytecode array is the
// same in the bytecode-generator, by comparing the real bytecode array on the
// SFI with the off-heap bytecode array).
// Internalize ast values onto the heap.
parse_info.ast_value_factory()->Internalize(isolate);
{
// Allocate scope infos for the literal.
DeclarationScope::AllocateScopeInfos(&parse_info, isolate);
CHECK_EQ(outer_function_job->FinalizeJob(shared_info, isolate),
CompilationJob::SUCCEEDED);
if (!job || job->ExecuteJob() != CompilationJob::SUCCEEDED ||
job->FinalizeJob(shared_info, isolate) != CompilationJob::SUCCEEDED) {
// Recompiling failed probably as a result of stack exhaustion.
bytecode->SetSourcePositionsFailedToCollect();
return FailWithPendingException(
isolate, &parse_info, Compiler::ClearExceptionFlag::CLEAR_EXCEPTION);
}
}
// Update the source position table on the original bytecode.
DCHECK(bytecode->IsBytecodeEqual(
*outer_function_job->compilation_info()->bytecode_array()));
DCHECK(outer_function_job->compilation_info()->has_bytecode_array());
ByteArray source_position_table = outer_function_job->compilation_info()
->bytecode_array()
->SourcePositionTable();
bytecode->set_source_position_table(source_position_table);
DCHECK(job->compilation_info()->collect_source_positions());
// If debugging, make sure that instrumented bytecode has the source position
// table set on it as well.
if (shared_info->HasDebugInfo() &&
shared_info->GetDebugInfo().HasInstrumentedBytecodeArray()) {
ByteArray source_position_table =
job->compilation_info()->bytecode_array()->SourcePositionTable();
shared_info->GetDebugBytecodeArray().set_source_position_table(
source_position_table);
}

View File

@@ -23,7 +23,8 @@ class RegisterTransferWriter final
: public NON_EXPORTED_BASE(BytecodeRegisterOptimizer::BytecodeWriter),
public NON_EXPORTED_BASE(ZoneObject) {
public:
RegisterTransferWriter(BytecodeArrayBuilder* builder) : builder_(builder) {}
explicit RegisterTransferWriter(BytecodeArrayBuilder* builder)
: builder_(builder) {}
~RegisterTransferWriter() override = default;
void EmitLdar(Register input) override { builder_->OutputLdarRaw(input); }
@@ -98,6 +99,20 @@ Handle<BytecodeArray> BytecodeArrayBuilder::ToBytecodeArray(Isolate* isolate) {
isolate, register_count, parameter_count(), handler_table);
}
#ifdef DEBUG
void BytecodeArrayBuilder::CheckBytecodeMatches(
Handle<BytecodeArray> bytecode) {
bytecode_array_writer_.CheckBytecodeMatches(bytecode);
}
#endif
Handle<ByteArray> BytecodeArrayBuilder::ToSourcePositionTable(
Isolate* isolate) {
DCHECK(RemainderOfBlockIsDead());
return bytecode_array_writer_.ToSourcePositionTable(isolate);
}
BytecodeSourceInfo BytecodeArrayBuilder::CurrentSourcePosition(
Bytecode bytecode) {
BytecodeSourceInfo source_position;

View File

@@ -21,6 +21,7 @@
namespace v8 {
namespace internal {
class BytecodeArray;
class FeedbackVectorSpec;
class Isolate;
@@ -42,6 +43,11 @@ class V8_EXPORT_PRIVATE BytecodeArrayBuilder final {
SourcePositionTableBuilder::RECORD_SOURCE_POSITIONS);
Handle<BytecodeArray> ToBytecodeArray(Isolate* isolate);
Handle<ByteArray> ToSourcePositionTable(Isolate* isolate);
#ifdef DEBUG
void CheckBytecodeMatches(Handle<BytecodeArray> bytecode);
#endif
// Get the number of parameters expected by function.
int parameter_count() const {

View File

@@ -12,7 +12,6 @@
#include "src/interpreter/bytecode-source-info.h"
#include "src/interpreter/constant-array-builder.h"
#include "src/interpreter/handler-table-builder.h"
#include "src/logging/log.h"
#include "src/objects/objects-inl.h"
namespace v8 {
@@ -50,18 +49,29 @@ Handle<BytecodeArray> BytecodeArrayWriter::ToBytecodeArray(
bytecode_size, &bytecodes()->front(), frame_size, parameter_count,
constant_pool);
bytecode_array->set_handler_table(*handler_table);
if (!source_position_table_builder_.Lazy()) {
return bytecode_array;
}
Handle<ByteArray> BytecodeArrayWriter::ToSourcePositionTable(Isolate* isolate) {
DCHECK(!source_position_table_builder_.Lazy());
Handle<ByteArray> source_position_table =
source_position_table_builder_.Omit()
? ReadOnlyRoots(isolate).empty_byte_array_handle()
: source_position_table_builder()->ToSourcePositionTable(isolate);
bytecode_array->set_source_position_table(*source_position_table);
LOG_CODE_EVENT(isolate, CodeLinePosInfoRecordEvent(
bytecode_array->GetFirstBytecodeAddress(),
*source_position_table));
: source_position_table_builder_.ToSourcePositionTable(isolate);
return source_position_table;
}
return bytecode_array;
#ifdef DEBUG
void BytecodeArrayWriter::CheckBytecodeMatches(Handle<BytecodeArray> bytecode) {
int bytecode_size = static_cast<int>(bytecodes()->size());
const byte* bytecode_ptr = &bytecodes()->front();
CHECK_EQ(bytecode_size, bytecode->length());
for (int i = 0; i < bytecode_size; ++i) {
CHECK_EQ(bytecode_ptr[i], bytecode->get(i));
}
}
#endif
void BytecodeArrayWriter::Write(BytecodeNode* node) {
DCHECK(!Bytecodes::IsJump(node->bytecode()));

View File

@@ -55,6 +55,12 @@ class V8_EXPORT_PRIVATE BytecodeArrayWriter final {
int parameter_count,
Handle<ByteArray> handler_table);
Handle<ByteArray> ToSourcePositionTable(Isolate* isolate);
#ifdef DEBUG
void CheckBytecodeMatches(Handle<BytecodeArray> bytecode);
#endif
bool RemainderOfBlockIsDead() const { return exit_seen_in_block_; }
private:

View File

@@ -15,6 +15,7 @@
#include "src/interpreter/bytecode-label.h"
#include "src/interpreter/bytecode-register-allocator.h"
#include "src/interpreter/control-flow-builders.h"
#include "src/logging/log.h"
#include "src/objects/debug-objects.h"
#include "src/objects/literal-objects-inl.h"
#include "src/objects/objects-inl.h"
@@ -1065,6 +1066,28 @@ Handle<BytecodeArray> BytecodeGenerator::FinalizeBytecode(
return bytecode_array;
}
Handle<ByteArray> BytecodeGenerator::FinalizeSourcePositionTable(
Isolate* isolate) {
DCHECK_EQ(ThreadId::Current(), isolate->thread_id());
#ifdef DEBUG
// Unoptimized compilation should be context-independent. Verify that we don't
// access the native context by nulling it out during finalization.
NullContextScope null_context_scope(isolate);
builder()->CheckBytecodeMatches(info_->bytecode_array());
#endif
Handle<ByteArray> source_position_table =
builder()->ToSourcePositionTable(isolate);
LOG_CODE_EVENT(isolate,
CodeLinePosInfoRecordEvent(
info_->bytecode_array()->GetFirstBytecodeAddress(),
*source_position_table));
return source_position_table;
}
void BytecodeGenerator::AllocateDeferredConstants(Isolate* isolate,
Handle<Script> script) {
// Build global declaration pair arrays.
@@ -1412,7 +1435,8 @@ void BytecodeGenerator::VisitFunctionDeclaration(FunctionDeclaration* decl) {
BuildVariableAssignment(variable, Token::INIT, HoleCheckMode::kElided);
break;
}
DCHECK_IMPLIES(decl->fun()->ShouldEagerCompile(),
DCHECK_IMPLIES(
eager_inner_literals_ != nullptr && decl->fun()->ShouldEagerCompile(),
IsInEagerLiterals(decl->fun(), *eager_inner_literals_));
}

View File

@@ -18,6 +18,7 @@ namespace internal {
class AstNodeSourceRanges;
class AstStringConstants;
class BytecodeArray;
class UnoptimizedCompilationInfo;
enum class SourceRangeKind;
@@ -38,6 +39,7 @@ class BytecodeGenerator final : public AstVisitor<BytecodeGenerator> {
void GenerateBytecode(uintptr_t stack_limit);
Handle<BytecodeArray> FinalizeBytecode(Isolate* isolate,
Handle<Script> script);
Handle<ByteArray> FinalizeSourcePositionTable(Isolate* isolate);
#define DECLARE_VISIT(type) void Visit##type(type* node);
AST_NODE_LIST(DECLARE_VISIT)

View File

@@ -210,11 +210,21 @@ InterpreterCompilationJob::Status InterpreterCompilationJob::FinalizeJobImpl(
TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"),
"V8.CompileIgnitionFinalization");
Handle<BytecodeArray> bytecodes =
generator()->FinalizeBytecode(isolate, parse_info()->script());
Handle<BytecodeArray> bytecodes = compilation_info_.bytecode_array();
if (bytecodes.is_null()) {
bytecodes = generator()->FinalizeBytecode(isolate, parse_info()->script());
if (generator()->HasStackOverflow()) {
return FAILED;
}
compilation_info()->SetBytecodeArray(bytecodes);
}
if (compilation_info()->SourcePositionRecordingMode() ==
SourcePositionTableBuilder::RecordingMode::RECORD_SOURCE_POSITIONS) {
Handle<ByteArray> source_position_table =
generator()->FinalizeSourcePositionTable(isolate);
bytecodes->set_source_position_table(*source_position_table);
}
if (ShouldPrintBytecode(shared_info)) {
StdoutStream os;
@@ -226,7 +236,6 @@ InterpreterCompilationJob::Status InterpreterCompilationJob::FinalizeJobImpl(
os << std::flush;
}
compilation_info()->SetBytecodeArray(bytecodes);
return SUCCEEDED;
}
@@ -238,6 +247,16 @@ std::unique_ptr<UnoptimizedCompilationJob> Interpreter::NewCompilationJob(
parse_info, literal, allocator, eager_inner_literals);
}
std::unique_ptr<UnoptimizedCompilationJob>
Interpreter::NewSourcePositionCollectionJob(
ParseInfo* parse_info, FunctionLiteral* literal,
Handle<BytecodeArray> existing_bytecode, AccountingAllocator* allocator) {
auto job = base::make_unique<InterpreterCompilationJob>(parse_info, literal,
allocator, nullptr);
job->compilation_info()->SetBytecodeArray(existing_bytecode);
return job;
}
void Interpreter::ForEachBytecode(
const std::function<void(Bytecode, OperandScale)>& f) {
constexpr OperandScale kOperandScales[] = {

View File

@@ -18,10 +18,11 @@
namespace v8 {
namespace internal {
class Isolate;
class BytecodeArray;
class Callable;
class UnoptimizedCompilationJob;
class FunctionLiteral;
class Isolate;
class ParseInfo;
class RootVisitor;
class SetupIsolateDelegate;
@@ -48,6 +49,14 @@ class Interpreter {
AccountingAllocator* allocator,
std::vector<FunctionLiteral*>* eager_inner_literals);
// Creates a compilation job which will generate source positions for
// |literal| and when finalized, store the result into |existing_bytecode|.
static std::unique_ptr<UnoptimizedCompilationJob>
NewSourcePositionCollectionJob(ParseInfo* parse_info,
FunctionLiteral* literal,
Handle<BytecodeArray> existing_bytecode,
AccountingAllocator* allocator);
// If the bytecode handler for |bytecode| and |operand_scale| has not yet
// been loaded, deserialize it. Then return the handler.
V8_EXPORT_PRIVATE Code GetBytecodeHandler(Bytecode bytecode,
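
A hypothetical call site for the new job looks roughly like the snippet
below (in this change the only real caller is Compiler::CollectSourcePositions
in the compiler.cc hunk above; |parse_info|, |existing_bytecode| and
|shared_info| are assumed to be set up by that caller):

// The job reuses |existing_bytecode|: FinalizeJobImpl sees a pre-set
// bytecode array, skips FinalizeBytecode, and only installs the freshly
// generated source position table on it.
std::unique_ptr<UnoptimizedCompilationJob> job =
    interpreter::Interpreter::NewSourcePositionCollectionJob(
        &parse_info, parse_info.literal(), existing_bytecode,
        isolate->allocator());
if (job && job->ExecuteJob() == CompilationJob::SUCCEEDED &&
    job->FinalizeJob(shared_info, isolate) == CompilationJob::SUCCEEDED) {
  // |existing_bytecode| now carries an up-to-date source position table.
}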

View File

@@ -1089,15 +1089,5 @@ const char* DependentCode::DependencyGroupName(DependencyGroup group) {
UNREACHABLE();
}
bool BytecodeArray::IsBytecodeEqual(const BytecodeArray other) const {
if (length() != other.length()) return false;
for (int i = 0; i < length(); ++i) {
if (get(i) != other.get(i)) return false;
}
return true;
}
} // namespace internal
} // namespace v8

View File

@@ -827,9 +827,6 @@ class BytecodeArray : public FixedArrayBase {
// is deterministic.
inline void clear_padding();
// Compares only the bytecode array but not any of the header fields.
bool IsBytecodeEqual(const BytecodeArray other) const;
// Layout description.
DEFINE_FIELD_OFFSET_CONSTANTS(FixedArrayBase::kHeaderSize,
TORQUE_GENERATED_BYTECODE_ARRAY_FIELDS)

View File

@@ -146,6 +146,8 @@ TEST_F(BytecodeArrayWriterUnittest, SimpleExample) {
Handle<BytecodeArray> bytecode_array =
writer()->ToBytecodeArray(isolate(), 0, 0, factory()->empty_byte_array());
bytecode_array->set_source_position_table(
*writer()->ToSourcePositionTable(isolate()));
CHECK_EQ(bytecodes()->size(), arraysize(expected_bytes));
PositionTableEntry expected_positions[] = {
@@ -236,6 +238,8 @@ TEST_F(BytecodeArrayWriterUnittest, ComplexExample) {
Handle<BytecodeArray> bytecode_array =
writer()->ToBytecodeArray(isolate(), 0, 0, factory()->empty_byte_array());
bytecode_array->set_source_position_table(
*writer()->ToSourcePositionTable(isolate()));
SourcePositionTableIterator source_iterator(
bytecode_array->SourcePositionTable());
for (size_t i = 0; i < arraysize(expected_positions); ++i) {
@@ -288,6 +292,8 @@ TEST_F(BytecodeArrayWriterUnittest, ElideNoneffectfulBytecodes) {
Handle<BytecodeArray> bytecode_array =
writer()->ToBytecodeArray(isolate(), 0, 0, factory()->empty_byte_array());
bytecode_array->set_source_position_table(
*writer()->ToSourcePositionTable(isolate()));
SourcePositionTableIterator source_iterator(
bytecode_array->SourcePositionTable());
for (size_t i = 0; i < arraysize(expected_positions); ++i) {
@@ -356,6 +362,8 @@ TEST_F(BytecodeArrayWriterUnittest, DeadcodeElimination) {
Handle<BytecodeArray> bytecode_array =
writer()->ToBytecodeArray(isolate(), 0, 0, factory()->empty_byte_array());
bytecode_array->set_source_position_table(
*writer()->ToSourcePositionTable(isolate()));
SourcePositionTableIterator source_iterator(
bytecode_array->SourcePositionTable());
for (size_t i = 0; i < arraysize(expected_positions); ++i) {
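
Because ToBytecodeArray no longer fills in the source position table, any
caller that still wants positions on the resulting array has to attach them
explicitly, as the updated tests above now do. The general pattern (taken
from those tests) is:

// Two-step finalization: bytecode first, source positions second.
Handle<BytecodeArray> bytecode_array =
    writer()->ToBytecodeArray(isolate(), 0, 0, factory()->empty_byte_array());
bytecode_array->set_source_position_table(
    *writer()->ToSourcePositionTable(isolate()));

Keeping the two steps separate is what allows Compiler::CollectSourcePositions
to build only the table without re-finalizing the bytecode.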