diff --git a/src/ast/scopes.cc b/src/ast/scopes.cc
index 8b90286e9c..1375c26f15 100644
--- a/src/ast/scopes.cc
+++ b/src/ast/scopes.cc
@@ -174,7 +174,7 @@ ModuleScope::ModuleScope(DeclarationScope* script_scope,
   DeclareThis(avfactory);
 }
 
-ModuleScope::ModuleScope(Isolate* isolate, Handle<ScopeInfo> scope_info,
+ModuleScope::ModuleScope(Handle<ScopeInfo> scope_info,
                          AstValueFactory* avfactory)
     : DeclarationScope(avfactory->zone(), MODULE_SCOPE, avfactory, scope_info),
       module_descriptor_(nullptr) {
@@ -188,7 +188,8 @@ ClassScope::ClassScope(Zone* zone, Scope* outer_scope, bool is_anonymous)
   set_language_mode(LanguageMode::kStrict);
 }
 
-ClassScope::ClassScope(Isolate* isolate, Zone* zone,
+template <typename IsolateT>
+ClassScope::ClassScope(IsolateT* isolate, Zone* zone,
                        AstValueFactory* ast_value_factory,
                        Handle<ScopeInfo> scope_info)
     : Scope(zone, CLASS_SCOPE, ast_value_factory, scope_info),
@@ -222,6 +223,12 @@ ClassScope::ClassScope(Isolate* isolate, Zone* zone,
                              Context::MIN_CONTEXT_SLOTS + index);
   }
 }
+template ClassScope::ClassScope(Isolate* isolate, Zone* zone,
+                                AstValueFactory* ast_value_factory,
+                                Handle<ScopeInfo> scope_info);
+template ClassScope::ClassScope(LocalIsolate* isolate, Zone* zone,
+                                AstValueFactory* ast_value_factory,
+                                Handle<ScopeInfo> scope_info);
 
 Scope::Scope(Zone* zone, ScopeType scope_type,
              AstValueFactory* ast_value_factory, Handle<ScopeInfo> scope_info)
@@ -398,7 +405,8 @@ bool Scope::ContainsAsmModule() const {
 }
 #endif  // V8_ENABLE_WEBASSEMBLY
 
-Scope* Scope::DeserializeScopeChain(Isolate* isolate, Zone* zone,
+template <typename IsolateT>
+Scope* Scope::DeserializeScopeChain(IsolateT* isolate, Zone* zone,
                                     ScopeInfo scope_info,
                                     DeclarationScope* script_scope,
                                     AstValueFactory* ast_value_factory,
@@ -454,7 +462,7 @@ Scope* Scope::DeserializeScopeChain(Isolate* isolate, Zone* zone,
                                           handle(scope_info, isolate));
       }
     } else if (scope_info.scope_type() == MODULE_SCOPE) {
-      outer_scope = zone->New<ModuleScope>(isolate, handle(scope_info, isolate),
+      outer_scope = zone->New<ModuleScope>(handle(scope_info, isolate),
                                            ast_value_factory);
     } else {
       DCHECK_EQ(scope_info.scope_type(), CATCH_SCOPE);
@@ -502,6 +510,17 @@ Scope* Scope::DeserializeScopeChain(Isolate* isolate, Zone* zone,
   return innermost_scope;
 }
 
+template EXPORT_TEMPLATE_DEFINE(V8_EXPORT_PRIVATE)
+    Scope* Scope::DeserializeScopeChain(
+        Isolate* isolate, Zone* zone, ScopeInfo scope_info,
+        DeclarationScope* script_scope, AstValueFactory* ast_value_factory,
+        DeserializationMode deserialization_mode);
+template EXPORT_TEMPLATE_DEFINE(V8_EXPORT_PRIVATE)
+    Scope* Scope::DeserializeScopeChain(
+        LocalIsolate* isolate, Zone* zone, ScopeInfo scope_info,
+        DeclarationScope* script_scope, AstValueFactory* ast_value_factory,
+        DeserializationMode deserialization_mode);
+
 DeclarationScope* Scope::AsDeclarationScope() {
   DCHECK(is_declaration_scope());
   return static_cast<DeclarationScope*>(this);
diff --git a/src/ast/scopes.h b/src/ast/scopes.h
index 3dbdca1e10..dd1a693255 100644
--- a/src/ast/scopes.h
+++ b/src/ast/scopes.h
@@ -163,7 +163,9 @@ class V8_EXPORT_PRIVATE Scope : public NON_EXPORTED_BASE(ZoneObject) {
 
   enum class DeserializationMode { kIncludingVariables, kScopesOnly };
 
-  static Scope* DeserializeScopeChain(Isolate* isolate, Zone* zone,
+  template <typename IsolateT>
+  EXPORT_TEMPLATE_DECLARE(V8_EXPORT_PRIVATE)
+  static Scope* DeserializeScopeChain(IsolateT* isolate, Zone* zone,
                                       ScopeInfo scope_info,
                                       DeclarationScope* script_scope,
                                       AstValueFactory* ast_value_factory,
@@ -1363,8 +1365,7 @@ class ModuleScope final : public DeclarationScope {
   ModuleScope(DeclarationScope* script_scope, AstValueFactory* avfactory);
 
   // Deserialization. Does not restore the module descriptor.
-  ModuleScope(Isolate* isolate, Handle<ScopeInfo> scope_info,
-              AstValueFactory* avfactory);
+  ModuleScope(Handle<ScopeInfo> scope_info, AstValueFactory* avfactory);
 
   // Returns nullptr in a deserialized scope.
   SourceTextModuleDescriptor* module() const { return module_descriptor_; }
@@ -1381,7 +1382,8 @@ class V8_EXPORT_PRIVATE ClassScope : public Scope {
  public:
   ClassScope(Zone* zone, Scope* outer_scope, bool is_anonymous);
   // Deserialization.
-  ClassScope(Isolate* isolate, Zone* zone, AstValueFactory* ast_value_factory,
+  template <typename IsolateT>
+  ClassScope(IsolateT* isolate, Zone* zone, AstValueFactory* ast_value_factory,
              Handle<ScopeInfo> scope_info);
 
   struct HeritageParsingScope {
diff --git a/src/codegen/compiler.cc b/src/codegen/compiler.cc
index ecee7a59af..6a6f57614b 100644
--- a/src/codegen/compiler.cc
+++ b/src/codegen/compiler.cc
@@ -1406,14 +1406,17 @@ BackgroundCompileTask::BackgroundCompileTask(ScriptStreamingData* streamed_data,
 
 BackgroundCompileTask::BackgroundCompileTask(
     Isolate* isolate, Handle<SharedFunctionInfo> shared_info,
+    const UnoptimizedCompileState* compile_state,
    std::unique_ptr<Utf16CharacterStream> character_stream,
     ProducedPreparseData* preparse_data,
     WorkerThreadRuntimeCallStats* worker_thread_runtime_stats,
     TimedHistogram* timer, int max_stack_size)
     : isolate_for_local_isolate_(isolate),
+      // TODO(leszeks): Create this from parent compile flags, to avoid
+      // accessing the Isolate.
       flags_(
           UnoptimizedCompileFlags::ForFunctionCompile(isolate, *shared_info)),
-      compile_state_(isolate),
+      compile_state_(*compile_state),
       info_(std::make_unique<ParseInfo>(isolate, flags_, &compile_state_)),
       stack_size_(max_stack_size),
       worker_thread_runtime_call_stats_(worker_thread_runtime_stats),
@@ -1530,7 +1533,23 @@ void BackgroundCompileTask::Run() {
   // Parser needs to stay alive for finalizing the parsing on the main
   // thread.
   Parser parser(&isolate, info_.get(), script_);
-  parser.InitializeEmptyScopeChain(info_.get());
+  if (flags().is_toplevel()) {
+    parser.InitializeEmptyScopeChain(info_.get());
+  } else {
+    // TODO(leszeks): Consider keeping Scope zones alive between compile tasks
+    // and passing the Scope for the FunctionLiteral through here directly
+    // without copying/deserializing.
+    Handle<SharedFunctionInfo> shared_info =
+        input_shared_info_.ToHandleChecked();
+    MaybeHandle<ScopeInfo> maybe_outer_scope_info;
+    if (shared_info->HasOuterScopeInfo()) {
+      maybe_outer_scope_info =
+          handle(shared_info->GetOuterScopeInfo(), &isolate);
+    }
+    parser.DeserializeScopeChain(
+        &isolate, info_.get(), maybe_outer_scope_info,
+        Scope::DeserializationMode::kIncludingVariables);
+  }
   parser.ParseOnBackground(&isolate, info_.get(), start_position_,
                            end_position_, function_literal_id_);
 
@@ -1760,6 +1779,9 @@ bool Compiler::CollectSourcePositions(Isolate* isolate,
   UnoptimizedCompileFlags flags =
       UnoptimizedCompileFlags::ForFunctionCompile(isolate, *shared_info);
   flags.set_collect_source_positions(true);
+  // Prevent parallel tasks from being spawned by this job.
+  flags.set_post_parallel_compile_tasks_for_eager_toplevel(false);
+  flags.set_post_parallel_compile_tasks_for_lazy(false);
 
   UnoptimizedCompileState compile_state(isolate);
   ParseInfo parse_info(isolate, flags, &compile_state);
diff --git a/src/codegen/compiler.h b/src/codegen/compiler.h
index 34f5bb1f75..c6dd7ef287 100644
--- a/src/codegen/compiler.h
+++ b/src/codegen/compiler.h
@@ -511,6 +511,7 @@ class V8_EXPORT_PRIVATE BackgroundCompileTask {
   // Compiler::FinalizeBackgroundCompileTask.
  BackgroundCompileTask(
       Isolate* isolate, Handle<SharedFunctionInfo> shared_info,
+      const UnoptimizedCompileState* compile_state,
       std::unique_ptr<Utf16CharacterStream> character_stream,
       ProducedPreparseData* preparse_data,
       WorkerThreadRuntimeCallStats* worker_thread_runtime_stats,
diff --git a/src/compiler-dispatcher/lazy-compile-dispatcher.cc b/src/compiler-dispatcher/lazy-compile-dispatcher.cc
index 51ff996561..bdeba498bf 100644
--- a/src/compiler-dispatcher/lazy-compile-dispatcher.cc
+++ b/src/compiler-dispatcher/lazy-compile-dispatcher.cc
@@ -79,6 +79,7 @@ LazyCompileDispatcher::~LazyCompileDispatcher() {
 
 void LazyCompileDispatcher::Enqueue(
     LocalIsolate* isolate, Handle<SharedFunctionInfo> shared_info,
+    const UnoptimizedCompileState* compile_state,
     std::unique_ptr<Utf16CharacterStream> character_stream,
     ProducedPreparseData* preparse_data) {
   TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"),
@@ -87,16 +88,16 @@ void LazyCompileDispatcher::Enqueue(
 
   std::unique_ptr<Job> job =
       std::make_unique<Job>(std::make_unique<BackgroundCompileTask>(
-          isolate_, shared_info, std::move(character_stream), preparse_data,
-          worker_thread_runtime_call_stats_, background_compile_timer_,
-          static_cast<int>(max_stack_size_)));
+          isolate_, shared_info, compile_state, std::move(character_stream),
+          preparse_data, worker_thread_runtime_call_stats_,
+          background_compile_timer_, static_cast<int>(max_stack_size_)));
 
   // Post a background worker task to perform the compilation on the worker
   // thread.
   {
     base::MutexGuard lock(&mutex_);
     if (trace_compiler_dispatcher_) {
-      PrintF("LazyCompileDispatcher: enqueued job for ");
+      PrintF("LazyCompileDispatcher: enqueued job for ");
       shared_info->ShortPrint();
       PrintF("\n");
     }
@@ -208,16 +209,18 @@ void LazyCompileDispatcher::AbortAll() {
 
   {
     base::MutexGuard lock(&mutex_);
-    SharedToJobMap::IteratableScope iteratable_scope(
-        &shared_to_unoptimized_job_);
-    for (Job** job_entry : iteratable_scope) {
-      Job* job = *job_entry;
-      DCHECK_NE(job->state, Job::State::kRunning);
-      DCHECK_NE(job->state, Job::State::kAbortRequested);
-      delete job;
+    {
+      SharedToJobMap::IteratableScope iteratable_scope(
+          &shared_to_unoptimized_job_);
+      for (Job** job_entry : iteratable_scope) {
+        Job* job = *job_entry;
+        DCHECK_NE(job->state, Job::State::kRunning);
+        DCHECK_NE(job->state, Job::State::kAbortRequested);
+        delete job;
+      }
     }
+    shared_to_unoptimized_job_.Clear();
   }
-  shared_to_unoptimized_job_.Clear();
 }
 
 LazyCompileDispatcher::Job* LazyCompileDispatcher::GetJobFor(
@@ -311,27 +314,35 @@ void LazyCompileDispatcher::DoIdleWork(double deadline_in_seconds) {
   Job* job;
   {
     base::MutexGuard lock(&mutex_);
-    SharedToJobMap::IteratableScope iteratable_scope(
-        &shared_to_unoptimized_job_);
+    {
+      SharedToJobMap::IteratableScope iteratable_scope(
+          &shared_to_unoptimized_job_);
 
-    auto it = iteratable_scope.begin();
-    auto end = iteratable_scope.end();
-    for (; it != end; ++it) {
-      job = *it.entry();
-      if (job->state == Job::State::kReadyToFinalize ||
-          job->state == Job::State::kAborted) {
-        function = SharedFunctionInfo::cast(it.key());
-        break;
+      auto it = iteratable_scope.begin();
+      auto end = iteratable_scope.end();
+      for (; it != end; ++it) {
+        job = *it.entry();
+        if (job->state == Job::State::kReadyToFinalize ||
+            job->state == Job::State::kAborted) {
+          function = SharedFunctionInfo::cast(it.key());
+          break;
+        }
       }
+      // Since we hold the lock here, we can be sure no jobs have become ready
+      // for finalization while we looped through the list.
+      if (it == end) return;
     }
-    // Since we hold the lock here, we can be sure no jobs have become ready
-    // for finalization while we looped through the list.
- if (it == end) return; DCHECK_EQ(pending_background_jobs_.find(job), pending_background_jobs_.end()); + shared_to_unoptimized_job_.Delete(function, &job); + } + + if (trace_compiler_dispatcher_) { + PrintF("LazyCompileDispatcher: idle finalizing job for "); + function.ShortPrint(); + PrintF("\n"); } - shared_to_unoptimized_job_.Delete(function, &job); if (job->state == Job::State::kReadyToFinalize) { HandleScope scope(isolate_); diff --git a/src/compiler-dispatcher/lazy-compile-dispatcher.h b/src/compiler-dispatcher/lazy-compile-dispatcher.h index 17a109ba4b..9a6dc85240 100644 --- a/src/compiler-dispatcher/lazy-compile-dispatcher.h +++ b/src/compiler-dispatcher/lazy-compile-dispatcher.h @@ -34,6 +34,7 @@ class AstValueFactory; class BackgroundCompileTask; class CancelableTaskManager; class UnoptimizedCompileJob; +class UnoptimizedCompileState; class FunctionLiteral; class Isolate; class ParseInfo; @@ -84,6 +85,7 @@ class V8_EXPORT_PRIVATE LazyCompileDispatcher { ~LazyCompileDispatcher(); void Enqueue(LocalIsolate* isolate, Handle shared_info, + const UnoptimizedCompileState* compile_state, std::unique_ptr character_stream, ProducedPreparseData* preparse_data); diff --git a/src/flags/flag-definitions.h b/src/flags/flag-definitions.h index 1988c31085..03dcf47f76 100644 --- a/src/flags/flag-definitions.h +++ b/src/flags/flag-definitions.h @@ -440,7 +440,8 @@ DEFINE_NEG_IMPLICATION(enable_third_party_heap, turbo_allocation_folding) DEFINE_NEG_IMPLICATION(enable_third_party_heap, concurrent_recompilation) DEFINE_NEG_IMPLICATION(enable_third_party_heap, concurrent_inlining) DEFINE_NEG_IMPLICATION(enable_third_party_heap, script_streaming) -DEFINE_NEG_IMPLICATION(enable_third_party_heap, parallel_compile_tasks) +DEFINE_NEG_IMPLICATION(enable_third_party_heap, + parallel_compile_tasks_for_eager_toplevel) DEFINE_NEG_IMPLICATION(enable_third_party_heap, use_marking_progress_bar) DEFINE_NEG_IMPLICATION(enable_third_party_heap, move_object_start) DEFINE_NEG_IMPLICATION(enable_third_party_heap, concurrent_marking) @@ -1566,11 +1567,17 @@ DEFINE_BOOL(compilation_cache, true, "enable compilation cache") DEFINE_BOOL(cache_prototype_transitions, true, "cache prototype transitions") // lazy-compile-dispatcher.cc -DEFINE_BOOL(parallel_compile_tasks, false, "enable parallel compile tasks") DEFINE_BOOL(lazy_compile_dispatcher, false, "enable compiler dispatcher") -DEFINE_IMPLICATION(parallel_compile_tasks, lazy_compile_dispatcher) DEFINE_BOOL(trace_compiler_dispatcher, false, "trace compiler dispatcher activity") +DEFINE_BOOL( + parallel_compile_tasks_for_eager_toplevel, false, + "spawn parallel compile tasks for eagerly compiled, top-level functions") +DEFINE_IMPLICATION(parallel_compile_tasks_for_eager_toplevel, + lazy_compile_dispatcher) +DEFINE_BOOL(parallel_compile_tasks_for_lazy, false, + "spawn parallel compile tasks for all lazily compiled functions") +DEFINE_IMPLICATION(parallel_compile_tasks_for_lazy, lazy_compile_dispatcher) // cpu-profiler.cc DEFINE_INT(cpu_profiler_sampling_interval, 1000, @@ -2160,9 +2167,10 @@ DEFINE_NEG_IMPLICATION(predictable, memory_reducer) // before. Audit them, and remove any unneeded implications. 
DEFINE_IMPLICATION(predictable, single_threaded_gc) DEFINE_NEG_IMPLICATION(predictable, concurrent_recompilation) -DEFINE_NEG_IMPLICATION(predictable, lazy_compile_dispatcher) -DEFINE_NEG_IMPLICATION(predictable, parallel_compile_tasks) DEFINE_NEG_IMPLICATION(predictable, stress_concurrent_inlining) +DEFINE_NEG_IMPLICATION(predictable, lazy_compile_dispatcher) +DEFINE_NEG_IMPLICATION(predictable, parallel_compile_tasks_for_eager_toplevel) +DEFINE_NEG_IMPLICATION(predictable, parallel_compile_tasks_for_lazy) DEFINE_BOOL(predictable_gc_schedule, false, "Predictable garbage collection schedule. Fixes heap growing, " @@ -2179,9 +2187,11 @@ DEFINE_NEG_IMPLICATION(predictable_gc_schedule, memory_reducer) DEFINE_BOOL(single_threaded, false, "disable the use of background tasks") DEFINE_IMPLICATION(single_threaded, single_threaded_gc) DEFINE_NEG_IMPLICATION(single_threaded, concurrent_recompilation) -DEFINE_NEG_IMPLICATION(single_threaded, lazy_compile_dispatcher) -DEFINE_NEG_IMPLICATION(single_threaded, parallel_compile_tasks) DEFINE_NEG_IMPLICATION(single_threaded, stress_concurrent_inlining) +DEFINE_NEG_IMPLICATION(single_threaded, lazy_compile_dispatcher) +DEFINE_NEG_IMPLICATION(single_threaded, + parallel_compile_tasks_for_eager_toplevel) +DEFINE_NEG_IMPLICATION(single_threaded, parallel_compile_tasks_for_lazy) // // Parallel and concurrent GC (Orinoco) related flags. diff --git a/src/interpreter/bytecode-generator.cc b/src/interpreter/bytecode-generator.cc index e31c880e65..853f1f27bc 100644 --- a/src/interpreter/bytecode-generator.cc +++ b/src/interpreter/bytecode-generator.cc @@ -2522,9 +2522,11 @@ void BytecodeGenerator::AddToEagerLiteralsIfEager(FunctionLiteral* literal) { // Only parallel compile when there's a script (not the case for source // position collection). if (!script_.is_null() && literal->should_parallel_compile()) { - // If we are already eagerly compiling this function, it must be because of - // --parallel-compile-tasks. - DCHECK_IMPLIES(!literal->ShouldEagerCompile(), FLAG_parallel_compile_tasks); + // If we should normally be eagerly compiling this function, we must be here + // because of post_parallel_compile_tasks_for_eager_toplevel. + DCHECK_IMPLIES( + literal->ShouldEagerCompile(), + info()->flags().post_parallel_compile_tasks_for_eager_toplevel()); // There exists a lazy compile dispatcher. DCHECK(info()->state()->dispatcher()); // There exists a cloneable character stream. 
@@ -2541,7 +2543,8 @@ void BytecodeGenerator::AddToEagerLiteralsIfEager(FunctionLiteral* literal) { shared_info = Compiler::GetSharedFunctionInfo(literal, script_, local_isolate_); info()->state()->dispatcher()->Enqueue( - local_isolate_, shared_info, info()->character_stream()->Clone(), + local_isolate_, shared_info, info()->state(), + info()->character_stream()->Clone(), literal->produced_preparse_data()); } } else if (eager_inner_literals_ && literal->ShouldEagerCompile()) { diff --git a/src/logging/runtime-call-stats-scope.h b/src/logging/runtime-call-stats-scope.h index 90bdee58ac..ffdf08378d 100644 --- a/src/logging/runtime-call-stats-scope.h +++ b/src/logging/runtime-call-stats-scope.h @@ -32,8 +32,8 @@ RuntimeCallTimerScope::RuntimeCallTimerScope(Isolate* isolate, RuntimeCallTimerScope::RuntimeCallTimerScope(LocalIsolate* isolate, RuntimeCallCounterId counter_id) { - DCHECK_NOT_NULL(isolate->runtime_call_stats()); if (V8_LIKELY(!TracingFlags::is_runtime_stats_enabled())) return; + DCHECK_NOT_NULL(isolate->runtime_call_stats()); stats_ = isolate->runtime_call_stats(); stats_->Enter(&timer_, counter_id); } diff --git a/src/parsing/parse-info.cc b/src/parsing/parse-info.cc index 43dd80ccc3..33b4de78b0 100644 --- a/src/parsing/parse-info.cc +++ b/src/parsing/parse-info.cc @@ -37,6 +37,10 @@ UnoptimizedCompileFlags::UnoptimizedCompileFlags(Isolate* isolate, set_collect_source_positions(!FLAG_enable_lazy_source_positions || isolate->NeedsDetailedOptimizedCodeLineInfo()); set_allow_harmony_top_level_await(FLAG_harmony_top_level_await); + set_post_parallel_compile_tasks_for_eager_toplevel( + FLAG_parallel_compile_tasks_for_eager_toplevel); + set_post_parallel_compile_tasks_for_lazy( + FLAG_parallel_compile_tasks_for_lazy); } // static @@ -133,6 +137,8 @@ void UnoptimizedCompileFlags::SetFlagsFromFunction(T function) { set_class_scope_has_private_brand(function->class_scope_has_private_brand()); set_has_static_private_methods_or_accessors( function->has_static_private_methods_or_accessors()); + set_private_name_lookup_skips_outer_class( + function->private_name_lookup_skips_outer_class()); set_is_toplevel(function->is_toplevel()); } diff --git a/src/parsing/parse-info.h b/src/parsing/parse-info.h index 663de7f139..a238876f8e 100644 --- a/src/parsing/parse-info.h +++ b/src/parsing/parse-info.h @@ -40,27 +40,30 @@ class Utf16CharacterStream; class Zone; // The flags for a parse + unoptimized compile operation. 
-#define FLAG_FIELDS(V, _) \ - V(is_toplevel, bool, 1, _) \ - V(is_eager, bool, 1, _) \ - V(is_eval, bool, 1, _) \ - V(outer_language_mode, LanguageMode, 1, _) \ - V(parse_restriction, ParseRestriction, 1, _) \ - V(is_module, bool, 1, _) \ - V(allow_lazy_parsing, bool, 1, _) \ - V(is_lazy_compile, bool, 1, _) \ - V(collect_type_profile, bool, 1, _) \ - V(coverage_enabled, bool, 1, _) \ - V(block_coverage_enabled, bool, 1, _) \ - V(is_asm_wasm_broken, bool, 1, _) \ - V(class_scope_has_private_brand, bool, 1, _) \ - V(requires_instance_members_initializer, bool, 1, _) \ - V(has_static_private_methods_or_accessors, bool, 1, _) \ - V(might_always_opt, bool, 1, _) \ - V(allow_natives_syntax, bool, 1, _) \ - V(allow_lazy_compile, bool, 1, _) \ - V(collect_source_positions, bool, 1, _) \ - V(allow_harmony_top_level_await, bool, 1, _) \ +#define FLAG_FIELDS(V, _) \ + V(is_toplevel, bool, 1, _) \ + V(is_eager, bool, 1, _) \ + V(is_eval, bool, 1, _) \ + V(outer_language_mode, LanguageMode, 1, _) \ + V(parse_restriction, ParseRestriction, 1, _) \ + V(is_module, bool, 1, _) \ + V(allow_lazy_parsing, bool, 1, _) \ + V(is_lazy_compile, bool, 1, _) \ + V(collect_type_profile, bool, 1, _) \ + V(coverage_enabled, bool, 1, _) \ + V(block_coverage_enabled, bool, 1, _) \ + V(is_asm_wasm_broken, bool, 1, _) \ + V(class_scope_has_private_brand, bool, 1, _) \ + V(private_name_lookup_skips_outer_class, bool, 1, _) \ + V(requires_instance_members_initializer, bool, 1, _) \ + V(has_static_private_methods_or_accessors, bool, 1, _) \ + V(might_always_opt, bool, 1, _) \ + V(allow_natives_syntax, bool, 1, _) \ + V(allow_lazy_compile, bool, 1, _) \ + V(post_parallel_compile_tasks_for_eager_toplevel, bool, 1, _) \ + V(post_parallel_compile_tasks_for_lazy, bool, 1, _) \ + V(collect_source_positions, bool, 1, _) \ + V(allow_harmony_top_level_await, bool, 1, _) \ V(is_repl_mode, bool, 1, _) class V8_EXPORT_PRIVATE UnoptimizedCompileFlags { diff --git a/src/parsing/parser.cc b/src/parsing/parser.cc index 1ffa517aec..6e086fb404 100644 --- a/src/parsing/parser.cc +++ b/src/parsing/parser.cc @@ -474,8 +474,9 @@ void Parser::InitializeEmptyScopeChain(ParseInfo* info) { original_scope_ = script_scope; } +template void Parser::DeserializeScopeChain( - Isolate* isolate, ParseInfo* info, + IsolateT* isolate, ParseInfo* info, MaybeHandle maybe_outer_scope_info, Scope::DeserializationMode mode) { InitializeEmptyScopeChain(info); @@ -492,6 +493,15 @@ void Parser::DeserializeScopeChain( } } +template void Parser::DeserializeScopeChain( + Isolate* isolate, ParseInfo* info, + MaybeHandle maybe_outer_scope_info, + Scope::DeserializationMode mode); +template void Parser::DeserializeScopeChain( + LocalIsolate* isolate, ParseInfo* info, + MaybeHandle maybe_outer_scope_info, + Scope::DeserializationMode mode); + namespace { void MaybeProcessSourceRanges(ParseInfo* parse_info, Expression* root, @@ -2559,44 +2569,38 @@ FunctionLiteral* Parser::ParseFunctionLiteral( eager_compile_hint == FunctionLiteral::kShouldLazyCompile; const bool is_top_level = AllowsLazyParsingWithoutUnresolvedVariables(); const bool is_eager_top_level_function = !is_lazy && is_top_level; - const bool is_lazy_top_level_function = is_lazy && is_top_level; - const bool is_lazy_inner_function = is_lazy && !is_top_level; RCS_SCOPE(runtime_call_stats_, RuntimeCallCounterId::kParseFunctionLiteral, RuntimeCallStats::kThreadSpecific); base::ElapsedTimer timer; if (V8_UNLIKELY(FLAG_log_function_events)) timer.Start(); - // Determine whether we can still lazy parse the inner 
function. - // The preconditions are: - // - Lazy compilation has to be enabled. - // - Neither V8 natives nor native function declarations can be allowed, - // since parsing one would retroactively force the function to be - // eagerly compiled. - // - The invoker of this parser can't depend on the AST being eagerly - // built (either because the function is about to be compiled, or - // because the AST is going to be inspected for some reason). - // - Because of the above, we can't be attempting to parse a - // FunctionExpression; even without enclosing parentheses it might be - // immediately invoked. - // - The function literal shouldn't be hinted to eagerly compile. + // Determine whether we can lazy parse the inner function. Lazy compilation + // has to be enabled, which is either forced by overall parse flags or via a + // ParsingModeScope. + const bool can_preparse = parse_lazily(); - // Inner functions will be parsed using a temporary Zone. After parsing, we - // will migrate unresolved variable into a Scope in the main Zone. - - const bool should_preparse_inner = parse_lazily() && is_lazy_inner_function; - - // If parallel compile tasks are enabled, and the function is an eager - // top level function, then we can pre-parse the function and parse / compile - // in a parallel task on a worker thread. - bool should_post_parallel_task = - parse_lazily() && is_eager_top_level_function && - FLAG_parallel_compile_tasks && info()->dispatcher() && + // Determine whether we can post any parallel compile tasks. Preparsing must + // be possible, there has to be a dispatcher, and the character stream must be + // cloneable. + const bool can_post_parallel_task = + can_preparse && info()->dispatcher() && scanner()->stream()->can_be_cloned_for_parallel_access(); - // This may be modified later to reflect preparsing decision taken - bool should_preparse = (parse_lazily() && is_lazy_top_level_function) || - should_preparse_inner || should_post_parallel_task; + // If parallel compile tasks are enabled, enable parallel compile for the + // subset of functions as defined by flags. + bool should_post_parallel_task = + can_post_parallel_task && + ((is_eager_top_level_function && + flags().post_parallel_compile_tasks_for_eager_toplevel()) || + (is_lazy && flags().post_parallel_compile_tasks_for_lazy())); + + // Determine whether we should lazy parse the inner function. This will be + // when either the function is lazy by inspection, or when we force it to be + // preparsed now so that we can then post a parallel full parse & compile task + // for it. + const bool should_preparse = + can_preparse && (is_lazy || should_post_parallel_task); ScopedPtrList body(pointer_buffer()); int expected_property_count = 0; @@ -2607,8 +2611,10 @@ FunctionLiteral* Parser::ParseFunctionLiteral( int function_literal_id = GetNextFunctionLiteralId(); ProducedPreparseData* produced_preparse_data = nullptr; - // This Scope lives in the main zone. We'll migrate data into that zone later. + // Inner functions will be parsed using a temporary Zone. After parsing, we + // will migrate unresolved variable into a Scope in the main Zone. Zone* parse_zone = should_preparse ? &preparser_zone_ : zone(); + // This Scope lives in the main zone. We'll migrate data into that zone later. 
  DeclarationScope* scope = NewFunctionScope(kind, parse_zone);
   SetLanguageMode(scope, language_mode);
 #ifdef DEBUG
@@ -3308,6 +3314,14 @@ void Parser::ParseOnBackground(LocalIsolate* isolate, ParseInfo* info,
     DCHECK_EQ(function_literal_id, kFunctionLiteralIdTopLevel);
     result = DoParseProgram(/* isolate = */ nullptr, info);
   } else {
+    base::Optional<ClassScope::HeritageParsingScope> heritage;
+    if (V8_UNLIKELY(flags().private_name_lookup_skips_outer_class() &&
+                    original_scope_->is_class_scope())) {
+      // If the function skips the outer class and the outer scope is a class,
+      // the function is in heritage position. Otherwise the function scope's
+      // skip bit will be correctly inherited from the outer scope.
+      heritage.emplace(original_scope_->AsClassScope());
+    }
     result = DoParseFunction(/* isolate = */ nullptr, info, start_position,
                              end_position, function_literal_id,
                              info->function_name());
diff --git a/src/parsing/parser.h b/src/parsing/parser.h
index fb66cbac27..613777b5d7 100644
--- a/src/parsing/parser.h
+++ b/src/parsing/parser.h
@@ -155,7 +155,8 @@ class V8_EXPORT_PRIVATE Parser : public NON_EXPORTED_BASE(ParserBase<Parser>) {
   // This only deserializes the scope chain, but doesn't connect the scopes to
   // their corresponding scope infos. Therefore, looking up variables in the
   // deserialized scopes is not possible.
-  void DeserializeScopeChain(Isolate* isolate, ParseInfo* info,
+  template <typename IsolateT>
+  void DeserializeScopeChain(IsolateT* isolate, ParseInfo* info,
                              MaybeHandle<ScopeInfo> maybe_outer_scope_info,
                              Scope::DeserializationMode mode =
                                  Scope::DeserializationMode::kScopesOnly);
diff --git a/src/runtime/runtime-test.cc b/src/runtime/runtime-test.cc
index fa8a481797..2c5943d004 100644
--- a/src/runtime/runtime-test.cc
+++ b/src/runtime/runtime-test.cc
@@ -12,6 +12,7 @@
 #include "src/codegen/assembler-inl.h"
 #include "src/codegen/compiler.h"
 #include "src/codegen/pending-optimization-table.h"
+#include "src/compiler-dispatcher/lazy-compile-dispatcher.h"
 #include "src/compiler-dispatcher/optimizing-compile-dispatcher.h"
 #include "src/debug/debug-evaluate.h"
 #include "src/deoptimizer/deoptimizer.h"
@@ -575,12 +576,20 @@ RUNTIME_FUNCTION(Runtime_NeverOptimizeFunction) {
   CONVERT_ARG_HANDLE_CHECKED(Object, function_object, 0);
   if (!function_object->IsJSFunction()) return CrashUnlessFuzzing(isolate);
   Handle<JSFunction> function = Handle<JSFunction>::cast(function_object);
-  SharedFunctionInfo sfi = function->shared();
-  if (sfi.abstract_code(isolate).kind() != CodeKind::INTERPRETED_FUNCTION &&
-      sfi.abstract_code(isolate).kind() != CodeKind::BUILTIN) {
+  Handle<SharedFunctionInfo> sfi(function->shared(), isolate);
+  if (sfi->abstract_code(isolate).kind() != CodeKind::INTERPRETED_FUNCTION &&
+      sfi->abstract_code(isolate).kind() != CodeKind::BUILTIN) {
     return CrashUnlessFuzzing(isolate);
   }
-  sfi.DisableOptimization(BailoutReason::kNeverOptimize);
+  // Make sure to finish compilation if there is a parallel lazy compilation in
+  // progress, to make sure that the compilation finalization doesn't clobber
+  // the SharedFunctionInfo's disable_optimization field.
+ if (isolate->lazy_compile_dispatcher() && + isolate->lazy_compile_dispatcher()->IsEnqueued(sfi)) { + isolate->lazy_compile_dispatcher()->FinishNow(sfi); + } + + sfi->DisableOptimization(BailoutReason::kNeverOptimize); return ReadOnlyRoots(isolate).undefined_value(); } diff --git a/src/wasm/module-compiler.cc b/src/wasm/module-compiler.cc index ff19ca8d8b..d881e97f1a 100644 --- a/src/wasm/module-compiler.cc +++ b/src/wasm/module-compiler.cc @@ -1853,7 +1853,8 @@ class BackgroundCompileJob final : public JobTask { std::shared_ptr CompileToNativeModule( Isolate* isolate, const WasmFeatures& enabled, ErrorThrower* thrower, std::shared_ptr module, const ModuleWireBytes& wire_bytes, - Handle* export_wrappers_out, int compilation_id) { + Handle* export_wrappers_out, int compilation_id, + v8::metrics::Recorder::ContextId context_id) { const WasmModule* wasm_module = module.get(); WasmEngine* engine = GetWasmEngine(); base::OwnedVector wire_bytes_copy = @@ -1890,8 +1891,6 @@ std::shared_ptr CompileToNativeModule( // Sync compilation is user blocking, so we increase the priority. native_module->compilation_state()->SetHighPriority(); - v8::metrics::Recorder::ContextId context_id = - isolate->GetOrRegisterRecorderContextId(isolate->native_context()); CompileNativeModule(isolate, context_id, thrower, wasm_module, native_module, export_wrappers_out); bool cache_hit = !engine->UpdateNativeModuleCache(thrower->error(), diff --git a/src/wasm/module-compiler.h b/src/wasm/module-compiler.h index 2926527ccc..1aab188d29 100644 --- a/src/wasm/module-compiler.h +++ b/src/wasm/module-compiler.h @@ -13,6 +13,7 @@ #include #include +#include "include/v8-metrics.h" #include "src/base/optional.h" #include "src/common/globals.h" #include "src/logging/metrics.h" @@ -52,7 +53,8 @@ V8_EXPORT_PRIVATE std::shared_ptr CompileToNativeModule( Isolate* isolate, const WasmFeatures& enabled, ErrorThrower* thrower, std::shared_ptr module, const ModuleWireBytes& wire_bytes, - Handle* export_wrappers_out, int compilation_id); + Handle* export_wrappers_out, int compilation_id, + v8::metrics::Recorder::ContextId context_id); void RecompileNativeModule(NativeModule* native_module, TieringState new_tiering_state); diff --git a/src/wasm/wasm-engine.cc b/src/wasm/wasm-engine.cc index e26c906f41..7ace3d3150 100644 --- a/src/wasm/wasm-engine.cc +++ b/src/wasm/wasm-engine.cc @@ -13,6 +13,7 @@ #include "src/execution/v8threads.h" #include "src/handles/global-handles-inl.h" #include "src/logging/counters.h" +#include "src/logging/metrics.h" #include "src/objects/heap-number.h" #include "src/objects/js-promise.h" #include "src/objects/managed-inl.h" @@ -490,10 +491,13 @@ MaybeHandle WasmEngine::SyncCompileTranslatedAsmJs( ModuleOrigin origin = language_mode == LanguageMode::kSloppy ? kAsmJsSloppyOrigin : kAsmJsStrictOrigin; + // TODO(leszeks): If we want asm.js in UKM, we should figure out a way to pass + // the context id in here. 
+  v8::metrics::Recorder::ContextId context_id =
+      v8::metrics::Recorder::ContextId::Empty();
   ModuleResult result = DecodeWasmModule(
       WasmFeatures::ForAsmjs(), bytes.start(), bytes.end(), false, origin,
-      isolate->counters(), isolate->metrics_recorder(),
-      isolate->GetOrRegisterRecorderContextId(isolate->native_context()),
+      isolate->counters(), isolate->metrics_recorder(), context_id,
       DecodingMethod::kSync, allocator());
   if (result.failed()) {
     // This happens once in a while when we have missed some limit check
@@ -510,7 +514,7 @@ MaybeHandle<AsmWasmData> WasmEngine::SyncCompileTranslatedAsmJs(
   Handle<FixedArray> export_wrappers;
   std::shared_ptr<NativeModule> native_module = CompileToNativeModule(
       isolate, WasmFeatures::ForAsmjs(), thrower, std::move(result).value(),
-      bytes, &export_wrappers, compilation_id);
+      bytes, &export_wrappers, compilation_id, context_id);
   if (!native_module) return {};
 
   return AsmWasmData::New(isolate, std::move(native_module), export_wrappers,
@@ -534,11 +538,12 @@ MaybeHandle<WasmModuleObject> WasmEngine::SyncCompile(
     const ModuleWireBytes& bytes) {
   int compilation_id = next_compilation_id_.fetch_add(1);
   TRACE_EVENT1("v8.wasm", "wasm.SyncCompile", "id", compilation_id);
-  ModuleResult result = DecodeWasmModule(
-      enabled, bytes.start(), bytes.end(), false, kWasmOrigin,
-      isolate->counters(), isolate->metrics_recorder(),
-      isolate->GetOrRegisterRecorderContextId(isolate->native_context()),
-      DecodingMethod::kSync, allocator());
+  v8::metrics::Recorder::ContextId context_id =
+      isolate->GetOrRegisterRecorderContextId(isolate->native_context());
+  ModuleResult result =
+      DecodeWasmModule(enabled, bytes.start(), bytes.end(), false, kWasmOrigin,
+                       isolate->counters(), isolate->metrics_recorder(),
+                       context_id, DecodingMethod::kSync, allocator());
   if (result.failed()) {
     thrower->CompileFailed(result.error());
     return {};
@@ -549,7 +554,7 @@ MaybeHandle<WasmModuleObject> WasmEngine::SyncCompile(
   Handle<FixedArray> export_wrappers;
   std::shared_ptr<NativeModule> native_module = CompileToNativeModule(
       isolate, enabled, thrower, std::move(result).value(), bytes,
-      &export_wrappers, compilation_id);
+      &export_wrappers, compilation_id, context_id);
   if (!native_module) return {};
 
 #ifdef DEBUG
diff --git a/test/mjsunit/parallel-compile-tasks.js b/test/mjsunit/parallel-compile-tasks.js
index fbde569556..b091734a7f 100644
--- a/test/mjsunit/parallel-compile-tasks.js
+++ b/test/mjsunit/parallel-compile-tasks.js
@@ -2,7 +2,7 @@
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
-// Flags: --compiler-dispatcher --parallel-compile-tasks --use-external-strings +// Flags: --compiler-dispatcher --parallel-compile-tasks-for-eager-toplevel --use-external-strings (function(a) { assertEquals(a, "IIFE"); diff --git a/test/unittests/compiler-dispatcher/compiler-dispatcher-unittest.cc b/test/unittests/compiler-dispatcher/compiler-dispatcher-unittest.cc index 1fa4522220..76a6dea180 100644 --- a/test/unittests/compiler-dispatcher/compiler-dispatcher-unittest.cc +++ b/test/unittests/compiler-dispatcher/compiler-dispatcher-unittest.cc @@ -79,6 +79,7 @@ class LazyCompileDispatcherTest : public TestWithNativeContext { test::OuterParseInfoForShared(isolate, shared, &state); if (dispatcher->IsEnqueued(shared)) return; dispatcher->Enqueue(isolate->main_thread_local_isolate(), shared, + outer_parse_info->state(), outer_parse_info->character_stream()->Clone(), nullptr); } }; diff --git a/test/unittests/tasks/background-compile-task-unittest.cc b/test/unittests/tasks/background-compile-task-unittest.cc index 895b0590b2..faa821b176 100644 --- a/test/unittests/tasks/background-compile-task-unittest.cc +++ b/test/unittests/tasks/background-compile-task-unittest.cc @@ -80,7 +80,8 @@ class BackgroundCompileTaskTest : public TestWithNativeContext { shared->function_literal_id(), nullptr); return new BackgroundCompileTask( - isolate, shared, outer_parse_info->character_stream()->Clone(), + isolate, shared, outer_parse_info->state(), + outer_parse_info->character_stream()->Clone(), function_literal->produced_preparse_data(), isolate->counters()->worker_thread_runtime_call_stats(), isolate->counters()->compile_function_on_background(), FLAG_stack_size); diff --git a/test/unittests/wasm/memory-protection-unittest.cc b/test/unittests/wasm/memory-protection-unittest.cc index 73062c1057..743bc5b2af 100644 --- a/test/unittests/wasm/memory-protection-unittest.cc +++ b/test/unittests/wasm/memory-protection-unittest.cc @@ -137,7 +137,7 @@ class MemoryProtectionTest : public TestWithNativeContext { std::shared_ptr native_module = CompileToNativeModule( isolate(), WasmFeatures::All(), &thrower, std::move(result).value(), ModuleWireBytes{base::ArrayVector(module_bytes)}, &export_wrappers, - kNoCompilationId); + kNoCompilationId, v8::metrics::Recorder::ContextId::Empty()); CHECK(!thrower.error()); CHECK_NOT_NULL(native_module); diff --git a/tools/testrunner/local/variants.py b/tools/testrunner/local/variants.py index 1134e67fff..f7628d4cd3 100644 --- a/tools/testrunner/local/variants.py +++ b/tools/testrunner/local/variants.py @@ -94,7 +94,8 @@ INCOMPATIBLE_FLAGS_PER_BUILD_VARIABLE = { "lite_mode": ["--no-lazy-feedback-allocation", "--max-semi-space-size=*", "--stress-concurrent-inlining"] + INCOMPATIBLE_FLAGS_PER_VARIANT["jitless"], - "predictable": ["--parallel-compile-tasks", + "predictable": ["--parallel-compile-tasks-for-eager-toplevel", + "--parallel-compile-tasks-for-lazy", "--concurrent-recompilation", "--stress-concurrent-allocation", "--stress-concurrent-inlining"], @@ -111,7 +112,8 @@ INCOMPATIBLE_FLAGS_PER_BUILD_VARIABLE = { # implications defined in flag-definitions.h. INCOMPATIBLE_FLAGS_PER_EXTRA_FLAG = { "--concurrent-recompilation": ["--predictable", "--assert-types"], - "--parallel-compile-tasks": ["--predictable"], + "--parallel-compile-tasks-for-eager-toplevel": ["--predictable"], + "--parallel-compile-tasks-for-lazy": ["--predictable"], "--gc-interval=*": ["--gc-interval=*"], "--optimize-for-size": ["--max-semi-space-size=*"], "--stress_concurrent_allocation":
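
For readers skimming the patch, the following is a minimal standalone sketch (not part of the change itself) of the task-posting decision that the new Parser::ParseFunctionLiteral logic above encodes. The struct and function names below are illustrative stand-ins for the real UnoptimizedCompileFlags bits and parser state, not V8 API.

```cpp
#include <iostream>

// Illustrative stand-in for the two new UnoptimizedCompileFlags bits added in
// this change (post_parallel_compile_tasks_for_eager_toplevel / _for_lazy).
struct TaskFlags {
  bool post_parallel_compile_tasks_for_eager_toplevel = false;
  bool post_parallel_compile_tasks_for_lazy = false;
};

// Mirrors the decision in Parser::ParseFunctionLiteral: a parallel compile
// task is posted only when preparsing is possible, a dispatcher exists, the
// character stream can be cloned, and the function falls into one of the two
// flag-selected buckets (eager top-level, or lazy).
bool ShouldPostParallelTask(const TaskFlags& flags, bool can_preparse,
                            bool has_dispatcher, bool stream_cloneable,
                            bool is_eager_top_level_function, bool is_lazy) {
  const bool can_post_parallel_task =
      can_preparse && has_dispatcher && stream_cloneable;
  return can_post_parallel_task &&
         ((is_eager_top_level_function &&
           flags.post_parallel_compile_tasks_for_eager_toplevel) ||
          (is_lazy && flags.post_parallel_compile_tasks_for_lazy));
}

int main() {
  TaskFlags flags;
  // Corresponds to running with --parallel-compile-tasks-for-eager-toplevel.
  flags.post_parallel_compile_tasks_for_eager_toplevel = true;
  std::cout << std::boolalpha
            << ShouldPostParallelTask(flags, /*can_preparse=*/true,
                                      /*has_dispatcher=*/true,
                                      /*stream_cloneable=*/true,
                                      /*is_eager_top_level_function=*/true,
                                      /*is_lazy=*/false)
            << "\n";  // prints "true"
  return 0;
}
```

Note that, per the DEFINE_IMPLICATION lines in flag-definitions.h above, enabling either of the two new flags also turns on --lazy-compile-dispatcher, so the dispatcher checked in this decision is guaranteed to exist in those configurations.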