diff --git a/src/codegen/reloc-info.h b/src/codegen/reloc-info.h
index a4ea9b1ee9..e6d7dfd01c 100644
--- a/src/codegen/reloc-info.h
+++ b/src/codegen/reloc-info.h
@@ -328,6 +328,13 @@ class RelocInfo {
   static const int kApplyMask;  // Modes affected by apply.  Depends on arch.
 
+  static constexpr int AllRealModesMask() {
+    constexpr Mode kFirstUnrealRelocMode =
+        static_cast<Mode>(RelocInfo::LAST_REAL_RELOC_MODE + 1);
+    return (ModeMask(kFirstUnrealRelocMode) - 1) &
+           ~(ModeMask(RelocInfo::FIRST_REAL_RELOC_MODE) - 1);
+  }
+
   static int EmbeddedObjectModeMask() {
     return ModeMask(RelocInfo::FULL_EMBEDDED_OBJECT) |
            ModeMask(RelocInfo::COMPRESSED_EMBEDDED_OBJECT);
diff --git a/src/compiler/backend/code-generator.cc b/src/compiler/backend/code-generator.cc
index 83dccf69e8..80677bcac4 100644
--- a/src/compiler/backend/code-generator.cc
+++ b/src/compiler/backend/code-generator.cc
@@ -513,6 +513,11 @@ MaybeHandle<Code> CodeGenerator::FinalizeCode() {
     return MaybeHandle<Code>();
   }
 
+  // TODO(jgruber,v8:8888): Turn this into a DCHECK once confidence is high
+  // that the implementation is complete.
+  CHECK_IMPLIES(info()->native_context_independent(),
+                code->IsNativeContextIndependent(isolate()));
+
   isolate()->counters()->total_compiled_code_size()->Increment(
       code->raw_instruction_size());
diff --git a/src/compiler/compilation-dependencies.cc b/src/compiler/compilation-dependencies.cc
index b9ed54256a..1b65606827 100644
--- a/src/compiler/compilation-dependencies.cc
+++ b/src/compiler/compilation-dependencies.cc
@@ -505,6 +505,12 @@ bool CompilationDependencies::AreValid() const {
 }
 
 bool CompilationDependencies::Commit(Handle<Code> code) {
+  // Dependencies are context-dependent. In the future it may be possible to
+  // restore them in the consumer native context, but for now they are
+  // disabled.
+  CHECK_IMPLIES(broker_->is_native_context_independent(),
+                dependencies_.empty());
+
   for (auto dep : dependencies_) {
     if (!dep->IsValid()) {
       dependencies_.clear();
diff --git a/src/compiler/js-call-reducer.cc b/src/compiler/js-call-reducer.cc
index d6f085a638..6bcbf9165d 100644
--- a/src/compiler/js-call-reducer.cc
+++ b/src/compiler/js-call-reducer.cc
@@ -3760,6 +3760,10 @@ Reduction JSCallReducer::ReduceCallOrConstructWithArrayLikeOrSpread(
     start_index = formal_parameter_count;
  }
 
+  // TODO(jgruber,v8:8888): Attempt to remove this restriction. It currently
+  // exists because we cannot create code dependencies in NCI code.
+  if (broker()->is_native_context_independent()) return NoChange();
+
   // For call/construct with spread, we need to also install a code
   // dependency on the array iterator lookup protector cell to ensure
   // that no one messed with the %ArrayIteratorPrototype%.next method.
diff --git a/src/objects/code.cc b/src/objects/code.cc
index e60571d611..9c365a2292 100644
--- a/src/objects/code.cc
+++ b/src/objects/code.cc
@@ -238,15 +238,12 @@ const char* AbstractCode::Kind2String(Kind kind) {
 }
 
 bool Code::IsIsolateIndependent(Isolate* isolate) {
-  constexpr int all_real_modes_mask =
-      (1 << (RelocInfo::LAST_REAL_RELOC_MODE + 1)) -
-      (1 << (RelocInfo::FIRST_REAL_RELOC_MODE - 1)) - 1;
-  constexpr int mode_mask = all_real_modes_mask &
-                            ~RelocInfo::ModeMask(RelocInfo::CONST_POOL) &
-                            ~RelocInfo::ModeMask(RelocInfo::OFF_HEAP_TARGET) &
-                            ~RelocInfo::ModeMask(RelocInfo::VENEER_POOL);
-  STATIC_ASSERT(RelocInfo::LAST_REAL_RELOC_MODE == RelocInfo::VENEER_POOL);
-  STATIC_ASSERT(mode_mask ==
+  static constexpr int kModeMask =
+      RelocInfo::AllRealModesMask() &
+      ~RelocInfo::ModeMask(RelocInfo::CONST_POOL) &
+      ~RelocInfo::ModeMask(RelocInfo::OFF_HEAP_TARGET) &
+      ~RelocInfo::ModeMask(RelocInfo::VENEER_POOL);
+  STATIC_ASSERT(kModeMask ==
                 (RelocInfo::ModeMask(RelocInfo::CODE_TARGET) |
                  RelocInfo::ModeMask(RelocInfo::RELATIVE_CODE_TARGET) |
                  RelocInfo::ModeMask(RelocInfo::COMPRESSED_EMBEDDED_OBJECT) |
@@ -258,8 +255,7 @@ bool Code::IsIsolateIndependent(Isolate* isolate) {
                  RelocInfo::ModeMask(RelocInfo::WASM_CALL) |
                  RelocInfo::ModeMask(RelocInfo::WASM_STUB_CALL)));
 
-  bool is_process_independent = true;
-  for (RelocIterator it(*this, mode_mask); !it.done(); it.next()) {
+  for (RelocIterator it(*this, kModeMask); !it.done(); it.next()) {
 #if defined(V8_TARGET_ARCH_X64) || defined(V8_TARGET_ARCH_ARM64) || \
     defined(V8_TARGET_ARCH_ARM) || defined(V8_TARGET_ARCH_MIPS) ||  \
     defined(V8_TARGET_ARCH_S390) || defined(V8_TARGET_ARCH_IA32)
@@ -276,10 +272,70 @@ bool Code::IsIsolateIndependent(Isolate* isolate) {
       if (Builtins::IsIsolateIndependentBuiltin(target)) continue;
     }
 #endif
-    is_process_independent = false;
+    return false;
   }
 
-  return is_process_independent;
+  return true;
+}
+
+// Multiple native contexts live on the same heap, and V8 currently draws no
+// clear distinction between native-context-dependent and independent
+// objects. A good guideline is "objects embedded into bytecode are
+// nc-independent", since bytecode is shared between native contexts. Among
+// others, this is the case for ScopeInfo, SharedFunctionInfo, String, etc.
+bool Code::IsNativeContextIndependent(Isolate* isolate) {
+  static constexpr int kModeMask =
+      RelocInfo::AllRealModesMask() &
+      ~RelocInfo::ModeMask(RelocInfo::CONST_POOL) &
+      ~RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) &
+      ~RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE) &
+      ~RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE_ENCODED) &
+      ~RelocInfo::ModeMask(RelocInfo::OFF_HEAP_TARGET) &
+      ~RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY) &
+      ~RelocInfo::ModeMask(RelocInfo::VENEER_POOL);
+  STATIC_ASSERT(kModeMask ==
+                (RelocInfo::ModeMask(RelocInfo::CODE_TARGET) |
+                 RelocInfo::ModeMask(RelocInfo::RELATIVE_CODE_TARGET) |
+                 RelocInfo::ModeMask(RelocInfo::COMPRESSED_EMBEDDED_OBJECT) |
+                 RelocInfo::ModeMask(RelocInfo::FULL_EMBEDDED_OBJECT) |
+                 RelocInfo::ModeMask(RelocInfo::WASM_CALL) |
+                 RelocInfo::ModeMask(RelocInfo::WASM_STUB_CALL)));
+
+  bool is_independent = true;
+  for (RelocIterator it(*this, kModeMask); !it.done(); it.next()) {
+    if (RelocInfo::IsEmbeddedObjectMode(it.rinfo()->rmode())) {
+      HeapObject o = it.rinfo()->target_object();
+      // TODO(jgruber,v8:8888): Extend this with further NCI objects, and
+      // define a more systematic IsNativeContextIndependent() predicate.
+ if (o.IsString()) continue; + if (o.IsScopeInfo()) continue; + if (o.IsHeapNumber()) continue; + if (o.IsBigInt()) continue; + if (o.IsSharedFunctionInfo()) continue; + if (o.IsArrayBoilerplateDescription()) continue; + if (o.IsObjectBoilerplateDescription()) continue; + if (o.IsTemplateObjectDescription()) continue; + if (o.IsFixedArray()) { + // Some uses of FixedArray are valid. + // 1. Passed as arg to %DeclareGlobals, contains only strings + // and SFIs. + // 2. Passed as arg to %DefineClass. No well defined contents. + // .. ? + // TODO(jgruber): Consider assigning dedicated instance + // types instead of assuming fixed arrays are okay. + continue; + } + // Other objects are expected to be context-dependent. + PrintF("Found native-context-dependent object:\n"); + o.Print(); + o.map().Print(); + } + is_independent = false; + } + + return is_independent; } bool Code::Inlines(SharedFunctionInfo sfi) { diff --git a/src/objects/code.h b/src/objects/code.h index ea6f52cc59..34c3049075 100644 --- a/src/objects/code.h +++ b/src/objects/code.h @@ -371,6 +371,7 @@ class Code : public HeapObject { inline HandlerTable::CatchPrediction GetBuiltinCatchPrediction(); bool IsIsolateIndependent(Isolate* isolate); + bool IsNativeContextIndependent(Isolate* isolate); inline bool CanContainWeakObjects();
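
Two standalone sketches follow as commentary on the patch; neither is V8 source. The first illustrates the bit arithmetic behind the new RelocInfo::AllRealModesMask() helper, which the patch factors out of the open-coded mask computation in Code::IsIsolateIndependent(). It assumes, as in V8, that ModeMask(mode) is 1 << mode and that the "real" reloc modes occupy a contiguous enum range; the concrete enum values below are made up for illustration.

// all_real_modes_mask_sketch.cc -- standalone illustration, not V8 source.
#include <cstdio>

namespace sketch {

// Hypothetical mode numbering; only the contiguous-range assumption
// matches V8.
enum Mode : int { FIRST_REAL_RELOC_MODE = 2, LAST_REAL_RELOC_MODE = 5 };

constexpr int ModeMask(int mode) { return 1 << mode; }

// Mirrors the patch: ModeMask(last + 1) - 1 sets every bit up to and
// including `last`; masking with ~(ModeMask(first) - 1) then clears the
// bits below `first`, leaving exactly the bits first..last.
constexpr int AllRealModesMask() {
  constexpr int kFirstUnrealRelocMode = LAST_REAL_RELOC_MODE + 1;
  return (ModeMask(kFirstUnrealRelocMode) - 1) &
         ~(ModeMask(FIRST_REAL_RELOC_MODE) - 1);
}

static_assert(AllRealModesMask() == 0b00111100,
              "bits 2..5 inclusive, i.e. FIRST..LAST");

}  // namespace sketch

int main() {
  std::printf("mask = 0x%x\n", sketch::AllRealModesMask());  // prints 0x3c
  return 0;
}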
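
The second sketch shows the shape of the new Code::IsNativeContextIndependent() check: walk every object reference the code embeds and accept only kinds known to be shared across native contexts, reporting and rejecting everything else. The Kind enum and the flat list of references are hypothetical simplifications standing in for V8's instance types and relocation-info iteration.

// nci_allowlist_sketch.cc -- standalone illustration, not V8 source.
#include <cstdio>
#include <initializer_list>

namespace sketch {

// Hypothetical stand-ins for V8 instance types.
enum class Kind {
  kString, kScopeInfo, kHeapNumber, kBigInt, kSharedFunctionInfo,
  kFixedArray, kJSFunction, kNativeContext,
};

// Allowlist mirroring the patch: anything not explicitly listed is
// assumed to be native-context-dependent.
bool IsAllowed(Kind k) {
  switch (k) {
    case Kind::kString:
    case Kind::kScopeInfo:
    case Kind::kHeapNumber:
    case Kind::kBigInt:
    case Kind::kSharedFunctionInfo:
    case Kind::kFixedArray:  // see the FixedArray caveats in the patch
      return true;
    default:
      return false;
  }
}

bool IsNativeContextIndependent(std::initializer_list<Kind> embedded) {
  bool is_independent = true;
  for (Kind k : embedded) {
    if (IsAllowed(k)) continue;
    std::printf("found native-context-dependent reference\n");
    is_independent = false;  // keep scanning, as the patch does, to
                             // report every offending reference
  }
  return is_independent;
}

}  // namespace sketch

int main() {
  using sketch::Kind;
  // A JSFunction closes over one specific native context, so embedding
  // it disqualifies the code object.
  bool ok = sketch::IsNativeContextIndependent(
      {Kind::kString, Kind::kSharedFunctionInfo, Kind::kJSFunction});
  std::printf("independent: %s\n", ok ? "yes" : "no");  // prints "no"
  return 0;
}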