// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "test/cctest/wasm/wasm-run-utils.h"

#include "src/base/optional.h"
#include "src/codegen/assembler-inl.h"
#include "src/diagnostics/code-tracer.h"
#include "src/heap/heap-inl.h"
#include "src/wasm/baseline/liftoff-compiler.h"
#include "src/wasm/code-space-access.h"
#include "src/wasm/graph-builder-interface.h"
#include "src/wasm/leb-helper.h"
#include "src/wasm/module-compiler.h"
#include "src/wasm/wasm-engine.h"
#include "src/wasm/wasm-import-wrapper-cache.h"
#include "src/wasm/wasm-objects-inl.h"
#include "src/wasm/wasm-opcodes.h"

namespace v8 {
namespace internal {
namespace wasm {

// Helper Functions.
bool IsSameNan(float expected, float actual) {
  // Sign is non-deterministic.
  uint32_t expected_bits = bit_cast<uint32_t>(expected) & ~0x80000000;
  uint32_t actual_bits = bit_cast<uint32_t>(actual) & ~0x80000000;
  // Some implementations convert signaling NaNs to quiet NaNs.
  return (expected_bits == actual_bits) ||
         ((expected_bits | 0x00400000) == actual_bits);
}

bool IsSameNan(double expected, double actual) {
  // Sign is non-deterministic.
  uint64_t expected_bits = bit_cast<uint64_t>(expected) & ~0x8000000000000000;
  uint64_t actual_bits = bit_cast<uint64_t>(actual) & ~0x8000000000000000;
  // Some implementations convert signaling NaNs to quiet NaNs.
  return (expected_bits == actual_bits) ||
         ((expected_bits | 0x0008000000000000) == actual_bits);
}

TestingModuleBuilder::TestingModuleBuilder(
    Zone* zone, ManuallyImportedJSFunction* maybe_import,
    TestExecutionTier tier, RuntimeExceptionSupport exception_support,
    TestingModuleMemoryType mem_type, Isolate* isolate)
    : test_module_(std::make_shared<WasmModule>()),
      isolate_(isolate ? isolate : CcTest::InitIsolateOnce()),
      enabled_features_(WasmFeatures::FromIsolate(isolate_)),
      execution_tier_(tier),
      runtime_exception_support_(exception_support) {
  WasmJs::Install(isolate_, true);
  test_module_->is_memory64 = mem_type == kMemory64;
  test_module_->untagged_globals_buffer_size = kMaxGlobalsSize;
  memset(globals_data_, 0, sizeof(globals_data_));

  uint32_t maybe_import_index = 0;
  if (maybe_import) {
    // Manually add an imported function before any other functions.
    // This must happen before the instance object is created, since the
    // instance object allocates import entries.
    maybe_import_index = AddFunction(maybe_import->sig, nullptr, kImport);
    DCHECK_EQ(0, maybe_import_index);
  }

  instance_object_ = InitInstanceObject();
  Handle<FixedArray> tables(isolate_->factory()->NewFixedArray(0));
  instance_object_->set_tables(*tables);

  if (maybe_import) {
    // Manually compile an import wrapper and insert it into the instance.
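    // ResolveWasmImportCall classifies the callable into a
    // compiler::WasmImportCallKind; import wrappers are cached per
    // (kind, signature, arity) key, so a wrapper is only compiled here if no
    // matching one exists yet.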
    auto resolved = compiler::ResolveWasmImportCall(
        maybe_import->js_function, maybe_import->sig,
        instance_object_->module(), enabled_features_);
    compiler::WasmImportCallKind kind = resolved.first;
    Handle<JSReceiver> callable = resolved.second;
    WasmImportWrapperCache::ModificationScope cache_scope(
        native_module_->import_wrapper_cache());
    WasmImportWrapperCache::CacheKey key(
        kind, maybe_import->sig,
        static_cast<int>(maybe_import->sig->parameter_count()));
    auto import_wrapper = cache_scope[key];
    if (import_wrapper == nullptr) {
      CodeSpaceWriteScope write_scope(native_module_);
      import_wrapper = CompileImportWrapper(
          native_module_, isolate_->counters(), kind, maybe_import->sig,
          static_cast<int>(maybe_import->sig->parameter_count()), &cache_scope);
    }

    ImportedFunctionEntry(instance_object_, maybe_import_index)
        .SetWasmToJs(isolate_, callable, import_wrapper);
  }

  if (tier == TestExecutionTier::kInterpreter) {
    interpreter_ = std::make_unique<WasmInterpreter>(
        isolate_, test_module_.get(),
        ModuleWireBytes{native_module_->wire_bytes()}, instance_object_);
  }
}

TestingModuleBuilder::~TestingModuleBuilder() {
  // When the native module dies and is erased from the cache, it is expected
  // to have either valid bytes or no bytes at all.
  native_module_->SetWireBytes({});
}

byte* TestingModuleBuilder::AddMemory(uint32_t size, SharedFlag shared) {
  CHECK(!test_module_->has_memory);
  CHECK_NULL(mem_start_);
  CHECK_EQ(0, mem_size_);
  DCHECK(!instance_object_->has_memory_object());
  uint32_t initial_pages = RoundUp(size, kWasmPageSize) / kWasmPageSize;
  uint32_t maximum_pages = (test_module_->maximum_pages != 0)
                               ? test_module_->maximum_pages
                               : initial_pages;
  test_module_->has_memory = true;

  // Create the WasmMemoryObject.
  Handle<WasmMemoryObject> memory_object =
      WasmMemoryObject::New(isolate_, initial_pages, maximum_pages, shared)
          .ToHandleChecked();
  instance_object_->set_memory_object(*memory_object);

  mem_start_ =
      reinterpret_cast<byte*>(memory_object->array_buffer().backing_store());
  mem_size_ = size;
  CHECK(size == 0 || mem_start_);

  WasmMemoryObject::AddInstance(isolate_, memory_object, instance_object_);

  // TODO(wasm): Delete the following two lines when test-run-wasm will use a
  // multiple of kPageSize as memory size. At the moment, the effect of these
  // two lines is used to shrink the memory for testing purposes.
  instance_object_->SetRawMemory(mem_start_, mem_size_);
  return mem_start_;
}
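// Registers a new function (imported or declared) in the test module and
// returns its function index. Imported functions have to be added before any
// declared function.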
uint32_t TestingModuleBuilder::AddFunction(const FunctionSig* sig,
                                           const char* name,
                                           FunctionType type) {
  if (test_module_->functions.size() == 0) {
    // TODO(titzer): Reserving space here to avoid the underlying WasmFunction
    // structs from moving.
    test_module_->functions.reserve(kMaxFunctions);
  }
  uint32_t index = static_cast<uint32_t>(test_module_->functions.size());
  test_module_->functions.push_back({sig,      // sig
                                     index,    // func_index
                                     0,        // sig_index
                                     {0, 0},   // code
                                     0,        // feedback slots
                                     false,    // imported
                                     false,    // exported
                                     false});  // declared
  if (type == kImport) {
    DCHECK_EQ(0, test_module_->num_declared_functions);
    ++test_module_->num_imported_functions;
    test_module_->functions.back().imported = true;
  } else {
    ++test_module_->num_declared_functions;
  }
  DCHECK_EQ(test_module_->functions.size(),
            test_module_->num_imported_functions +
                test_module_->num_declared_functions);
  if (name) {
    base::Vector<const byte> name_vec =
        base::Vector<const byte>::cast(base::CStrVector(name));
    test_module_->lazily_generated_names.AddForTesting(
        index, {AddBytes(name_vec), static_cast<uint32_t>(name_vec.length())});
  }
  if (interpreter_) {
    interpreter_->AddFunctionForTesting(&test_module_->functions.back());
  }
  DCHECK_LT(index, kMaxFunctions);  // limited for testing.
  return index;
}

void TestingModuleBuilder::FreezeSignatureMapAndInitializeWrapperCache() {
  if (test_module_->signature_map.is_frozen()) return;
  test_module_->signature_map.Freeze();
  size_t max_num_sigs = MaxNumExportWrappers(test_module_.get());
  Handle<FixedArray> export_wrappers =
      isolate_->factory()->NewFixedArray(static_cast<int>(max_num_sigs));
  instance_object_->module_object().set_export_wrappers(*export_wrappers);
}

Handle<JSFunction> TestingModuleBuilder::WrapCode(uint32_t index) {
  CHECK(!interpreter_);
  FreezeSignatureMapAndInitializeWrapperCache();
  return handle(
      JSFunction::cast(WasmInstanceObject::GetOrCreateWasmInternalFunction(
                           isolate_, instance_object(), index)
                           ->external()),
      isolate_);
}
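// Adds a function table of the given size to the module and wires it into the
// instance: a WasmIndirectFunctionTable used for call_indirect dispatch and a
// WasmTableObject appended to the instance's tables list. If function_indexes
// is non-null, the table entries are pre-populated with those functions.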
void TestingModuleBuilder::AddIndirectFunctionTable(
    const uint16_t* function_indexes, uint32_t table_size,
    ValueType table_type) {
  Handle<WasmInstanceObject> instance = instance_object();
  uint32_t table_index = static_cast<uint32_t>(test_module_->tables.size());
  test_module_->tables.emplace_back();
  WasmTable& table = test_module_->tables.back();
  table.initial_size = table_size;
  table.maximum_size = table_size;
  table.has_maximum_size = true;
  table.type = table_type;

  {
    // Allocate the indirect function table.
    Handle<FixedArray> old_tables =
        table_index == 0
            ? isolate_->factory()->empty_fixed_array()
            : handle(instance_object_->indirect_function_tables(), isolate_);
    Handle<FixedArray> new_tables =
        isolate_->factory()->CopyFixedArrayAndGrow(old_tables, 1);
    Handle<WasmIndirectFunctionTable> table_obj =
        WasmIndirectFunctionTable::New(isolate_, table.initial_size);
    new_tables->set(table_index, *table_obj);
    instance_object_->set_indirect_function_tables(*new_tables);
  }

  WasmInstanceObject::EnsureIndirectFunctionTableWithMinimumSize(
      instance_object(), table_index, table_size);

  Handle<WasmTableObject> table_obj = WasmTableObject::New(
      isolate_, instance, table.type, table.initial_size,
      table.has_maximum_size, table.maximum_size, nullptr,
      isolate_->factory()->null_value());

  WasmTableObject::AddDispatchTable(isolate_, table_obj, instance_object_,
                                    table_index);

  if (function_indexes) {
    for (uint32_t i = 0; i < table_size; ++i) {
      WasmFunction& function = test_module_->functions[function_indexes[i]];
      int sig_id = test_module_->signature_map.Find(*function.sig);
      FunctionTargetAndRef entry(instance, function.func_index);
      instance->GetIndirectFunctionTable(isolate_, table_index)
          ->Set(i, sig_id, entry.call_target(), *entry.ref());
      WasmTableObject::SetFunctionTablePlaceholder(
          isolate_, table_obj, i, instance_object_, function_indexes[i]);
    }
  }

  Handle<FixedArray> old_tables(instance_object_->tables(), isolate_);
  Handle<FixedArray> new_tables =
      isolate_->factory()->CopyFixedArrayAndGrow(old_tables, 1);
  new_tables->set(old_tables->length(), *table_obj);
  instance_object_->set_tables(*new_tables);
}

uint32_t TestingModuleBuilder::AddBytes(base::Vector<const byte> bytes) {
  base::Vector<const uint8_t> old_bytes = native_module_->wire_bytes();
  uint32_t old_size = static_cast<uint32_t>(old_bytes.size());
  // Avoid placing strings at offset 0, this might be interpreted as "not
  // set", e.g. for function names.
  uint32_t bytes_offset = old_size ? old_size : 1;
  size_t new_size = bytes_offset + bytes.size();
  base::OwnedVector<uint8_t> new_bytes =
      base::OwnedVector<uint8_t>::New(new_size);
  if (old_size > 0) {
    memcpy(new_bytes.start(), old_bytes.begin(), old_size);
  } else {
    // Set the unused byte. It is never decoded, but the bytes are used as the
    // key in the native module cache.
    new_bytes[0] = 0;
  }
  memcpy(new_bytes.start() + bytes_offset, bytes.begin(), bytes.length());
  native_module_->SetWireBytes(std::move(new_bytes));
  return bytes_offset;
}

uint32_t TestingModuleBuilder::AddException(const FunctionSig* sig) {
  DCHECK_EQ(0, sig->return_count());
  uint32_t index = static_cast<uint32_t>(test_module_->tags.size());
  test_module_->tags.push_back(WasmTag{sig});
  Handle<WasmExceptionTag> tag = WasmExceptionTag::New(isolate_, index);
  Handle<FixedArray> table(instance_object_->tags_table(), isolate_);
  table = isolate_->factory()->CopyFixedArrayAndGrow(table, 1);
  instance_object_->set_tags_table(*table);
  table->set(index, *tag);
  return index;
}

uint32_t TestingModuleBuilder::AddPassiveDataSegment(
    base::Vector<const byte> bytes) {
  uint32_t index = static_cast<uint32_t>(test_module_->data_segments.size());
  DCHECK_EQ(index, test_module_->data_segments.size());
  DCHECK_EQ(index, data_segment_starts_.size());
  DCHECK_EQ(index, data_segment_sizes_.size());

  // Add a passive data segment. This isn't used by function compilation, but
  // it keeps the index in sync. The data segment's source will not be
  // correct, since we don't store data in the module wire bytes.
  test_module_->data_segments.emplace_back();

  // The num_declared_data_segments (from the DataCount section) is used
  // to validate the segment index, during function compilation.
  test_module_->num_declared_data_segments = index + 1;
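  // All passive segment payloads share the single data_segment_data_ buffer;
  // data_segment_starts_ and data_segment_sizes_ record each segment's slice
  // and are mirrored into the instance object further below.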
  Address old_data_address =
      reinterpret_cast<Address>(data_segment_data_.data());
  size_t old_data_size = data_segment_data_.size();
  data_segment_data_.resize(old_data_size + bytes.length());
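  // The resize may reallocate the backing store and invalidate the addresses
  // captured above; recompute the base and rebase the recorded starts below.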
  Address new_data_address =
      reinterpret_cast<Address>(data_segment_data_.data());

  memcpy(data_segment_data_.data() + old_data_size, bytes.begin(),
         bytes.length());

  // The data_segment_data_ offset may have moved, so update all the starts.
  for (Address& start : data_segment_starts_) {
    start += new_data_address - old_data_address;
  }
  data_segment_starts_.push_back(new_data_address + old_data_size);
  data_segment_sizes_.push_back(bytes.length());

  // The vector pointers may have moved, so update the instance object.
  instance_object_->set_data_segment_starts(data_segment_starts_.data());
  instance_object_->set_data_segment_sizes(data_segment_sizes_.data());
  return index;
}

uint32_t TestingModuleBuilder::AddPassiveElementSegment(
    const std::vector<uint32_t>& entries) {
  uint32_t index = static_cast<uint32_t>(test_module_->elem_segments.size());
  DCHECK_EQ(index, dropped_elem_segments_.size());

  test_module_->elem_segments.emplace_back(kWasmFuncRef, false);
  auto& elem_segment = test_module_->elem_segments.back();
  for (uint32_t entry : entries) {
    elem_segment.entries.push_back(
        WasmElemSegment::Entry(WasmElemSegment::Entry::kRefFuncEntry, entry));
  }

  // The vector pointers may have moved, so update the instance object.
  dropped_elem_segments_.push_back(0);
  instance_object_->set_dropped_elem_segments(dropped_elem_segments_.data());
  return index;
}

CompilationEnv TestingModuleBuilder::CreateCompilationEnv() {
  return {test_module_.get(), native_module_->bounds_checks(),
          runtime_exception_support_, enabled_features_,
          DynamicTiering::kDisabled};
}

const WasmGlobal* TestingModuleBuilder::AddGlobal(ValueType type) {
  byte size = type.element_size_bytes();
  global_offset = (global_offset + size - 1) & ~(size - 1);  // align
  test_module_->globals.push_back(
      {type, true, {}, {global_offset}, false, false});
  global_offset += size;
  // limit number of globals.
  CHECK_LT(global_offset, kMaxGlobalsSize);
  return &test_module_->globals.back();
}

Handle<WasmInstanceObject> TestingModuleBuilder::InitInstanceObject() {
  const bool kUsesLiftoff = true;
  size_t code_size_estimate =
      wasm::WasmCodeManager::EstimateNativeModuleCodeSize(test_module_.get(),
                                                          kUsesLiftoff);
  auto native_module = GetWasmEngine()->NewNativeModule(
      isolate_, enabled_features_, test_module_, code_size_estimate);
  native_module->SetWireBytes(base::OwnedVector<const uint8_t>());
  native_module->compilation_state()->set_compilation_id(0);
  constexpr base::Vector<const char> kNoSourceUrl{"", 0};
  Handle