// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include <cmath>
#include <functional>
#include <limits>

#include "src/base/bits.h"
#include "src/base/utils/random-number-generator.h"
#include "src/codegen.h"
#include "src/objects-inl.h"
#include "test/cctest/cctest.h"
#include "test/cctest/compiler/codegen-tester.h"
#include "test/cctest/compiler/graph-builder-tester.h"
#include "test/cctest/compiler/value-helper.h"

namespace v8 {
namespace internal {
namespace compiler {

// Patches every WASM memory base and memory size reference embedded in the
// generated code and flushes the instruction cache if anything changed.
static void UpdateMemoryReferences(Handle<Code> code, Address old_base,
                                   Address new_base, uint32_t old_size,
                                   uint32_t new_size) {
  Isolate* isolate = CcTest::i_isolate();
  bool modified = false;
  int mode_mask = RelocInfo::ModeMask(RelocInfo::WASM_MEMORY_REFERENCE) |
                  RelocInfo::ModeMask(RelocInfo::WASM_MEMORY_SIZE_REFERENCE);
  for (RelocIterator it(*code, mode_mask); !it.done(); it.next()) {
    RelocInfo::Mode mode = it.rinfo()->rmode();
    if (RelocInfo::IsWasmMemoryReference(mode)) {
      it.rinfo()->update_wasm_memory_reference(isolate, old_base, new_base);
    } else {
      DCHECK(RelocInfo::IsWasmMemorySizeReference(mode));
      it.rinfo()->update_wasm_memory_size(isolate, old_size, new_size);
    }
    modified = true;
  }
  if (modified) {
    Assembler::FlushICache(isolate, code->instruction_start(),
                           code->instruction_size());
  }
}

// Patches every WASM function table size reference embedded in the generated
// code and flushes the instruction cache if anything changed.
static void UpdateFunctionTableSizeReferences(Handle<Code> code,
                                              uint32_t old_size,
                                              uint32_t new_size) {
  Isolate* isolate = CcTest::i_isolate();
  bool modified = false;
  int mode_mask =
      RelocInfo::ModeMask(RelocInfo::WASM_FUNCTION_TABLE_SIZE_REFERENCE);
  for (RelocIterator it(*code, mode_mask); !it.done(); it.next()) {
    RelocInfo::Mode mode = it.rinfo()->rmode();
    if (RelocInfo::IsWasmFunctionTableSizeReference(mode)) {
      it.rinfo()->update_wasm_function_table_size_reference(isolate, old_size,
                                                            new_size);
      modified = true;
    }
  }
  if (modified) {
    Assembler::FlushICache(isolate, code->instruction_start(),
                           code->instruction_size());
  }
}

// Builds code that copies buffer[0] into buffer[1] through relocatable memory
// references, then relocates those references to new_buffer and checks that
// the copy now happens in the new buffer.
template <typename CType>
static void RunLoadStoreRelocation(MachineType rep) {
  const int kNumElems = 2;
  CType buffer[kNumElems];
  CType new_buffer[kNumElems];
  byte* raw = reinterpret_cast<byte*>(buffer);
  byte* new_raw = reinterpret_cast<byte*>(new_buffer);
  for (size_t i = 0; i < sizeof(buffer); i++) {
    raw[i] = static_cast<byte>((i + sizeof(CType)) ^ 0xAA);
    new_raw[i] = static_cast<byte>((i + sizeof(CType)) ^ 0xAA);
  }
  uint32_t OK = 0x29000;
  RawMachineAssemblerTester<uint32_t> m;
  Node* base = m.RelocatableIntPtrConstant(reinterpret_cast<intptr_t>(raw),
                                           RelocInfo::WASM_MEMORY_REFERENCE);
  Node* base1 = m.RelocatableIntPtrConstant(
      reinterpret_cast<intptr_t>(raw + sizeof(CType)),
      RelocInfo::WASM_MEMORY_REFERENCE);
  Node* index = m.Int32Constant(0);
  Node* load = m.Load(rep, base, index);
  m.Store(rep.representation(), base1, index, load, kNoWriteBarrier);
  m.Return(m.Int32Constant(OK));
  CHECK(buffer[0] != buffer[1]);
  CHECK_EQ(OK, m.Call());
  CHECK(buffer[0] == buffer[1]);
  m.GenerateCode();
  Handle<Code> code = m.GetCode();
  UpdateMemoryReferences(code, raw, new_raw, sizeof(buffer),
                         sizeof(new_buffer));
  CHECK(new_buffer[0] != new_buffer[1]);
  CHECK_EQ(OK, m.Call());
  CHECK(new_buffer[0] == new_buffer[1]);
}

TEST(RunLoadStoreRelocation) {
  RunLoadStoreRelocation<int8_t>(MachineType::Int8());
  RunLoadStoreRelocation<uint8_t>(MachineType::Uint8());
  RunLoadStoreRelocation<int16_t>(MachineType::Int16());
  RunLoadStoreRelocation<uint16_t>(MachineType::Uint16());
  RunLoadStoreRelocation<int32_t>(MachineType::Int32());
  RunLoadStoreRelocation<uint32_t>(MachineType::Uint32());
  RunLoadStoreRelocation<void*>(MachineType::AnyTagged());
  RunLoadStoreRelocation<float>(MachineType::Float32());
  RunLoadStoreRelocation<double>(MachineType::Float64());
}

// Same as above, but the load and store use non-zero offsets from a single
// relocatable base; iterates over all element pairs (x, kNumElems - x - 1).
template <typename CType>
static void RunLoadStoreRelocationOffset(MachineType rep) {
  RawMachineAssemblerTester<int32_t> r(MachineType::Int32());
  const int kNumElems = 4;
  CType buffer[kNumElems];
  CType new_buffer[kNumElems + 1];

  for (int32_t x = 0; x < kNumElems; x++) {
    int32_t y = kNumElems - x - 1;
    // Initialize the buffer with raw data.
    byte* raw = reinterpret_cast<byte*>(buffer);
    for (size_t i = 0; i < sizeof(buffer); i++) {
      raw[i] = static_cast<byte>((i + sizeof(buffer)) ^ 0xAA);
    }

    RawMachineAssemblerTester<int32_t> m;
    int32_t OK = 0x29000 + x;
    Node* base = m.RelocatableIntPtrConstant(reinterpret_cast<intptr_t>(buffer),
                                             RelocInfo::WASM_MEMORY_REFERENCE);
    Node* index0 = m.IntPtrConstant(x * sizeof(buffer[0]));
    Node* load = m.Load(rep, base, index0);
    Node* index1 = m.IntPtrConstant(y * sizeof(buffer[0]));
    m.Store(rep.representation(), base, index1, load, kNoWriteBarrier);
    m.Return(m.Int32Constant(OK));
    CHECK(buffer[x] != buffer[y]);
    CHECK_EQ(OK, m.Call());
    CHECK(buffer[x] == buffer[y]);
    m.GenerateCode();

    // Initialize the new buffer and zero out the old one.
    byte* new_raw = reinterpret_cast<byte*>(new_buffer);
    for (size_t i = 0; i < sizeof(buffer); i++) {
      raw[i] = 0;
      new_raw[i] = static_cast<byte>((i + sizeof(buffer)) ^ 0xAA);
    }

    // Perform relocation on the generated code.
    Handle<Code> code = m.GetCode();
    UpdateMemoryReferences(code, raw, new_raw, sizeof(buffer),
                           sizeof(new_buffer));

    CHECK(new_buffer[x] != new_buffer[y]);
    CHECK_EQ(OK, m.Call());
    CHECK(new_buffer[x] == new_buffer[y]);
  }
}

TEST(RunLoadStoreRelocationOffset) {
  RunLoadStoreRelocationOffset<int8_t>(MachineType::Int8());
  RunLoadStoreRelocationOffset<uint8_t>(MachineType::Uint8());
  RunLoadStoreRelocationOffset<int16_t>(MachineType::Int16());
  RunLoadStoreRelocationOffset<uint16_t>(MachineType::Uint16());
  RunLoadStoreRelocationOffset<int32_t>(MachineType::Int32());
  RunLoadStoreRelocationOffset<uint32_t>(MachineType::Uint32());
  RunLoadStoreRelocationOffset<void*>(MachineType::AnyTagged());
  RunLoadStoreRelocationOffset<float>(MachineType::Float32());
  RunLoadStoreRelocationOffset<double>(MachineType::Float64());
}

// Checks that a bounds check against a relocatable WASM memory size constant
// fails at first, then passes after the size reference is patched upward.
TEST(Uint32LessThanMemoryRelocation) {
  RawMachineAssemblerTester<uint32_t> m;
  RawMachineLabel within_bounds, out_of_bounds;
  Node* index = m.Int32Constant(0x200);
  Node* limit =
      m.RelocatableInt32Constant(0x200, RelocInfo::WASM_MEMORY_SIZE_REFERENCE);
  Node* cond = m.AddNode(m.machine()->Uint32LessThan(), index, limit);
  m.Branch(cond, &within_bounds, &out_of_bounds);
  m.Bind(&within_bounds);
  m.Return(m.Int32Constant(0xaced));
  m.Bind(&out_of_bounds);
  m.Return(m.Int32Constant(0xdeadbeef));
  // Check that the index is out of bounds with the current size.
  CHECK_EQ(0xdeadbeef, m.Call());
  m.GenerateCode();

  Handle<Code> code = m.GetCode();
  UpdateMemoryReferences(code, reinterpret_cast<Address>(1234),
                         reinterpret_cast<Address>(1234),
                         0x200, 0x400);

  // Check that after the limit is increased, the index is within bounds.
  CHECK_EQ(0xacedu, m.Call());
}

// Same as above, but for the WASM function table size reference.
TEST(Uint32LessThanFunctionTableRelocation) {
  RawMachineAssemblerTester<uint32_t> m;
  RawMachineLabel within_bounds, out_of_bounds;
  Node* index = m.Int32Constant(0x200);
  Node* limit = m.RelocatableInt32Constant(
      0x200, RelocInfo::WASM_FUNCTION_TABLE_SIZE_REFERENCE);
  Node* cond = m.AddNode(m.machine()->Uint32LessThan(), index, limit);
  m.Branch(cond, &within_bounds, &out_of_bounds);
  m.Bind(&within_bounds);
  m.Return(m.Int32Constant(0xaced));
  m.Bind(&out_of_bounds);
  m.Return(m.Int32Constant(0xdeadbeef));
  // Check that the index is out of bounds with the current size.
  CHECK_EQ(0xdeadbeef, m.Call());
  m.GenerateCode();

  Handle<Code> code = m.GetCode();
  UpdateFunctionTableSizeReferences(code, 0x200, 0x400);

  // Check that after the limit is increased, the index is within bounds.
  CHECK_EQ(0xaced, m.Call());
}

}  // namespace compiler
}  // namespace internal
}  // namespace v8