From 19eb7234bab92d3bb47da85a7a9a197f3bd6e57d Mon Sep 17 00:00:00 2001
From: Sigurd Schneider <sigurds@chromium.org>
Date: Tue, 18 Jun 2019 15:04:43 +0200
Subject: [PATCH] [arm64] Ensure pools are emitted before emitting large
 branch tables

Change-Id: Iedb78a62886177f5c603b2f3ce9b586ac1320d31
Bug: chromium:968078
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1664067
Reviewed-by: Andreas Haas <ahaas@chromium.org>
Commit-Queue: Sigurd Schneider <sigurds@chromium.org>
Cr-Commit-Position: refs/heads/master@{#62244}
---
 src/codegen/arm64/assembler-arm64.cc          |  4 +-
 src/codegen/arm64/assembler-arm64.h           |  9 ++--
 src/codegen/constant-pool.h                   |  2 +-
 .../backend/arm64/code-generator-arm64.cc     |  3 +-
 test/mjsunit/regress/wasm/regress-968078.js   | 47 +++++++++++++++++++
 5 files changed, 58 insertions(+), 7 deletions(-)
 create mode 100644 test/mjsunit/regress/wasm/regress-968078.js

diff --git a/src/codegen/arm64/assembler-arm64.cc b/src/codegen/arm64/assembler-arm64.cc
index d7790c7144..c01f5813ed 100644
--- a/src/codegen/arm64/assembler-arm64.cc
+++ b/src/codegen/arm64/assembler-arm64.cc
@@ -4418,7 +4418,7 @@ bool Assembler::ShouldEmitVeneer(int max_reachable_pc, size_t margin) {
 }
 
 void Assembler::RecordVeneerPool(int location_offset, int size) {
-  Assembler::BlockPoolsScope block_pools(this);
+  Assembler::BlockPoolsScope block_pools(this, PoolEmissionCheck::kSkip);
   RelocInfo rinfo(reinterpret_cast<Address>(buffer_start_) + location_offset,
                   RelocInfo::VENEER_POOL, static_cast<intptr_t>(size), Code());
   reloc_info_writer.Write(&rinfo);
@@ -4426,7 +4426,7 @@ void Assembler::RecordVeneerPool(int location_offset, int size) {
 
 void Assembler::EmitVeneers(bool force_emit, bool need_protection,
                             size_t margin) {
-  BlockPoolsScope scope(this, ConstantPool::PoolEmissionCheck::kSkip);
+  BlockPoolsScope scope(this, PoolEmissionCheck::kSkip);
   RecordComment("[ Veneers");
 
   // The exact size of the veneer pool must be recorded (see the comment at the
diff --git a/src/codegen/arm64/assembler-arm64.h b/src/codegen/arm64/assembler-arm64.h
index cdba676008..db74b13fc0 100644
--- a/src/codegen/arm64/assembler-arm64.h
+++ b/src/codegen/arm64/assembler-arm64.h
@@ -2371,12 +2371,15 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase {
 
   class BlockPoolsScope {
    public:
-    explicit BlockPoolsScope(Assembler* assem)
-        : assem_(assem), block_const_pool_(assem) {
+    // Block veneer and constant pool. Emits pools if necessary to ensure that
+    // {margin} more bytes can be emitted without triggering pool emission.
+    explicit BlockPoolsScope(Assembler* assem, size_t margin = 0)
+        : assem_(assem), block_const_pool_(assem, margin) {
+      assem_->CheckVeneerPool(false, true, margin);
       assem_->StartBlockVeneerPool();
     }
 
-    BlockPoolsScope(Assembler* assem, ConstantPool::PoolEmissionCheck check)
+    BlockPoolsScope(Assembler* assem, PoolEmissionCheck check)
         : assem_(assem), block_const_pool_(assem, check) {
       assem_->StartBlockVeneerPool();
     }
diff --git a/src/codegen/constant-pool.h b/src/codegen/constant-pool.h
index dab09a01b2..7d40069ae5 100644
--- a/src/codegen/constant-pool.h
+++ b/src/codegen/constant-pool.h
@@ -235,6 +235,7 @@ enum class Jump { kOmitted, kRequired };
 enum class Emission { kIfNeeded, kForced };
 enum class Alignment { kOmitted, kRequired };
 enum class RelocInfoStatus { kMustRecord, kMustOmitForDuplicate };
+enum class PoolEmissionCheck { kSkip };
 
 // Pools are emitted in the instruction stream, preferably after unconditional
 // jumps or after returns from functions (in dead code locations).
@@ -279,7 +280,6 @@ class ConstantPool {
   void SetNextCheckIn(size_t instructions);
 
   // Class for scoping postponing the constant pool generation.
-  enum class PoolEmissionCheck { kSkip };
   class V8_EXPORT_PRIVATE BlockScope {
    public:
     // BlockScope immediatelly emits the pool if necessary to ensure that
diff --git a/src/compiler/backend/arm64/code-generator-arm64.cc b/src/compiler/backend/arm64/code-generator-arm64.cc
index 15d31f1849..804c63ae59 100644
--- a/src/compiler/backend/arm64/code-generator-arm64.cc
+++ b/src/compiler/backend/arm64/code-generator-arm64.cc
@@ -2401,7 +2401,8 @@ void CodeGenerator::AssembleArchTableSwitch(Instruction* instr) {
   __ Add(temp, temp, Operand(input, UXTW, 2));
   __ Br(temp);
   {
-    TurboAssembler::BlockPoolsScope block_pools(tasm());
+    TurboAssembler::BlockPoolsScope block_pools(tasm(),
+                                                case_count * kInstrSize);
     __ Bind(&table);
     for (size_t index = 0; index < case_count; ++index) {
       __ B(GetLabel(i.InputRpo(index + 2)));
diff --git a/test/mjsunit/regress/wasm/regress-968078.js b/test/mjsunit/regress/wasm/regress-968078.js
new file mode 100644
index 0000000000..2935ea05e3
--- /dev/null
+++ b/test/mjsunit/regress/wasm/regress-968078.js
@@ -0,0 +1,47 @@
+// Copyright 2019 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Flags: --expose-wasm
+
+load("test/mjsunit/wasm/wasm-module-builder.js");
+
+(function() {
+  function repeat(value, length) {
+    var arr = new Array(length);
+    for (let i = 0; i < length; i++) {
+      arr[i] = value;
+    }
+    return arr;
+  }
+  function br_table(block_index, length, def_block) {
+    const bytes = new Binary();
+    bytes.emit_bytes([kExprBrTable]);
+    // Functions count (different than the count in the functions section).
+    bytes.emit_u32v(length);
+    bytes.emit_bytes(repeat(block_index, length));
+    bytes.emit_bytes([def_block]);
+    return Array.from(bytes.trunc_buffer());
+  }
+  var builder = new WasmModuleBuilder();
+  builder.addMemory(12, 12, false);
+  builder.addFunction("foo", kSig_v_iii)
+    .addBody([].concat([
+      kExprBlock, kWasmStmt,
+      kExprGetLocal, 0x2,
+      kExprI32Const, 0x01,
+      kExprI32And,
+      // Generate a test branch (which has 32k limited reach).
+      kExprIf, kWasmStmt,
+      kExprGetLocal, 0x0,
+      kExprI32Const, 0x01,
+      kExprI32And,
+      kExprBrIf, 0x1,
+      kExprGetLocal, 0x0,
+      // Emit a br_table that is long enough to make the test branch go out of range.
+      ], br_table(0x1, 9000, 0x00), [
+      kExprEnd,
+      kExprEnd,
+    ])).exportFunc();
+  builder.instantiate();
+})();