[arm] Clean up disabling of sharing code target entries.

This fixes an issue with full-codegen where code target entries for the OSR
check were being incorrectly shared. We now explicitly disable sharing of code
target constant pool entries for full-codegen and for calls to builtins from
WASM code, using a scope.
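
For reference, a minimal sketch of the two ways the new scope is used
(mirroring the full-codegen and compiler back-end hunks below; simplified
excerpts, not complete functions):

  // RAII form, as in FullCodeGenerator::Generate(): block sharing for the
  // whole function so interrupt-check call sites keep their own RelocInfo.
  Assembler::BlockCodeTargetSharingScope block_code_target_sharing(masm_);

  // Deferred form, as in the compiler back-end: construct the scope empty and
  // only Open() it when generating WASM code; sharing resumes when the scope
  // is destroyed at the end of the enclosing block.
  internal::Assembler::BlockCodeTargetSharingScope scope;
  if (info()->IsWasm()) scope.Open(masm());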

BUG=chromium:725743

Review-Url: https://codereview.chromium.org/2922433002
Cr-Commit-Position: refs/heads/master@{#45661}
Author: georgia.kouveli, 2017-06-01 06:18:21 -07:00 (committed by Commit Bot)
Parent: 15691758b3
Commit: 6a99238b90
5 changed files with 89 additions and 10 deletions

View File

@@ -557,6 +557,7 @@ Assembler::Assembler(IsolateData isolate_data, void* buffer, int buffer_size)
   pending_64_bit_constants_.reserve(kMinNumPendingConstants);
   reloc_info_writer.Reposition(buffer_ + buffer_size_, pc_);
   next_buffer_check_ = 0;
+  code_target_sharing_blocked_nesting_ = 0;
   const_pool_blocked_nesting_ = 0;
   no_const_pool_before_ = 0;
   first_const_pool_32_use_ = -1;
@@ -573,7 +574,8 @@ Assembler::Assembler(IsolateData isolate_data, void* buffer, int buffer_size)

 Assembler::~Assembler() {
-  DCHECK(const_pool_blocked_nesting_ == 0);
+  DCHECK_EQ(const_pool_blocked_nesting_, 0);
+  DCHECK_EQ(code_target_sharing_blocked_nesting_, 0);
 }

 void Assembler::GetCode(Isolate* isolate, CodeDesc* desc) {
@@ -5055,9 +5057,9 @@ void Assembler::ConstantPoolAddEntry(int position, RelocInfo::Mode rmode,
   if (pending_32_bit_constants_.empty()) {
     first_const_pool_32_use_ = position;
   }
-  ConstantPoolEntry entry(
-      position, value,
-      sharing_ok || (rmode == RelocInfo::CODE_TARGET && serializer_enabled()));
+  ConstantPoolEntry entry(position, value,
+                          sharing_ok || (rmode == RelocInfo::CODE_TARGET &&
+                                         IsCodeTargetSharingAllowed()));
   bool shared = false;
   if (sharing_ok) {
@@ -5073,10 +5075,7 @@ void Assembler::ConstantPoolAddEntry(int position, RelocInfo::Mode rmode,
     }
   }

-  if (rmode == RelocInfo::CODE_TARGET && serializer_enabled()) {
-    // TODO(all): We only do this in the serializer, for now, because
-    // full-codegen relies on RelocInfo for translating PCs between full-codegen
-    // normal and debug code.
+  if (rmode == RelocInfo::CODE_TARGET && IsCodeTargetSharingAllowed()) {
     // Sharing entries here relies on canonicalized handles - without them, we
     // will miss the optimisation opportunity.
     Address handle_address = reinterpret_cast<Address>(value);

View File

@@ -1509,6 +1509,36 @@ class Assembler : public AssemblerBase {
     DISALLOW_IMPLICIT_CONSTRUCTORS(BlockConstPoolScope);
   };

+  // Class for blocking sharing of code targets in constant pool.
+  class BlockCodeTargetSharingScope {
+   public:
+    explicit BlockCodeTargetSharingScope(Assembler* assem) : assem_(nullptr) {
+      Open(assem);
+    }
+    // This constructor does not initialize the scope. The user needs to
+    // explicitly call Open() before using it.
+    BlockCodeTargetSharingScope() : assem_(nullptr) {}
+    ~BlockCodeTargetSharingScope() {
+      Close();
+    }
+    void Open(Assembler* assem) {
+      DCHECK_NULL(assem_);
+      DCHECK_NOT_NULL(assem);
+      assem_ = assem;
+      assem_->StartBlockCodeTargetSharing();
+    }
+
+   private:
+    void Close() {
+      if (assem_ != nullptr) {
+        assem_->EndBlockCodeTargetSharing();
+      }
+    }
+    Assembler* assem_;
+
+    DISALLOW_COPY_AND_ASSIGN(BlockCodeTargetSharingScope);
+  };
+
   // Debugging

   // Mark address of a debug break slot.
@@ -1675,8 +1705,22 @@ class Assembler : public AssemblerBase {
   // Patch branch instruction at pos to branch to given branch target pos
   void target_at_put(int pos, int target_pos);

+  // Prevent sharing of code target constant pool entries until
+  // EndBlockCodeTargetSharing is called. Calls to this function can be nested
+  // but must be followed by an equal number of calls to
+  // EndBlockCodeTargetSharing.
+  void StartBlockCodeTargetSharing() {
+    ++code_target_sharing_blocked_nesting_;
+  }
+
+  // Resume sharing of constant pool code target entries. Needs to be called
+  // as many times as StartBlockCodeTargetSharing to have an effect.
+  void EndBlockCodeTargetSharing() {
+    --code_target_sharing_blocked_nesting_;
+  }
+
   // Prevent constant pool emission until EndBlockConstPool is called.
-  // Call to this function can be nested but must be followed by an equal
+  // Calls to this function can be nested but must be followed by an equal
   // number of calls to EndBlockConstPool.
   void StartBlockConstPool() {
     if (const_pool_blocked_nesting_++ == 0) {
@@ -1686,7 +1730,7 @@ class Assembler : public AssemblerBase {
     }
   }

-  // Resume constant pool emission. Need to be called as many time as
+  // Resume constant pool emission. Needs to be called as many times as
   // StartBlockConstPool to have an effect.
   void EndBlockConstPool() {
     if (--const_pool_blocked_nesting_ == 0) {
@@ -1778,6 +1822,11 @@ class Assembler : public AssemblerBase {
   static constexpr int kCheckPoolIntervalInst = 32;
   static constexpr int kCheckPoolInterval = kCheckPoolIntervalInst * kInstrSize;

+  // Sharing of code target entries may be blocked in some code sequences.
+  int code_target_sharing_blocked_nesting_;
+  bool IsCodeTargetSharingAllowed() const {
+    return code_target_sharing_blocked_nesting_ == 0;
+  }
+
   // Emission of the constant pool may be blocked in some code sequences.
   int const_pool_blocked_nesting_;  // Block emission if this is not zero.
@ -1820,6 +1869,7 @@ class Assembler : public AssemblerBase {
friend class RelocInfo; friend class RelocInfo;
friend class CodePatcher; friend class CodePatcher;
friend class BlockConstPoolScope; friend class BlockConstPoolScope;
friend class BlockCodeTargetSharingScope;
friend class EnsureSpace; friend class EnsureSpace;
}; };

View File

@@ -690,6 +690,11 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
   ArchOpcode arch_opcode = ArchOpcodeField::decode(opcode);
   switch (arch_opcode) {
     case kArchCallCodeObject: {
+      // We must not share code targets for calls to builtins for WASM code, as
+      // they might need to be patched individually.
+      internal::Assembler::BlockCodeTargetSharingScope scope;
+      if (info()->IsWasm()) scope.Open(masm());
+
       EnsureSpaceForLazyDeopt();
       if (instr->InputAt(0)->IsImmediate()) {
         __ Call(Handle<Code>::cast(i.InputHeapObject(0)),
@@ -706,6 +711,11 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
     }
     case kArchTailCallCodeObjectFromJSFunction:
     case kArchTailCallCodeObject: {
+      // We must not share code targets for calls to builtins for WASM code, as
+      // they might need to be patched individually.
+      internal::Assembler::BlockCodeTargetSharingScope scope;
+      if (info()->IsWasm()) scope.Open(masm());
+
       if (arch_opcode == kArchTailCallCodeObjectFromJSFunction) {
         AssemblePopArgumentsAdaptorFrame(kJavaScriptCallArgCountRegister,
                                          i.TempRegister(0), i.TempRegister(1),

View File

@@ -109,6 +109,10 @@ class JumpPatchSite BASE_EMBEDDED {
 //   frames-arm.h for its layout.
 void FullCodeGenerator::Generate() {
   CompilationInfo* info = info_;
+  // Block sharing of code target entries. The interrupt checks must be
+  // possible to patch individually, and replacing code with a debug version
+  // relies on RelocInfo not being shared.
+  Assembler::BlockCodeTargetSharingScope block_code_target_sharing(masm_);
   profiling_counter_ = isolate()->factory()->NewCell(
       Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
   SetFunctionPosition(literal());

View File

@@ -0,0 +1,16 @@
+// Copyright 2017 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Flags: --no-turbo --cache=code --no-lazy
+
+function f() {
+  var n = a.length;
+  for (var i = 0; i < n; i++) {
+  }
+  for (var i = 0; i < n; i++) {
+  }
+}
+var a = "xxxxxxxxxxxxxxxxxxxxxxxxx";
+while (a.length < 100000) a = a + a;
+f();