[Turbofan] Insert nops for lazy bailout patching, fix translation of literals.
The code for EnsureSpaceForLazyDeopt is taken from lithium-codegen-*.

BUG=
R=bmeurer@chromium.org

Review URL: https://codereview.chromium.org/562033003

git-svn-id: https://v8.googlecode.com/svn/branches/bleeding_edge@24138 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
commit 9ef343c18d
parent 0a4e98ba43
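Background for the change, roughly as in lithium-codegen-*: lazy deoptimization is applied by patching a call to the deoptimizer over the code immediately following a call site. If two lazy-bailout sites end up closer together than Deoptimizer::patch_size(), the patch for the first would overwrite the second, so EnsureSpaceForLazyDeopt() pads with nops until the gap is large enough. Below is a minimal standalone sketch of just that padding arithmetic; kPatchSize and kInstrSize are illustrative stand-ins, not the real Deoptimizer::patch_size() or Assembler::kInstrSize values.

// Standalone sketch of the padding computation; the constants are hypothetical.
#include <cassert>
#include <cstdio>

namespace {

const int kPatchSize = 12;  // stand-in for Deoptimizer::patch_size()
const int kInstrSize = 4;   // stand-in for a fixed instruction width (ARM-like)

// Number of nop instructions needed at current_pc so that at least kPatchSize
// bytes follow the previous lazy-bailout site before any new code is emitted.
int PaddingInstructions(int last_lazy_deopt_pc, int current_pc) {
  if (current_pc >= last_lazy_deopt_pc + kPatchSize) return 0;
  int padding_size = last_lazy_deopt_pc + kPatchSize - current_pc;
  assert(padding_size % kInstrSize == 0);  // mirrors the DCHECK in the patch
  return padding_size / kInstrSize;
}

}  // namespace

int main() {
  // Previous lazy-bailout site ended at offset 100; the next call would start
  // at offset 104, leaving only 4 bytes for the patch, so 2 nops are emitted.
  std::printf("padding instructions: %d\n", PaddingInstructions(100, 104));
  return 0;
}

The x86 back ends can emit the padding as a single multi-byte Nop(padding_size); the fixed-width ARM and ARM64 back ends instead emit padding_size / instruction-size individual nops, as the diff below shows.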
@@ -137,6 +137,7 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
   switch (ArchOpcodeField::decode(instr->opcode())) {
     case kArchCallCodeObject: {
+      EnsureSpaceForLazyDeopt();
       if (instr->InputAt(0)->IsImmediate()) {
         __ Call(Handle<Code>::cast(i.InputHeapObject(0)),
                 RelocInfo::CODE_TARGET);
@@ -150,6 +151,7 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
       break;
     }
     case kArchCallJSFunction: {
+      EnsureSpaceForLazyDeopt();
       Register func = i.InputRegister(0);
       if (FLAG_debug_code) {
         // Check the function's context matches the context argument.
@@ -843,6 +845,27 @@ void CodeGenerator::AddNopForSmiCodeInlining() {
   // On 32-bit ARM we do not insert nops for inlined Smi code.
 }


+void CodeGenerator::EnsureSpaceForLazyDeopt() {
+  int space_needed = Deoptimizer::patch_size();
+  if (!linkage()->info()->IsStub()) {
+    // Ensure that we have enough space after the previous lazy-bailout
+    // instruction for patching the code here.
+    int current_pc = masm()->pc_offset();
+    if (current_pc < last_lazy_deopt_pc_ + space_needed) {
+      // Block literal pool emission for duration of padding.
+      v8::internal::Assembler::BlockConstPoolScope block_const_pool(masm());
+      int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc;
+      DCHECK_EQ(0, padding_size % v8::internal::Assembler::kInstrSize);
+      while (padding_size > 0) {
+        __ nop();
+        padding_size -= v8::internal::Assembler::kInstrSize;
+      }
+    }
+  }
+  MarkLazyDeoptSite();
+}
+
 #undef __

 }  // namespace compiler
@@ -132,6 +132,7 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
   InstructionCode opcode = instr->opcode();
   switch (ArchOpcodeField::decode(opcode)) {
     case kArchCallCodeObject: {
+      EnsureSpaceForLazyDeopt();
       if (instr->InputAt(0)->IsImmediate()) {
         __ Call(Handle<Code>::cast(i.InputHeapObject(0)),
                 RelocInfo::CODE_TARGET);
@@ -144,6 +145,7 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
       break;
     }
     case kArchCallJSFunction: {
+      EnsureSpaceForLazyDeopt();
       Register func = i.InputRegister(0);
       if (FLAG_debug_code) {
         // Check the function's context matches the context argument.
@@ -844,6 +846,29 @@ void CodeGenerator::AssembleSwap(InstructionOperand* source,

 void CodeGenerator::AddNopForSmiCodeInlining() { __ movz(xzr, 0); }


+void CodeGenerator::EnsureSpaceForLazyDeopt() {
+  int space_needed = Deoptimizer::patch_size();
+  if (!linkage()->info()->IsStub()) {
+    // Ensure that we have enough space after the previous lazy-bailout
+    // instruction for patching the code here.
+    intptr_t current_pc = masm()->pc_offset();
+
+    if (current_pc < (last_lazy_deopt_pc_ + space_needed)) {
+      intptr_t padding_size = last_lazy_deopt_pc_ + space_needed - current_pc;
+      DCHECK((padding_size % kInstructionSize) == 0);
+      InstructionAccurateScope instruction_accurate(
+          masm(), padding_size / kInstructionSize);
+
+      while (padding_size > 0) {
+        __ nop();
+        padding_size -= kInstructionSize;
+      }
+    }
+  }
+  MarkLazyDeoptSite();
+}
+
 #undef __

 }  // namespace compiler
@@ -21,7 +21,8 @@ CodeGenerator::CodeGenerator(InstructionSequence* code)
       safepoints_(code->zone()),
       deoptimization_states_(code->zone()),
       deoptimization_literals_(code->zone()),
-      translations_(code->zone()) {}
+      translations_(code->zone()),
+      last_lazy_deopt_pc_(0) {}


 Handle<Code> CodeGenerator::GenerateCode() {
@@ -242,6 +243,7 @@ void CodeGenerator::AddSafepointAndDeopt(Instruction* instr) {
   }

   if (needs_frame_state) {
+    MarkLazyDeoptSite();
     // If the frame state is present, it starts at argument 1
     // (just after the code address).
     InstructionOperandConverter converter(this, instr);
@@ -387,8 +389,7 @@ void CodeGenerator::AddTranslationForOperand(Translation* translation,
           isolate()->factory()->NewNumberFromInt(constant.ToInt32());
       break;
     case Constant::kFloat64:
-      constant_object =
-          isolate()->factory()->NewHeapNumber(constant.ToFloat64());
+      constant_object = isolate()->factory()->NewNumber(constant.ToFloat64());
       break;
     case Constant::kHeapObject:
       constant_object = constant.ToHeapObject();
@@ -403,6 +404,11 @@ void CodeGenerator::AddTranslationForOperand(Translation* translation,
   }
 }


+void CodeGenerator::MarkLazyDeoptSite() {
+  last_lazy_deopt_pc_ = masm()->pc_offset();
+}
+
 #if !V8_TURBOFAN_BACKEND

 void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
@@ -98,8 +98,10 @@ class CodeGenerator FINAL : public GapResolver::Assembler {
   void AddTranslationForOperand(Translation* translation, Instruction* instr,
                                 InstructionOperand* op);
   void AddNopForSmiCodeInlining();
   // ===========================================================================
+  void EnsureSpaceForLazyDeopt();
+  void MarkLazyDeoptSite();

   // ===========================================================================
   struct DeoptimizationState : ZoneObject {
    public:
     BailoutId bailout_id() const { return bailout_id_; }
@@ -126,6 +128,7 @@ class CodeGenerator FINAL : public GapResolver::Assembler {
   ZoneDeque<DeoptimizationState*> deoptimization_states_;
   ZoneDeque<Handle<Object> > deoptimization_literals_;
   TranslationBuffer translations_;
+  int last_lazy_deopt_pc_;
 };

 }  // namespace compiler
@@ -112,6 +112,7 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {

   switch (ArchOpcodeField::decode(instr->opcode())) {
     case kArchCallCodeObject: {
+      EnsureSpaceForLazyDeopt();
       if (HasImmediateInput(instr, 0)) {
         Handle<Code> code = Handle<Code>::cast(i.InputHeapObject(0));
         __ call(code, RelocInfo::CODE_TARGET);
@@ -123,6 +124,7 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
       break;
     }
     case kArchCallJSFunction: {
+      EnsureSpaceForLazyDeopt();
       Register func = i.InputRegister(0);
       if (FLAG_debug_code) {
         // Check the function's context matches the context argument.
@@ -932,6 +934,21 @@ void CodeGenerator::AssembleSwap(InstructionOperand* source,

 void CodeGenerator::AddNopForSmiCodeInlining() { __ nop(); }


+void CodeGenerator::EnsureSpaceForLazyDeopt() {
+  int space_needed = Deoptimizer::patch_size();
+  if (!linkage()->info()->IsStub()) {
+    // Ensure that we have enough space after the previous lazy-bailout
+    // instruction for patching the code here.
+    int current_pc = masm()->pc_offset();
+    if (current_pc < last_lazy_deopt_pc_ + space_needed) {
+      int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc;
+      __ Nop(padding_size);
+    }
+  }
+  MarkLazyDeoptSite();
+}
+
 #undef __

 }  // namespace compiler
@@ -205,6 +205,7 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {

   switch (ArchOpcodeField::decode(instr->opcode())) {
     case kArchCallCodeObject: {
+      EnsureSpaceForLazyDeopt();
       if (HasImmediateInput(instr, 0)) {
         Handle<Code> code = Handle<Code>::cast(i.InputHeapObject(0));
         __ Call(code, RelocInfo::CODE_TARGET);
@@ -217,6 +218,7 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
       break;
     }
     case kArchCallJSFunction: {
+      EnsureSpaceForLazyDeopt();
       Register func = i.InputRegister(0);
       if (FLAG_debug_code) {
         // Check the function's context matches the context argument.
@@ -991,6 +993,21 @@ void CodeGenerator::AssembleSwap(InstructionOperand* source,

 void CodeGenerator::AddNopForSmiCodeInlining() { __ nop(); }


+void CodeGenerator::EnsureSpaceForLazyDeopt() {
+  int space_needed = Deoptimizer::patch_size();
+  if (!linkage()->info()->IsStub()) {
+    // Ensure that we have enough space after the previous lazy-bailout
+    // instruction for patching the code here.
+    int current_pc = masm()->pc_offset();
+    if (current_pc < last_lazy_deopt_pc_ + space_needed) {
+      int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc;
+      __ Nop(padding_size);
+    }
+  }
+  MarkLazyDeoptSite();
+}
+
 #undef __

 }  // namespace internal
@@ -25,7 +25,7 @@
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

-// Flags: --allow-natives-syntax
+// Flags: --allow-natives-syntax --turbo-deoptimization

 (function OneByteSeqStringSetCharDeoptOsr() {
   function deopt() {