ARM: Implement DoInstanceOfKnownGlobal stub
BUG=none
TEST=none

Patch by Martyn Capewell from ARM Ltd.

Review URL: http://codereview.chromium.org/6248004

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@6403 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
commit 3ff8e4d3fa
parent 0f1a2bda1a
src/arm/code-stubs-arm.cc
@@ -2890,18 +2890,33 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
 }
 
-// Uses registers r0 to r4. Expected input is
-// object in r0 (or at sp+1*kPointerSize) and function in
-// r1 (or at sp), depending on whether or not
-// args_in_registers() is true.
+// Uses registers r0 to r4.
+// Expected input (depending on whether args are in registers or on the stack):
+// * object: r0 or at sp + 1 * kPointerSize.
+// * function: r1 or at sp.
+//
+// An inlined call site may have been generated before calling this stub.
+// In this case the offset to the inline site to patch is passed on the stack,
+// in the safepoint slot for register r4.
+// (See LCodeGen::DoInstanceOfKnownGlobal)
 void InstanceofStub::Generate(MacroAssembler* masm) {
+  // Call site inlining and patching implies arguments in registers.
+  ASSERT(HasArgsInRegisters() || !HasCallSiteInlineCheck());
+  // ReturnTrueFalse is only implemented for inlined call sites.
+  ASSERT(!ReturnTrueFalseObject() || HasCallSiteInlineCheck());
+
   // Fixed register usage throughout the stub:
   const Register object = r0;  // Object (lhs).
-  const Register map = r3;  // Map of the object.
+  Register map = r3;  // Map of the object.
   const Register function = r1;  // Function (rhs).
   const Register prototype = r4;  // Prototype of the function.
+  const Register inline_site = r9;
   const Register scratch = r2;
 
+  const int32_t kDeltaToLoadBoolResult = 3 * kPointerSize;
+
   Label slow, loop, is_instance, is_not_instance, not_js_object;
 
   if (!HasArgsInRegisters()) {
     __ ldr(object, MemOperand(sp, 1 * kPointerSize));
     __ ldr(function, MemOperand(sp, 0));
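A note on the two ASSERTs above (an illustration, not part of the commit): together they pin down which stub configurations are legal. A minimal standalone C++ sketch of the same implications, with plain booleans standing in for the stub's flag accessors:

    // Mirrors the ASSERTs: an inline check requires register arguments, and
    // returning true/false objects requires an inline check.
    bool FlagsAreValid(bool args_in_registers,
                       bool call_site_inline_check,
                       bool return_true_false_object) {
      if (call_site_inline_check && !args_in_registers) return false;
      if (return_true_false_object && !call_site_inline_check) return false;
      return true;
    }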
@@ -2911,50 +2926,100 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
   __ BranchOnSmi(object, &not_js_object);
   __ IsObjectJSObjectType(object, map, scratch, &not_js_object);
 
-  // Look up the function and the map in the instanceof cache.
-  Label miss;
-  __ LoadRoot(ip, Heap::kInstanceofCacheFunctionRootIndex);
-  __ cmp(function, ip);
-  __ b(ne, &miss);
-  __ LoadRoot(ip, Heap::kInstanceofCacheMapRootIndex);
-  __ cmp(map, ip);
-  __ b(ne, &miss);
-  __ LoadRoot(r0, Heap::kInstanceofCacheAnswerRootIndex);
-  __ Ret(HasArgsInRegisters() ? 0 : 2);
+  // If there is a call site cache don't look in the global cache, but do the
+  // real lookup and update the call site cache.
+  if (!HasCallSiteInlineCheck()) {
+    Label miss;
+    __ LoadRoot(ip, Heap::kInstanceofCacheFunctionRootIndex);
+    __ cmp(function, ip);
+    __ b(ne, &miss);
+    __ LoadRoot(ip, Heap::kInstanceofCacheMapRootIndex);
+    __ cmp(map, ip);
+    __ b(ne, &miss);
+    __ LoadRoot(r0, Heap::kInstanceofCacheAnswerRootIndex);
+    __ Ret(HasArgsInRegisters() ? 0 : 2);
 
-  __ bind(&miss);
+    __ bind(&miss);
+  }
 
   // Get the prototype of the function.
   __ TryGetFunctionPrototype(function, prototype, scratch, &slow);
 
   // Check that the function prototype is a JS object.
   __ BranchOnSmi(prototype, &slow);
   __ IsObjectJSObjectType(prototype, scratch, scratch, &slow);
 
-  __ StoreRoot(function, Heap::kInstanceofCacheFunctionRootIndex);
-  __ StoreRoot(map, Heap::kInstanceofCacheMapRootIndex);
+  // Update the global instanceof or call site inlined cache with the current
+  // map and function. The cached answer will be set when it is known below.
+  if (!HasCallSiteInlineCheck()) {
+    __ StoreRoot(function, Heap::kInstanceofCacheFunctionRootIndex);
+    __ StoreRoot(map, Heap::kInstanceofCacheMapRootIndex);
+  } else {
+    ASSERT(HasArgsInRegisters());
+    // Patch the (relocated) inlined map check.
+
+    // The offset was stored in r4 safepoint slot.
+    // (See LCodeGen::DoDeferredLInstanceOfKnownGlobal)
+    __ ldr(scratch, MacroAssembler::SafepointRegisterSlot(r4));
+    __ sub(inline_site, lr, scratch);
+    // Get the map location in scratch and patch it.
+    __ GetRelocatedValueLocation(inline_site, scratch);
+    __ str(map, MemOperand(scratch));
+  }
 
   // Register mapping: r3 is object map and r4 is function prototype.
   // Get prototype of object into r2.
   __ ldr(scratch, FieldMemOperand(map, Map::kPrototypeOffset));
 
+  // We don't need map any more. Use it as a scratch register.
+  Register scratch2 = map;
+  map = no_reg;
+
   // Loop through the prototype chain looking for the function prototype.
+  __ LoadRoot(scratch2, Heap::kNullValueRootIndex);
   __ bind(&loop);
   __ cmp(scratch, Operand(prototype));
   __ b(eq, &is_instance);
-  __ LoadRoot(ip, Heap::kNullValueRootIndex);
-  __ cmp(scratch, ip);
+  __ cmp(scratch, scratch2);
   __ b(eq, &is_not_instance);
   __ ldr(scratch, FieldMemOperand(scratch, HeapObject::kMapOffset));
   __ ldr(scratch, FieldMemOperand(scratch, Map::kPrototypeOffset));
   __ jmp(&loop);
 
   __ bind(&is_instance);
-  __ mov(r0, Operand(Smi::FromInt(0)));
-  __ StoreRoot(r0, Heap::kInstanceofCacheAnswerRootIndex);
+  if (!HasCallSiteInlineCheck()) {
+    __ mov(r0, Operand(Smi::FromInt(0)));
+    __ StoreRoot(r0, Heap::kInstanceofCacheAnswerRootIndex);
+  } else {
+    // Patch the call site to return true.
+    __ LoadRoot(r0, Heap::kTrueValueRootIndex);
+    __ add(inline_site, inline_site, Operand(kDeltaToLoadBoolResult));
+    // Get the boolean result location in scratch and patch it.
+    __ GetRelocatedValueLocation(inline_site, scratch);
+    __ str(r0, MemOperand(scratch));
+
+    if (!ReturnTrueFalseObject()) {
+      __ mov(r0, Operand(Smi::FromInt(0)));
+    }
+  }
   __ Ret(HasArgsInRegisters() ? 0 : 2);
 
   __ bind(&is_not_instance);
-  __ mov(r0, Operand(Smi::FromInt(1)));
-  __ StoreRoot(r0, Heap::kInstanceofCacheAnswerRootIndex);
+  if (!HasCallSiteInlineCheck()) {
+    __ mov(r0, Operand(Smi::FromInt(1)));
+    __ StoreRoot(r0, Heap::kInstanceofCacheAnswerRootIndex);
+  } else {
+    // Patch the call site to return false.
+    __ LoadRoot(r0, Heap::kFalseValueRootIndex);
+    __ add(inline_site, inline_site, Operand(kDeltaToLoadBoolResult));
+    // Get the boolean result location in scratch and patch it.
+    __ GetRelocatedValueLocation(inline_site, scratch);
+    __ str(r0, MemOperand(scratch));
+
+    if (!ReturnTrueFalseObject()) {
+      __ mov(r0, Operand(Smi::FromInt(1)));
+    }
+  }
   __ Ret(HasArgsInRegisters() ? 0 : 2);
 
   Label object_not_null, object_not_null_or_smi;
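For readers following the loop above, a standalone C++ sketch (illustration only; Obj is a stand-in for heap objects, and nullptr plays the role of the null value that terminates the chain) of what the generated code computes. Note the stub's cached answer encodes "is an instance" as Smi 0 and "is not" as Smi 1:

    #include <cstdio>

    struct Obj {
      Obj* proto;  // stand-in for the object's map->prototype chain link
    };

    // Walks the prototype chain exactly like the code between &loop and
    // &is_instance / &is_not_instance above.
    static bool IsInstance(const Obj* object_proto, const Obj* prototype) {
      for (const Obj* p = object_proto; p != nullptr; p = p->proto) {
        if (p == prototype) return true;   // reached the function's prototype
      }
      return false;                        // fell off the chain at null
    }

    int main() {
      Obj proto = {nullptr};
      Obj first = {&proto};
      std::printf("%d %d\n", IsInstance(first.proto, &proto),
                  IsInstance(proto.proto, &first));  // prints: 1 0
      return 0;
    }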
@@ -2962,7 +3027,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
   // Before null, smi and string value checks, check that the rhs is a function
   // as for a non-function rhs an exception needs to be thrown.
   __ BranchOnSmi(function, &slow);
-  __ CompareObjectType(function, map, scratch, JS_FUNCTION_TYPE);
+  __ CompareObjectType(function, scratch2, scratch, JS_FUNCTION_TYPE);
   __ b(ne, &slow);
 
   // Null is not instance of anything.
@@ -2985,13 +3050,30 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
 
   // Slow-case. Tail call builtin.
   __ bind(&slow);
-  if (HasArgsInRegisters()) {
-    __ Push(r0, r1);
-  }
-  __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_JS);
+  if (!ReturnTrueFalseObject()) {
+    if (HasArgsInRegisters()) {
+      __ Push(r0, r1);
+    }
+    __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_JS);
+  } else {
+    __ EnterInternalFrame();
+    __ Push(r0, r1);
+    __ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_JS);
+    __ LeaveInternalFrame();
+    __ cmp(r0, Operand(0));
+    __ LoadRoot(r0, Heap::kTrueValueRootIndex, eq);
+    __ LoadRoot(r0, Heap::kFalseValueRootIndex, ne);
+    __ Ret(HasArgsInRegisters() ? 0 : 2);
+  }
 }
 
 
 Register InstanceofStub::left() { return r0; }
 
 
 Register InstanceofStub::right() { return r1; }
 
 
 void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
   // The displacement is the offset of the last parameter (if any)
   // relative to the frame pointer.
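On the new slow path for ReturnTrueFalseObject (an aside, not part of the commit): the INSTANCE_OF builtin is called inside an internal frame and its result is compared against zero, with zero meaning "is an instance" — consistent with the Smi 0 = true convention of the cache above. The conditional LoadRoot pair then materializes the true/false heap objects. The same mapping in standalone C++ terms:

    // What the cmp / LoadRoot(..., eq) / LoadRoot(..., ne) sequence computes.
    enum BoolObject { kTrueObject, kFalseObject };  // stand-ins for heap roots

    BoolObject ToBoolObject(int builtin_result) {
      return builtin_result == 0 ? kTrueObject : kFalseObject;
    }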
src/arm/lithium-arm.cc
@@ -674,6 +674,12 @@ LInstruction* LChunkBuilder::MarkAsCall(LInstruction* instr,
 }
 
 
+LInstruction* LChunkBuilder::MarkAsSaveDoubles(LInstruction* instr) {
+  allocator_->MarkAsSaveDoubles();
+  return instr;
+}
+
+
 LInstruction* LChunkBuilder::AssignPointerMap(LInstruction* instr) {
   ASSERT(!instr->HasPointerMap());
   instr->set_pointer_map(new LPointerMap(position_));
@@ -1083,8 +1089,9 @@ LInstruction* LChunkBuilder::DoInstanceOf(HInstanceOf* instr) {
 LInstruction* LChunkBuilder::DoInstanceOfKnownGlobal(
     HInstanceOfKnownGlobal* instr) {
   LInstruction* result =
-      new LInstanceOfKnownGlobal(UseFixed(instr->value(), r0));
-  return MarkAsCall(DefineFixed(result, r0), instr);
+      new LInstanceOfKnownGlobal(UseFixed(instr->value(), r0), FixedTemp(r4));
+  MarkAsSaveDoubles(result);
+  return AssignEnvironment(AssignPointerMap(DefineFixed(result, r0)));
 }
src/arm/lithium-arm.h
@@ -945,14 +945,18 @@ class LInstanceOfAndBranch: public LInstanceOf {
 
 class LInstanceOfKnownGlobal: public LUnaryOperation {
  public:
-  explicit LInstanceOfKnownGlobal(LOperand* left)
-      : LUnaryOperation(left) { }
+  explicit LInstanceOfKnownGlobal(LOperand* left, LOperand* temp)
+      : LUnaryOperation(left), temp_(temp) { }
 
   DECLARE_CONCRETE_INSTRUCTION(InstanceOfKnownGlobal,
                                "instance-of-known-global")
   DECLARE_HYDROGEN_ACCESSOR(InstanceOfKnownGlobal)
 
   Handle<JSFunction> function() const { return hydrogen()->function(); }
+  LOperand* temp() const { return temp_; }
+
+ private:
+  LOperand* temp_;
 };
@@ -1927,6 +1931,7 @@ class LChunkBuilder BASE_EMBEDDED {
                           LInstruction* instr,
                           HInstruction* hinstr,
                           CanDeoptimize can_deoptimize = CANNOT_DEOPTIMIZE_EAGERLY);
+  LInstruction* MarkAsSaveDoubles(LInstruction* instr);
 
   LInstruction* SetInstructionPendingDeoptimizationEnvironment(
       LInstruction* instr, int ast_id);
src/arm/lithium-codegen-arm.cc
@@ -1949,7 +1949,119 @@ void LCodeGen::DoInstanceOfAndBranch(LInstanceOfAndBranch* instr) {
 
 
 void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
-  Abort("DoInstanceOfKnownGlobal unimplemented.");
+  class DeferredInstanceOfKnownGlobal: public LDeferredCode {
+   public:
+    DeferredInstanceOfKnownGlobal(LCodeGen* codegen,
+                                  LInstanceOfKnownGlobal* instr)
+        : LDeferredCode(codegen), instr_(instr) { }
+    virtual void Generate() {
+      codegen()->DoDeferredLInstanceOfKnownGlobal(instr_, &map_check_);
+    }
+
+    Label* map_check() { return &map_check_; }
+
+   private:
+    LInstanceOfKnownGlobal* instr_;
+    Label map_check_;
+  };
+
+  DeferredInstanceOfKnownGlobal* deferred;
+  deferred = new DeferredInstanceOfKnownGlobal(this, instr);
+
+  Label done, false_result;
+  Register object = ToRegister(instr->input());
+  Register temp = ToRegister(instr->temp());
+  Register result = ToRegister(instr->result());
+
+  ASSERT(object.is(r0));
+  ASSERT(result.is(r0));
+
+  // A Smi is not an instance of anything.
+  __ BranchOnSmi(object, &false_result);
+
+  // This is the inlined call site instanceof cache. The two occurrences of
+  // the hole value will be patched to the last map/result pair generated by
+  // the instanceof stub.
+  Label cache_miss;
+  Register map = temp;
+  __ ldr(map, FieldMemOperand(object, HeapObject::kMapOffset));
+  __ bind(deferred->map_check());  // Label for calculating code patching.
+  // We use Factory::the_hole_value() on purpose instead of loading from the
+  // root array to force relocation to be able to later patch with
+  // the cached map.
+  __ mov(ip, Operand(Factory::the_hole_value()));
+  __ cmp(map, Operand(ip));
+  __ b(ne, &cache_miss);
+  // We use Factory::the_hole_value() on purpose instead of loading from the
+  // root array to force relocation to be able to later patch
+  // with true or false.
+  __ mov(result, Operand(Factory::the_hole_value()));
+  __ b(&done);
+
+  // The inlined call site cache did not match. Check null and string before
+  // calling the deferred code.
+  __ bind(&cache_miss);
+  // Null is not an instance of anything.
+  __ LoadRoot(ip, Heap::kNullValueRootIndex);
+  __ cmp(object, Operand(ip));
+  __ b(eq, &false_result);
+
+  // String values are not instances of anything.
+  Condition is_string = masm_->IsObjectStringType(object, temp);
+  __ b(is_string, &false_result);
+
+  // Go to the deferred code.
+  __ b(deferred->entry());
+
+  __ bind(&false_result);
+  __ LoadRoot(result, Heap::kFalseValueRootIndex);
+
+  // Here result has either true or false. Deferred code also produces true or
+  // false object.
+  __ bind(deferred->exit());
+  __ bind(&done);
 }
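A note on the patchable site emitted above (an illustration; the layout is inferred from this diff's own constants): on ARM, a mov with a heap-object operand such as Factory::the_hole_value() assembles to a pc-relative ldr from the constant pool, which is exactly what GetRelocatedValueLocation later locates and patches. The two patchable loads end up three instructions apart, which is where kDeltaToLoadBoolResult = 3 * kPointerSize in InstanceofStub::Generate comes from:

    // Inline-site layout implied by the code above; one 4-byte instruction
    // per line, "hole" marking a constant-pool slot the stub patches later.
    //
    //   map_check:       ldr ip, [pc, #imm]   ; hole -> cached map
    //   map_check + 4:   cmp map, ip
    //   map_check + 8:   bne cache_miss
    //   map_check + 12:  ldr r0, [pc, #imm]   ; hole -> true/false object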
 
 
+void LCodeGen::DoDeferredLInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
+                                                Label* map_check) {
+  Register result = ToRegister(instr->result());
+  ASSERT(result.is(r0));
+
+  InstanceofStub::Flags flags = InstanceofStub::kNoFlags;
+  flags = static_cast<InstanceofStub::Flags>(
+      flags | InstanceofStub::kArgsInRegisters);
+  flags = static_cast<InstanceofStub::Flags>(
+      flags | InstanceofStub::kCallSiteInlineCheck);
+  flags = static_cast<InstanceofStub::Flags>(
+      flags | InstanceofStub::kReturnTrueFalseObject);
+  InstanceofStub stub(flags);
+
+  __ PushSafepointRegisters();
+
+  // Get the temp register reserved by the instruction. This needs to be r4 as
+  // its slot among the pushed safepoint registers is used to communicate the
+  // offset to the location of the map check.
+  Register temp = ToRegister(instr->temp());
+  ASSERT(temp.is(r4));
+  __ mov(InstanceofStub::right(), Operand(instr->function()));
+  static const int kAdditionalDelta = 4;
+  int delta = masm_->InstructionsGeneratedSince(map_check) + kAdditionalDelta;
+  Label before_push_delta;
+  __ bind(&before_push_delta);
+  __ BlockConstPoolFor(kAdditionalDelta);
+  __ mov(temp, Operand(delta * kPointerSize));
+  __ StoreToSafepointRegisterSlot(temp);
+  __ Call(stub.GetCode(), RelocInfo::CODE_TARGET);
+  ASSERT_EQ(kAdditionalDelta,
+            masm_->InstructionsGeneratedSince(&before_push_delta));
+  RecordSafepointWithRegisters(
+      instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
+  // Put the result value into the result register slot and
+  // restore all registers.
+  __ StoreToSafepointRegisterSlot(result);
+
+  __ PopSafepointRegisters();
+}
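A worked example of the offset handshake above (the instruction count is hypothetical; the mechanism is from this diff): InstructionsGeneratedSince(map_check) counts 4-byte ARM instructions, and kAdditionalDelta = 4 covers the mov / str / Call sequence that follows the count (the ASSERT_EQ verifies exactly this). Since lr inside the stub points just past the Call, subtracting the stored byte offset lands on the patchable map-check load:

    #include <cstdint>

    // Standalone sketch of the address arithmetic; not V8 code.
    const int kInstrSize = 4;        // one ARM instruction (== kPointerSize)
    const int kAdditionalDelta = 4;  // mov + str + the Call sequence,
                                     // 4 instructions per the ASSERT_EQ

    // Byte offset stored into r4's safepoint slot by the deferred code.
    int DeltaBytes(int instrs_since_map_check) {
      return (instrs_since_map_check + kAdditionalDelta) * kInstrSize;
    }

    // Inside the stub: lr minus the stored delta == address of map_check.
    uint32_t InlineSite(uint32_t lr, int instrs_since_map_check) {
      return lr - DeltaBytes(instrs_since_map_check);
    }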
src/arm/lithium-codegen-arm.h
@@ -99,6 +99,8 @@ class LCodeGen BASE_EMBEDDED {
   void DoDeferredTaggedToI(LTaggedToI* instr);
   void DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr);
   void DoDeferredStackCheck(LGoto* instr);
+  void DoDeferredLInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
+                                        Label* map_check);
 
   // Parallel move support.
   void DoParallelMove(LParallelMove* move);
src/arm/macro-assembler-arm.cc
@@ -485,6 +485,11 @@ void MacroAssembler::PopSafepointRegistersAndDoubles() {
   PopSafepointRegisters();
 }
 
+void MacroAssembler::StoreToSafepointRegisterSlot(Register reg) {
+  str(reg, SafepointRegisterSlot(reg));
+}
+
+
 int MacroAssembler::SafepointRegisterStackIndex(int reg_code) {
   // The registers are pushed starting with the highest encoding,
   // which means that lowest encodings are closest to the stack pointer.
@@ -493,6 +498,11 @@ int MacroAssembler::SafepointRegisterStackIndex(int reg_code) {
 }
 
 
+MemOperand MacroAssembler::SafepointRegisterSlot(Register reg) {
+  return MemOperand(sp, SafepointRegisterStackIndex(reg.code()) * kInstrSize);
+}
+
+
 void MacroAssembler::Ldrd(Register dst1, Register dst2,
                           const MemOperand& src, Condition cond) {
   ASSERT(src.rm().is(no_reg));
|
||||
}
|
||||
|
||||
|
||||
void MacroAssembler::GetRelocatedValueLocation(Register ldr_location,
|
||||
Register result) {
|
||||
const uint32_t kLdrOffsetMask = (1 << 12) - 1;
|
||||
const int32_t kPCRegOffset = 2 * kPointerSize;
|
||||
ldr(result, MemOperand(ldr_location));
|
||||
if (FLAG_debug_code) {
|
||||
// Check that the instruction is a ldr reg, [pc + offset] .
|
||||
and_(result, result, Operand(kLdrPCPattern));
|
||||
cmp(result, Operand(kLdrPCPattern));
|
||||
Check(eq, "The instruction to patch should be a load from pc.");
|
||||
// Result was clobbered. Restore it.
|
||||
ldr(result, MemOperand(ldr_location));
|
||||
}
|
||||
// Get the address of the constant.
|
||||
and_(result, result, Operand(kLdrOffsetMask));
|
||||
add(result, ldr_location, Operand(result));
|
||||
add(result, result, Operand(kPCRegOffset));
|
||||
}
|
||||
|
||||
|
||||
#ifdef ENABLE_DEBUGGER_SUPPORT
|
||||
CodePatcher::CodePatcher(byte* address, int instructions)
|
||||
: address_(address),
|
||||
|
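For context (a standalone sketch, not V8 code): GetRelocatedValueLocation decodes an ARM "ldr rd, [pc, #imm12]". The low 12 bits of the instruction hold a byte offset, and the pc operand reads 8 bytes (two instructions) ahead of the load, hence the 2 * kPointerSize correction. The same arithmetic in plain C++:

    #include <cstdint>
    #include <cstdio>

    // Computes the constant-pool address referenced by an "ldr rd, [pc, #imm]"
    // located at ldr_address, mirroring the masm code above. Assumes a
    // positive (add) offset, as the patching code does.
    uint32_t ConstantPoolSlot(uint32_t ldr_address, uint32_t ldr_instruction) {
      const uint32_t kLdrOffsetMask = (1u << 12) - 1;  // low 12 bits: offset
      const uint32_t kPCRegOffset = 2 * 4;             // pc reads 8 bytes ahead
      uint32_t offset = ldr_instruction & kLdrOffsetMask;
      return ldr_address + offset + kPCRegOffset;
    }

    int main() {
      // Hypothetical example: 0xe59f0020 is "ldr r0, [pc, #32]"; at address
      // 0x1000 it reads the constant at 0x1000 + 0x20 + 8 = 0x1028.
      std::printf("0x%x\n",
                  static_cast<unsigned>(ConstantPoolSlot(0x1000, 0xe59f0020u)));
      return 0;
    }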
src/arm/macro-assembler-arm.h
@@ -234,8 +234,9 @@ class MacroAssembler: public Assembler {
   void PopSafepointRegisters();
   void PushSafepointRegistersAndDoubles();
   void PopSafepointRegistersAndDoubles();
+  void StoreToSafepointRegisterSlot(Register reg);
   static int SafepointRegisterStackIndex(int reg_code);
+  static MemOperand SafepointRegisterSlot(Register reg);
 
   // Load two consecutive registers with two consecutive memory locations.
   void Ldrd(Register dst1,
@@ -776,6 +777,15 @@ class MacroAssembler: public Assembler {
                            Label* failure);
 
 
+  // ---------------------------------------------------------------------------
+  // Patching helpers.
+
+  // Get the location of a relocated constant (its address in the constant
+  // pool) from its load site.
+  void GetRelocatedValueLocation(Register ldr_location,
+                                 Register result);
+
+
  private:
   void Jump(intptr_t target, RelocInfo::Mode rmode, Condition cond = al);
   void Call(intptr_t target, RelocInfo::Mode rmode, Condition cond = al);
src/arm/simulator-arm.cc
@@ -153,7 +153,12 @@ void Debugger::Stop(Instr* instr) {
   if (sim_->isWatchedStop(code) && !sim_->watched_stops[code].desc) {
     sim_->watched_stops[code].desc = msg;
   }
-  PrintF("Simulator hit %s\n", msg);
+  // Print the stop message and code if it is not the default code.
+  if (code != kMaxStopCode) {
+    PrintF("Simulator hit stop %u: %s\n", code, msg);
+  } else {
+    PrintF("Simulator hit %s\n", msg);
+  }
   sim_->set_pc(sim_->get_pc() + 2 * Instr::kInstrSize);
   Debug();
 }