remove most uses of Isolate::Current in arch specific files
R=svenpanne@chromium.org
BUG=

Review URL: https://codereview.chromium.org/24031003

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@16643 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent: ae7813cacf
commit: 0a6d15453c
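The recurring pattern in the diff below: code that used to fetch the isolate through the thread-global accessor Isolate::Current() now either takes an Isolate* parameter or derives one from an object it already holds (masm->isolate(), heap->isolate(), debug_info_->GetIsolate()). A minimal standalone C++ sketch of the before/after shape — the classes below are simplified stand-ins, not V8's real definitions:

#include <cassert>
#include <cstdio>

class Isolate {
 public:
  // Before: a thread-local "current" accessor that hides the dependency.
  static Isolate* Current() { return current_; }
  static void SetCurrent(Isolate* isolate) { current_ = isolate; }
  bool fp_stubs_generated() const { return fp_stubs_generated_; }

 private:
  static thread_local Isolate* current_;
  bool fp_stubs_generated_ = true;
};
thread_local Isolate* Isolate::current_ = nullptr;

class CEntryStubBefore {
 public:
  bool IsPregenerated() {  // Hidden global dependency.
    return !save_doubles_ || Isolate::Current()->fp_stubs_generated();
  }
  bool save_doubles_ = true;
};

class CEntryStubAfter {
 public:
  bool IsPregenerated(Isolate* isolate) {  // Dependency made explicit.
    return !save_doubles_ || isolate->fp_stubs_generated();
  }
  bool save_doubles_ = true;
};

int main() {
  Isolate isolate;
  Isolate::SetCurrent(&isolate);  // Setup the old style silently requires.
  CEntryStubBefore before;
  CEntryStubAfter after;
  assert(before.IsPregenerated() == after.IsPregenerated(&isolate));
  std::printf("both styles agree\n");
  return 0;
}

The explicit form also removes the hidden requirement that the calling thread has already entered the right isolate before the stub is queried.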
@@ -279,7 +279,7 @@ bool RelocInfo::IsPatchedDebugBreakSlotSequence() {
 }


-void RelocInfo::Visit(ObjectVisitor* visitor) {
+void RelocInfo::Visit(Isolate* isolate, ObjectVisitor* visitor) {
   RelocInfo::Mode mode = rmode();
   if (mode == RelocInfo::EMBEDDED_OBJECT) {
     visitor->VisitEmbeddedPointer(this);
@@ -292,12 +292,11 @@ void RelocInfo::Visit(ObjectVisitor* visitor) {
   } else if (RelocInfo::IsCodeAgeSequence(mode)) {
     visitor->VisitCodeAgeSequence(this);
 #ifdef ENABLE_DEBUGGER_SUPPORT
-  // TODO(isolates): Get a cached isolate below.
   } else if (((RelocInfo::IsJSReturn(mode) &&
               IsPatchedReturnSequence()) ||
              (RelocInfo::IsDebugBreakSlot(mode) &&
               IsPatchedDebugBreakSlotSequence())) &&
-             Isolate::Current()->debug()->has_break_points()) {
+             isolate->debug()->has_break_points()) {
     visitor->VisitDebugTarget(this);
 #endif
   } else if (RelocInfo::IsRuntimeEntry(mode)) {
@@ -324,15 +324,12 @@ void RelocInfo::PatchCodeWithCall(Address target, int guard_bytes) {
 // See assembler-arm-inl.h for inlined constructors

 Operand::Operand(Handle<Object> handle) {
-#ifdef DEBUG
-  Isolate* isolate = Isolate::Current();
-#endif
   AllowDeferredHandleDereference using_raw_address;
   rm_ = no_reg;
   // Verify all Objects referred by code are NOT in new space.
   Object* obj = *handle;
-  ASSERT(!isolate->heap()->InNewSpace(obj));
   if (obj->IsHeapObject()) {
+    ASSERT(!HeapObject::cast(obj)->GetHeap()->InNewSpace(obj));
     imm32_ = reinterpret_cast<intptr_t>(handle.location());
     rmode_ = RelocInfo::EMBEDDED_OBJECT;
   } else {
@@ -622,7 +622,7 @@ void DoubleToIStub::Generate(MacroAssembler* masm) {
 }


-bool WriteInt32ToHeapNumberStub::IsPregenerated() {
+bool WriteInt32ToHeapNumberStub::IsPregenerated(Isolate* isolate) {
   // These variants are compiled ahead of time. See next method.
   if (the_int_.is(r1) && the_heap_number_.is(r0) && scratch_.is(r2)) {
     return true;
@@ -2694,8 +2694,8 @@ bool CEntryStub::NeedsImmovableCode() {
 }


-bool CEntryStub::IsPregenerated() {
-  return (!save_doubles_ || Isolate::Current()->fp_stubs_generated()) &&
+bool CEntryStub::IsPregenerated(Isolate* isolate) {
+  return (!save_doubles_ || isolate->fp_stubs_generated()) &&
          result_size_ == 1;
 }

@@ -6457,7 +6457,7 @@ static const AheadOfTimeWriteBarrierStubList kAheadOfTime[] = {
 #undef REG


-bool RecordWriteStub::IsPregenerated() {
+bool RecordWriteStub::IsPregenerated(Isolate* isolate) {
   for (const AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
        !entry->object.is(no_reg);
        entry++) {
@@ -68,7 +68,7 @@ class StoreBufferOverflowStub: public PlatformCodeStub {

   void Generate(MacroAssembler* masm);

-  virtual bool IsPregenerated() { return true; }
+  virtual bool IsPregenerated(Isolate* isolate) V8_OVERRIDE { return true; }
   static void GenerateFixedRegStubsAheadOfTime(Isolate* isolate);
   virtual bool SometimesSetsUpAFrame() { return false; }

@@ -232,7 +232,7 @@ class WriteInt32ToHeapNumberStub : public PlatformCodeStub {
         the_heap_number_(the_heap_number),
         scratch_(scratch) { }

-  bool IsPregenerated();
+  virtual bool IsPregenerated(Isolate* isolate) V8_OVERRIDE;
   static void GenerateFixedRegStubsAheadOfTime(Isolate* isolate);

  private:
@@ -305,7 +305,7 @@ class RecordWriteStub: public PlatformCodeStub {
     INCREMENTAL_COMPACTION
   };

-  virtual bool IsPregenerated();
+  virtual bool IsPregenerated(Isolate* isolate) V8_OVERRIDE;
   static void GenerateFixedRegStubsAheadOfTime(Isolate* isolate);
   virtual bool SometimesSetsUpAFrame() { return false; }

@@ -55,7 +55,8 @@ void BreakLocationIterator::SetDebugBreakAtReturn() {
   CodePatcher patcher(rinfo()->pc(), Assembler::kJSReturnSequenceInstructions);
   patcher.masm()->ldr(v8::internal::ip, MemOperand(v8::internal::pc, 0));
   patcher.masm()->blx(v8::internal::ip);
-  patcher.Emit(Isolate::Current()->debug()->debug_break_return()->entry());
+  patcher.Emit(
+      debug_info_->GetIsolate()->debug()->debug_break_return()->entry());
   patcher.masm()->bkpt(0);
 }

@@ -95,7 +96,8 @@ void BreakLocationIterator::SetDebugBreakAtSlot() {
   CodePatcher patcher(rinfo()->pc(), Assembler::kDebugBreakSlotInstructions);
   patcher.masm()->ldr(v8::internal::ip, MemOperand(v8::internal::pc, 0));
   patcher.masm()->blx(v8::internal::ip);
-  patcher.Emit(Isolate::Current()->debug()->debug_break_slot()->entry());
+  patcher.Emit(
+      debug_info_->GetIsolate()->debug()->debug_break_slot()->entry());
 }


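In the two debug-break hunks above, the patcher asks the DebugInfo it already references for its isolate instead of consulting the global. A tiny sketch of that "ask an object you already hold" variant, with hypothetical types rather than V8's:

#include <cstdio>

// Hypothetical stand-ins, not V8's classes: every object knows the
// isolate that owns it, so code holding the object needs no global.
struct Isolate {
  const char* name;
};

struct DebugInfo {
  Isolate* owner;
  Isolate* GetIsolate() const { return owner; }  // cf. debug_info_->GetIsolate()
};

struct BreakLocationIterator {
  DebugInfo* debug_info_;
  void SetDebugBreakAtReturn() {
    // Context now comes from the object being iterated over, not from a
    // thread-global "current isolate".
    std::printf("patching debug break in isolate '%s'\n",
                debug_info_->GetIsolate()->name);
  }
};

int main() {
  Isolate isolate{"main"};
  DebugInfo info{&isolate};
  BreakLocationIterator it{&info};
  it.SetDebugBreakAtReturn();
}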
@@ -354,7 +354,7 @@ void CallICBase::GenerateMonomorphicCacheProbe(MacroAssembler* masm,
                                                     extra_state,
                                                     Code::NORMAL,
                                                     argc);
-  Isolate::Current()->stub_cache()->GenerateProbe(
+  masm->isolate()->stub_cache()->GenerateProbe(
       masm, flags, r1, r2, r3, r4, r5, r6);

   // If the stub cache probing failed, the receiver might be a value.
@@ -393,7 +393,7 @@ void CallICBase::GenerateMonomorphicCacheProbe(MacroAssembler* masm,

   // Probe the stub cache for the value object.
   __ bind(&probe);
-  Isolate::Current()->stub_cache()->GenerateProbe(
+  masm->isolate()->stub_cache()->GenerateProbe(
       masm, flags, r1, r2, r3, r4, r5, r6);

   __ bind(&miss);
@@ -658,7 +658,7 @@ void LoadIC::GenerateMegamorphic(MacroAssembler* masm) {
   Code::Flags flags = Code::ComputeFlags(
       Code::STUB, MONOMORPHIC, Code::kNoExtraICState,
       Code::NORMAL, Code::LOAD_IC);
-  Isolate::Current()->stub_cache()->GenerateProbe(
+  masm->isolate()->stub_cache()->GenerateProbe(
       masm, flags, r0, r2, r3, r4, r5, r6);

   // Cache miss: Jump to runtime.
@@ -1490,7 +1490,7 @@ void StoreIC::GenerateMegamorphic(MacroAssembler* masm,
       Code::STUB, MONOMORPHIC, strict_mode,
       Code::NORMAL, Code::STORE_IC);

-  Isolate::Current()->stub_cache()->GenerateProbe(
+  masm->isolate()->stub_cache()->GenerateProbe(
       masm, flags, r1, r2, r3, r4, r5, r6);

   // Cache miss: Jump to runtime.
@@ -872,7 +872,7 @@ Handle<HeapObject> RegExpMacroAssemblerARM::GetCode(Handle<String> source) {
   masm_->GetCode(&code_desc);
   Handle<Code> code = isolate()->factory()->NewCode(
       code_desc, Code::ComputeFlags(Code::REGEXP), masm_->CodeObject());
-  PROFILE(Isolate::Current(), RegExpCodeCreateEvent(*code, *source));
+  PROFILE(masm_->isolate(), RegExpCodeCreateEvent(*code, *source));
   return Handle<HeapObject>::cast(code);
 }

@@ -1097,7 +1097,6 @@ int RegExpMacroAssemblerARM::CheckStackGuardState(Address* return_address,
                                                   Code* re_code,
                                                   Address re_frame) {
   Isolate* isolate = frame_entry<Isolate*>(re_frame, kIsolate);
-  ASSERT(isolate == Isolate::Current());
   if (isolate->stack_guard()->IsStackOverflow()) {
     isolate->StackOverflow();
     return EXCEPTION;
@@ -425,7 +425,7 @@ class RelocInfo BASE_EMBEDDED {
   INLINE(Object** call_object_address());

   template<typename StaticVisitor> inline void Visit(Heap* heap);
-  inline void Visit(ObjectVisitor* v);
+  inline void Visit(Isolate* isolate, ObjectVisitor* v);

   // Patch the code with some other code.
   void PatchCode(byte* instructions, int instruction_count);
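This declaration is the center of the change: RelocInfo::Visit gains an Isolate* parameter, and every caller — the arch-specific inline definitions above, mark-compact and objects.cc below — threads one through. A simplified, self-contained sketch of a visitor API gaining an explicit context parameter (invented types, not the real RelocInfo):

#include <cstdio>
#include <vector>

struct Isolate {
  bool break_points = false;
};

struct ObjectVisitor {
  virtual ~ObjectVisitor() {}
  virtual void VisitEmbeddedPointer(int slot) { std::printf("ptr %d\n", slot); }
  virtual void VisitDebugTarget(int slot) { std::printf("debug %d\n", slot); }
};

struct RelocInfo {
  int slot;
  bool is_debug;
  // New-style signature: the caller supplies the isolate instead of the
  // visitee reaching for Isolate::Current() internally.
  void Visit(Isolate* isolate, ObjectVisitor* v) {
    if (is_debug && isolate->break_points) {
      v->VisitDebugTarget(slot);
    } else {
      v->VisitEmbeddedPointer(slot);
    }
  }
};

int main() {
  Isolate isolate;
  isolate.break_points = true;
  ObjectVisitor v;
  std::vector<RelocInfo> infos = {{0, false}, {1, true}};
  for (RelocInfo& r : infos) r.Visit(&isolate, &v);  // caller threads it through
}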
@@ -136,7 +136,7 @@ Handle<Code> CodeStub::GetCode(Isolate* isolate) {
   if (UseSpecialCache()
           ? FindCodeInSpecialCache(&code, isolate)
           : FindCodeInCache(&code, isolate)) {
-    ASSERT(IsPregenerated() == code->is_pregenerated());
+    ASSERT(IsPregenerated(isolate) == code->is_pregenerated());
     return Handle<Code>(code);
   }

@@ -158,14 +158,14 @@ class CodeStub BASE_EMBEDDED {
   virtual ~CodeStub() {}

   bool CompilingCallsToThisStubIsGCSafe(Isolate* isolate) {
-    bool is_pregenerated = IsPregenerated();
+    bool is_pregenerated = IsPregenerated(isolate);
     Code* code = NULL;
     CHECK(!is_pregenerated || FindCodeInCache(&code, isolate));
     return is_pregenerated;
   }

   // See comment above, where Instanceof is defined.
-  virtual bool IsPregenerated() { return false; }
+  virtual bool IsPregenerated(Isolate* isolate) { return false; }

   static void GenerateStubsAheadOfTime(Isolate* isolate);
   static void GenerateFPStubs(Isolate* isolate);
@@ -682,7 +682,7 @@ class CreateAllocationSiteStub : public HydrogenCodeStub {

   virtual Handle<Code> GenerateCode(Isolate* isolate);

-  virtual bool IsPregenerated() { return true; }
+  virtual bool IsPregenerated(Isolate* isolate) V8_OVERRIDE { return true; }

   static void GenerateAheadOfTime(Isolate* isolate);

@@ -1305,7 +1305,7 @@ class CEntryStub : public PlatformCodeStub {
   // time, so it's OK to call it from other stubs that can't cope with GC during
   // their code generation. On machines that always have gp registers (x64) we
   // can generate both variants ahead of time.
-  virtual bool IsPregenerated();
+  virtual bool IsPregenerated(Isolate* isolate) V8_OVERRIDE;
   static void GenerateAheadOfTime(Isolate* isolate);

  private:
@@ -1317,6 +1317,7 @@ class CEntryStub : public PlatformCodeStub {
                     bool always_allocate_scope);

   // Number of pointers/values returned.
+  Isolate* isolate_;
   const int result_size_;
   SaveFPRegsMode save_doubles_;

@@ -1895,7 +1896,7 @@ class ArrayConstructorStubBase : public HydrogenCodeStub {
     return ContextCheckModeBits::decode(bit_field_);
   }

-  virtual bool IsPregenerated() {
+  virtual bool IsPregenerated(Isolate* isolate) V8_OVERRIDE {
     // We only pre-generate stubs that verify correct context
     return context_mode() == CONTEXT_CHECK_REQUIRED;
   }
@@ -1996,7 +1997,7 @@ class InternalArrayConstructorStubBase : public HydrogenCodeStub {
     kind_ = kind;
   }

-  virtual bool IsPregenerated() { return true; }
+  virtual bool IsPregenerated(Isolate* isolate) V8_OVERRIDE { return true; }
   static void GenerateStubsAheadOfTime(Isolate* isolate);
   static void InstallDescriptors(Isolate* isolate);

@@ -2260,7 +2261,7 @@ class StubFailureTrampolineStub : public PlatformCodeStub {
   explicit StubFailureTrampolineStub(StubFunctionMode function_mode)
       : fp_registers_(CanUseFPRegisters()), function_mode_(function_mode) {}

-  virtual bool IsPregenerated() { return true; }
+  virtual bool IsPregenerated(Isolate* isolate) V8_OVERRIDE { return true; }

   static void GenerateAheadOfTime(Isolate* isolate);

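The header hunks above change the signature of a virtual spread across a large stub hierarchy. V8_OVERRIDE (V8's macro for C++11 override) is what makes that migration safe: an override left behind on the old signature would otherwise compile as an unrelated overload. A compilable sketch of the hazard, using stand-in class names:

struct Isolate {};

struct CodeStub {
  virtual ~CodeStub() {}
  // Base signature after the change: the isolate is an explicit argument.
  virtual bool IsPregenerated(Isolate* isolate) { return false; }
};

struct StoreBufferOverflowStubLike : CodeStub {
  // 'override' (spelled V8_OVERRIDE in V8 headers of this era) asks the
  // compiler to verify that a base-class virtual is actually overridden.
  bool IsPregenerated(Isolate* isolate) override { return true; }
};

// struct StaleStub : CodeStub {
//   // A subclass missed by the migration would silently become an
//   // unrelated overload -- unless 'override' turns it into an error:
//   bool IsPregenerated() override { return true; }  // error: does not override
// };

int main() {
  Isolate isolate;
  StoreBufferOverflowStubLike stub;
  CodeStub* base = &stub;
  return base->IsPregenerated(&isolate) ? 0 : 1;
}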
@@ -251,7 +251,7 @@ bool RelocInfo::IsPatchedDebugBreakSlotSequence() {
 }


-void RelocInfo::Visit(ObjectVisitor* visitor) {
+void RelocInfo::Visit(Isolate* isolate, ObjectVisitor* visitor) {
   RelocInfo::Mode mode = rmode();
   if (mode == RelocInfo::EMBEDDED_OBJECT) {
     visitor->VisitEmbeddedPointer(this);
@@ -266,12 +266,11 @@ void RelocInfo::Visit(ObjectVisitor* visitor) {
   } else if (RelocInfo::IsCodeAgeSequence(mode)) {
     visitor->VisitCodeAgeSequence(this);
 #ifdef ENABLE_DEBUGGER_SUPPORT
-  // TODO(isolates): Get a cached isolate below.
   } else if (((RelocInfo::IsJSReturn(mode) &&
               IsPatchedReturnSequence()) ||
              (RelocInfo::IsDebugBreakSlot(mode) &&
               IsPatchedDebugBreakSlotSequence())) &&
-             Isolate::Current()->debug()->has_break_points()) {
+             isolate->debug()->has_break_points()) {
     visitor->VisitDebugTarget(this);
 #endif
   } else if (IsRuntimeEntry(mode)) {
@@ -329,14 +328,11 @@ Immediate::Immediate(Label* internal_offset) {


 Immediate::Immediate(Handle<Object> handle) {
-#ifdef DEBUG
-  Isolate* isolate = Isolate::Current();
-#endif
   AllowDeferredHandleDereference using_raw_address;
   // Verify all Objects referred by code are NOT in new space.
   Object* obj = *handle;
-  ASSERT(!isolate->heap()->InNewSpace(obj));
   if (obj->IsHeapObject()) {
+    ASSERT(!HeapObject::cast(obj)->GetHeap()->InNewSpace(obj));
     x_ = reinterpret_cast<intptr_t>(handle.location());
     rmode_ = RelocInfo::EMBEDDED_OBJECT;
   } else {
@@ -4430,8 +4430,8 @@ bool CEntryStub::NeedsImmovableCode() {
 }


-bool CEntryStub::IsPregenerated() {
-  return (!save_doubles_ || Isolate::Current()->fp_stubs_generated()) &&
+bool CEntryStub::IsPregenerated(Isolate* isolate) {
+  return (!save_doubles_ || isolate->fp_stubs_generated()) &&
          result_size_ == 1;
 }

@@ -6847,7 +6847,7 @@ static const AheadOfTimeWriteBarrierStubList kAheadOfTime[] = {

 #undef REG

-bool RecordWriteStub::IsPregenerated() {
+bool RecordWriteStub::IsPregenerated(Isolate* isolate) {
   for (const AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
        !entry->object.is(no_reg);
        entry++) {
@@ -74,7 +74,7 @@ class StoreBufferOverflowStub: public PlatformCodeStub {

   void Generate(MacroAssembler* masm);

-  virtual bool IsPregenerated() { return true; }
+  virtual bool IsPregenerated(Isolate* isolate) V8_OVERRIDE { return true; }
   static void GenerateFixedRegStubsAheadOfTime(Isolate* isolate);
   virtual bool SometimesSetsUpAFrame() { return false; }

@@ -327,7 +327,7 @@ class RecordWriteStub: public PlatformCodeStub {
     INCREMENTAL_COMPACTION
   };

-  virtual bool IsPregenerated();
+  virtual bool IsPregenerated(Isolate* isolate) V8_OVERRIDE;
   static void GenerateFixedRegStubsAheadOfTime(Isolate* isolate);
   virtual bool SometimesSetsUpAFrame() { return false; }

@@ -49,8 +49,8 @@ bool BreakLocationIterator::IsDebugBreakAtReturn() {
 void BreakLocationIterator::SetDebugBreakAtReturn() {
   ASSERT(Assembler::kJSReturnSequenceLength >=
          Assembler::kCallInstructionLength);
-  Isolate* isolate = Isolate::Current();
-  rinfo()->PatchCodeWithCall(isolate->debug()->debug_break_return()->entry(),
+  rinfo()->PatchCodeWithCall(
+      debug_info_->GetIsolate()->debug()->debug_break_return()->entry(),
       Assembler::kJSReturnSequenceLength - Assembler::kCallInstructionLength);
 }

@@ -79,7 +79,7 @@ bool BreakLocationIterator::IsDebugBreakAtSlot() {

 void BreakLocationIterator::SetDebugBreakAtSlot() {
   ASSERT(IsDebugBreakSlot());
-  Isolate* isolate = Isolate::Current();
+  Isolate* isolate = debug_info_->GetIsolate();
   rinfo()->PatchCodeWithCall(
       isolate->debug()->debug_break_slot()->entry(),
       Assembler::kDebugBreakSlotLength - Assembler::kCallInstructionLength);
@@ -1306,7 +1306,7 @@ void LoadIC::GenerateMegamorphic(MacroAssembler* masm) {
   Code::Flags flags = Code::ComputeFlags(
       Code::STUB, MONOMORPHIC, Code::kNoExtraICState,
       Code::NORMAL, Code::LOAD_IC);
-  Isolate::Current()->stub_cache()->GenerateProbe(
+  masm->isolate()->stub_cache()->GenerateProbe(
       masm, flags, edx, ecx, ebx, eax);

   // Cache miss: Jump to runtime.
@@ -1425,8 +1425,8 @@ void StoreIC::GenerateMegamorphic(MacroAssembler* masm,
   Code::Flags flags = Code::ComputeFlags(
       Code::STUB, MONOMORPHIC, strict_mode,
       Code::NORMAL, Code::STORE_IC);
-  Isolate::Current()->stub_cache()->GenerateProbe(masm, flags, edx, ecx, ebx,
-                                                  no_reg);
+  masm->isolate()->stub_cache()->GenerateProbe(
+      masm, flags, edx, ecx, ebx, no_reg);

   // Cache miss: Jump to runtime.
   GenerateMiss(masm);
@@ -1099,7 +1099,6 @@ int RegExpMacroAssemblerIA32::CheckStackGuardState(Address* return_address,
                                                    Code* re_code,
                                                    Address re_frame) {
   Isolate* isolate = frame_entry<Isolate*>(re_frame, kIsolate);
-  ASSERT(isolate == Isolate::Current());
   if (isolate->stack_guard()->IsStackOverflow()) {
     isolate->StackOverflow();
     return EXCEPTION;
@@ -3049,13 +3049,14 @@ class EvacuationWeakObjectRetainer : public WeakObjectRetainer {
 };


-static inline void UpdateSlot(ObjectVisitor* v,
+static inline void UpdateSlot(Isolate* isolate,
+                              ObjectVisitor* v,
                               SlotsBuffer::SlotType slot_type,
                               Address addr) {
   switch (slot_type) {
     case SlotsBuffer::CODE_TARGET_SLOT: {
       RelocInfo rinfo(addr, RelocInfo::CODE_TARGET, 0, NULL);
-      rinfo.Visit(v);
+      rinfo.Visit(isolate, v);
       break;
     }
     case SlotsBuffer::CODE_ENTRY_SLOT: {
@@ -3069,17 +3070,17 @@ static inline void UpdateSlot(ObjectVisitor* v,
     }
     case SlotsBuffer::DEBUG_TARGET_SLOT: {
       RelocInfo rinfo(addr, RelocInfo::DEBUG_BREAK_SLOT, 0, NULL);
-      if (rinfo.IsPatchedDebugBreakSlotSequence()) rinfo.Visit(v);
+      if (rinfo.IsPatchedDebugBreakSlotSequence()) rinfo.Visit(isolate, v);
       break;
     }
     case SlotsBuffer::JS_RETURN_SLOT: {
       RelocInfo rinfo(addr, RelocInfo::JS_RETURN, 0, NULL);
-      if (rinfo.IsPatchedReturnSequence()) rinfo.Visit(v);
+      if (rinfo.IsPatchedReturnSequence()) rinfo.Visit(isolate, v);
       break;
     }
     case SlotsBuffer::EMBEDDED_OBJECT_SLOT: {
       RelocInfo rinfo(addr, RelocInfo::EMBEDDED_OBJECT, 0, NULL);
-      rinfo.Visit(v);
+      rinfo.Visit(isolate, v);
       break;
     }
     default:
@@ -4266,7 +4267,8 @@ void SlotsBuffer::UpdateSlots(Heap* heap) {
     } else {
       ++slot_idx;
       ASSERT(slot_idx < idx_);
-      UpdateSlot(&v,
+      UpdateSlot(heap->isolate(),
+                 &v,
                  DecodeSlotType(slot),
                  reinterpret_cast<Address>(slots_[slot_idx]));
     }
@@ -4288,7 +4290,8 @@ void SlotsBuffer::UpdateSlotsWithFilter(Heap* heap) {
       ASSERT(slot_idx < idx_);
       Address pc = reinterpret_cast<Address>(slots_[slot_idx]);
       if (!IsOnInvalidatedCodeObject(pc)) {
-        UpdateSlot(&v,
+        UpdateSlot(heap->isolate(),
+                   &v,
                    DecodeSlotType(slot),
                    reinterpret_cast<Address>(slots_[slot_idx]));
       }
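Here the extra parameter is threaded through a static helper: UpdateSlot gains Isolate* as its first argument, and both call sites obtain it once from the Heap* they already have. The same shape as a runnable toy, with hypothetical types:

#include <cstdio>

struct Isolate {
  const char* name;
};

struct Heap {
  Isolate* isolate_;
  Isolate* isolate() { return isolate_; }  // cf. heap->isolate()
};

enum class SlotType { CODE_TARGET, EMBEDDED_OBJECT };

// The helper gains an explicit Isolate* as its first parameter...
static void UpdateSlot(Isolate* isolate, SlotType type, int addr) {
  std::printf("update %s slot %d in isolate '%s'\n",
              type == SlotType::CODE_TARGET ? "code" : "object",
              addr, isolate->name);
}

// ...and each call site derives it from what it already holds.
static void UpdateSlots(Heap* heap) {
  for (int addr = 0; addr < 3; ++addr) {
    UpdateSlot(heap->isolate(), SlotType::CODE_TARGET, addr);
  }
}

int main() {
  Isolate isolate{"main"};
  Heap heap{&isolate};
  UpdateSlots(&heap);
}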
@@ -848,8 +848,9 @@ void Code::CodeIterateBody(ObjectVisitor* v) {
   IteratePointer(v, kTypeFeedbackInfoOffset);

   RelocIterator it(this, mode_mask);
+  Isolate* isolate = this->GetIsolate();
   for (; !it.done(); it.next()) {
-    it.rinfo()->Visit(v);
+    it.rinfo()->Visit(isolate, v);
   }
 }

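One detail worth noting in the hunk above: GetIsolate() is called once, before the loop, and the cached pointer is reused for every relocation entry rather than being looked up per iteration. Roughly, with stand-in types instead of the real Code/RelocIterator:

#include <vector>

struct Isolate {};
struct ObjectVisitor {};

struct RelocEntry {
  void Visit(Isolate* /*isolate*/, ObjectVisitor* /*v*/) {
    // per-entry work would go here
  }
};

struct CodeLike {
  Isolate* owner_;
  std::vector<RelocEntry> entries_;
  Isolate* GetIsolate() { return owner_; }

  void IterateBody(ObjectVisitor* v) {
    Isolate* isolate = GetIsolate();  // fetched once, outside the loop
    for (RelocEntry& e : entries_) {
      e.Visit(isolate, v);            // reused on every iteration
    }
  }
};

int main() {
  Isolate isolate;
  ObjectVisitor v;
  CodeLike code{&isolate, std::vector<RelocEntry>(4)};
  code.IterateBody(&v);
}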
@@ -448,7 +448,7 @@ Object** RelocInfo::call_object_address() {
 }


-void RelocInfo::Visit(ObjectVisitor* visitor) {
+void RelocInfo::Visit(Isolate* isolate, ObjectVisitor* visitor) {
   RelocInfo::Mode mode = rmode();
   if (mode == RelocInfo::EMBEDDED_OBJECT) {
     visitor->VisitEmbeddedPointer(this);
@@ -463,12 +463,11 @@ void RelocInfo::Visit(ObjectVisitor* visitor) {
   } else if (RelocInfo::IsCodeAgeSequence(mode)) {
     visitor->VisitCodeAgeSequence(this);
 #ifdef ENABLE_DEBUGGER_SUPPORT
-  // TODO(isolates): Get a cached isolate below.
   } else if (((RelocInfo::IsJSReturn(mode) &&
               IsPatchedReturnSequence()) ||
              (RelocInfo::IsDebugBreakSlot(mode) &&
               IsPatchedDebugBreakSlotSequence())) &&
-             Isolate::Current()->debug()->has_break_points()) {
+             isolate->debug()->has_break_points()) {
     visitor->VisitDebugTarget(this);
 #endif
   } else if (RelocInfo::IsRuntimeEntry(mode)) {
@@ -1200,7 +1200,7 @@ void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
       ExternalReference::transcendental_cache_array_address(masm->isolate());
   __ movq(rax, cache_array);
   int cache_array_index =
-      type_ * sizeof(Isolate::Current()->transcendental_cache()->caches_[0]);
+      type_ * sizeof(masm->isolate()->transcendental_cache()->caches_[0]);
   __ movq(rax, Operand(rax, cache_array_index));
   // rax points to the cache for the type type_.
   // If NULL, the cache hasn't been initialized yet, so go through runtime.
@@ -3490,7 +3490,7 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
     __ SetCallKind(rcx, CALL_AS_METHOD);
     __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION);
     Handle<Code> adaptor =
-        Isolate::Current()->builtins()->ArgumentsAdaptorTrampoline();
+        isolate->builtins()->ArgumentsAdaptorTrampoline();
     __ Jump(adaptor, RelocInfo::CODE_TARGET);
   }

@@ -3545,7 +3545,7 @@ bool CEntryStub::NeedsImmovableCode() {
 }


-bool CEntryStub::IsPregenerated() {
+bool CEntryStub::IsPregenerated(Isolate* isolate) {
 #ifdef _WIN64
   return result_size_ == 1;
 #else
@@ -5937,7 +5937,7 @@ struct AheadOfTimeWriteBarrierStubList kAheadOfTime[] = {

 #undef REG

-bool RecordWriteStub::IsPregenerated() {
+bool RecordWriteStub::IsPregenerated(Isolate* isolate) {
   for (AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
        !entry->object.is(no_reg);
        entry++) {
@@ -69,7 +69,7 @@ class StoreBufferOverflowStub: public PlatformCodeStub {

   void Generate(MacroAssembler* masm);

-  virtual bool IsPregenerated() { return true; }
+  virtual bool IsPregenerated(Isolate* isolate) V8_OVERRIDE { return true; }
   static void GenerateFixedRegStubsAheadOfTime(Isolate* isolate);
   virtual bool SometimesSetsUpAFrame() { return false; }

@@ -321,7 +321,7 @@ class RecordWriteStub: public PlatformCodeStub {
     INCREMENTAL_COMPACTION
   };

-  virtual bool IsPregenerated();
+  virtual bool IsPregenerated(Isolate* isolate) V8_OVERRIDE;
   static void GenerateFixedRegStubsAheadOfTime(Isolate* isolate);
   virtual bool SometimesSetsUpAFrame() { return false; }

@@ -50,7 +50,7 @@ bool BreakLocationIterator::IsDebugBreakAtReturn() {
 void BreakLocationIterator::SetDebugBreakAtReturn() {
   ASSERT(Assembler::kJSReturnSequenceLength >= Assembler::kCallSequenceLength);
   rinfo()->PatchCodeWithCall(
-      Isolate::Current()->debug()->debug_break_return()->entry(),
+      debug_info_->GetIsolate()->debug()->debug_break_return()->entry(),
       Assembler::kJSReturnSequenceLength - Assembler::kCallSequenceLength);
 }

@@ -80,7 +80,7 @@ bool BreakLocationIterator::IsDebugBreakAtSlot() {
 void BreakLocationIterator::SetDebugBreakAtSlot() {
   ASSERT(IsDebugBreakSlot());
   rinfo()->PatchCodeWithCall(
-      Isolate::Current()->debug()->debug_break_slot()->entry(),
+      debug_info_->GetIsolate()->debug()->debug_break_slot()->entry(),
       Assembler::kDebugBreakSlotLength - Assembler::kCallSequenceLength);
 }

@@ -822,8 +822,8 @@ void CallICBase::GenerateMonomorphicCacheProbe(MacroAssembler* masm,
                                                     extra_state,
                                                     Code::NORMAL,
                                                     argc);
-  Isolate::Current()->stub_cache()->GenerateProbe(masm, flags, rdx, rcx, rbx,
-                                                  rax);
+  masm->isolate()->stub_cache()->GenerateProbe(
+      masm, flags, rdx, rcx, rbx, rax);

   // If the stub cache probing failed, the receiver might be a value.
   // For value objects, we use the map of the prototype objects for
@@ -859,8 +859,8 @@ void CallICBase::GenerateMonomorphicCacheProbe(MacroAssembler* masm,

   // Probe the stub cache for the value object.
   __ bind(&probe);
-  Isolate::Current()->stub_cache()->GenerateProbe(masm, flags, rdx, rcx, rbx,
-                                                  no_reg);
+  masm->isolate()->stub_cache()->GenerateProbe(
+      masm, flags, rdx, rcx, rbx, no_reg);

   __ bind(&miss);
 }
@@ -1332,7 +1332,7 @@ void LoadIC::GenerateMegamorphic(MacroAssembler* masm) {
   Code::Flags flags = Code::ComputeFlags(
       Code::STUB, MONOMORPHIC, Code::kNoExtraICState,
       Code::NORMAL, Code::LOAD_IC);
-  Isolate::Current()->stub_cache()->GenerateProbe(
+  masm->isolate()->stub_cache()->GenerateProbe(
       masm, flags, rax, rcx, rbx, rdx);

   GenerateMiss(masm);
@@ -1453,8 +1453,8 @@ void StoreIC::GenerateMegamorphic(MacroAssembler* masm,
   Code::Flags flags = Code::ComputeFlags(
       Code::STUB, MONOMORPHIC, strict_mode,
       Code::NORMAL, Code::STORE_IC);
-  Isolate::Current()->stub_cache()->GenerateProbe(masm, flags, rdx, rcx, rbx,
-                                                  no_reg);
+  masm->isolate()->stub_cache()->GenerateProbe(
+      masm, flags, rdx, rcx, rbx, no_reg);

   // Cache miss: Jump to runtime.
   GenerateMiss(masm);
@@ -1188,7 +1188,6 @@ int RegExpMacroAssemblerX64::CheckStackGuardState(Address* return_address,
                                                   Code* re_code,
                                                   Address re_frame) {
   Isolate* isolate = frame_entry<Isolate*>(re_frame, kIsolate);
-  ASSERT(isolate == Isolate::Current());
   if (isolate->stack_guard()->IsStackOverflow()) {
     isolate->StackOverflow();
     return EXCEPTION;
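The regexp hunks drop ASSERT(isolate == Isolate::Current()): the stack-guard handler already receives its isolate through a slot saved in the register frame (frame_entry<Isolate*>(re_frame, kIsolate)), so once callers no longer depend on the global there is nothing left to cross-check. A sketch of that frame_entry-style typed read, with a made-up frame layout rather than V8's:

#include <cassert>
#include <cstdint>
#include <cstring>

using Address = uint8_t*;

struct Isolate {
  int id;
};

// A frame_entry-style helper: reinterpret the word stored at a fixed
// offset in a raw frame as a typed value (the offset here is invented).
template <typename T>
T frame_entry(Address re_frame, int offset) {
  T value;
  std::memcpy(&value, re_frame + offset, sizeof(T));
  return value;
}

constexpr int kIsolateOffset = 0;

int CheckStackGuardState(Address re_frame) {
  // The isolate comes out of the saved frame -- no global lookup, hence
  // no assertion against Isolate::Current() once the global is gone.
  Isolate* isolate = frame_entry<Isolate*>(re_frame, kIsolateOffset);
  return isolate->id;
}

int main() {
  Isolate isolate{42};
  uint8_t frame[sizeof(Isolate*)];
  Isolate* p = &isolate;
  std::memcpy(frame + kIsolateOffset, &p, sizeof(p));  // stash it in the frame
  assert(CheckStackGuardState(frame) == 42);
  return 0;
}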