Added Isolate parameter to CodeStub::GetCode().
According to gprof, this was the #1 cause for TLS access during an Octane run.

BUG=v8:2487
Review URL: https://codereview.chromium.org/12317141

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@13756 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent 6d5f0d8772
commit 6e829ed485
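The change threads the Isolate through explicitly instead of re-deriving it from thread-local storage inside GetCode() on every stub-code request. Below is a minimal, self-contained C++ sketch of the pattern; it uses simplified stand-in types, not the real V8 classes:

// Illustrative sketch (not part of the commit): the before/after shape of
// CodeStub::GetCode(). The old form read the current isolate from TLS on
// every call; the new form takes the Isolate* the caller already holds, so
// the hot path performs no TLS access.
#include <cstdio>

struct Code { int id; };
struct Isolate { Code cache[4]; };

// Stand-in for Isolate::Current(): a thread-local read on every call.
thread_local Isolate* current_isolate = nullptr;

struct CodeStub {
  int key;
  // Old shape: Handle<Code> GetCode() { Isolate* i = Isolate::Current(); ... }
  Code* GetCodeViaTLS() { return &current_isolate->cache[key]; }
  // New shape: Handle<Code> GetCode(Isolate* isolate) { ... }
  Code* GetCode(Isolate* isolate) { return &isolate->cache[key]; }
};

int main() {
  Isolate isolate;
  current_isolate = &isolate;
  CodeStub stub{1};
  // Both forms resolve to the same code object; only the lookup path differs.
  std::printf("same object: %d\n", stub.GetCodeViaTLS() == stub.GetCode(&isolate));
  return 0;
}

In the diff below, each call site picks up the isolate it already has at hand: masm->isolate() inside platform code generators, isolate() on Heap, LCodeGen, FullCodeGenerator, and the ICs, and a raw isolate where one is in scope.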
src/arm/code-stubs-arm.cc

@@ -630,11 +630,11 @@ void FloatingPointHelper::LoadSmis(MacroAssembler* masm,
     __ mov(scratch1, Operand(r0));
     ConvertToDoubleStub stub1(r3, r2, scratch1, scratch2);
     __ push(lr);
-    __ Call(stub1.GetCode());
+    __ Call(stub1.GetCode(masm->isolate()));
     // Write Smi from r1 to r1 and r0 in double format.
     __ mov(scratch1, Operand(r1));
     ConvertToDoubleStub stub2(r1, r0, scratch1, scratch2);
-    __ Call(stub2.GetCode());
+    __ Call(stub2.GetCode(masm->isolate()));
     __ pop(lr);
   }
 }
@@ -692,7 +692,7 @@ void FloatingPointHelper::LoadNumber(MacroAssembler* masm,
     __ mov(scratch1, Operand(object));
     ConvertToDoubleStub stub(dst2, dst1, scratch1, scratch2);
     __ push(lr);
-    __ Call(stub.GetCode());
+    __ Call(stub.GetCode(masm->isolate()));
     __ pop(lr);
   }
 
@@ -1120,11 +1120,12 @@ bool WriteInt32ToHeapNumberStub::IsPregenerated() {
 }
 
 
-void WriteInt32ToHeapNumberStub::GenerateFixedRegStubsAheadOfTime() {
+void WriteInt32ToHeapNumberStub::GenerateFixedRegStubsAheadOfTime(
+    Isolate* isolate) {
   WriteInt32ToHeapNumberStub stub1(r1, r0, r2);
   WriteInt32ToHeapNumberStub stub2(r2, r0, r3);
-  stub1.GetCode()->set_is_pregenerated(true);
-  stub2.GetCode()->set_is_pregenerated(true);
+  stub1.GetCode(isolate)->set_is_pregenerated(true);
+  stub2.GetCode(isolate)->set_is_pregenerated(true);
 }
 
 
@@ -1316,7 +1317,7 @@ static void EmitSmiNonsmiComparison(MacroAssembler* masm,
     // Convert lhs to a double in r2, r3.
     __ mov(r7, Operand(lhs));
     ConvertToDoubleStub stub1(r3, r2, r7, r6);
-    __ Call(stub1.GetCode());
+    __ Call(stub1.GetCode(masm->isolate()));
     // Load rhs to a double in r0, r1.
     __ Ldrd(r0, r1, FieldMemOperand(rhs, HeapNumber::kValueOffset));
     __ pop(lr);
@@ -1358,7 +1359,7 @@ static void EmitSmiNonsmiComparison(MacroAssembler* masm,
     // Convert rhs to a double in r0, r1.
     __ mov(r7, Operand(rhs));
     ConvertToDoubleStub stub2(r1, r0, r7, r6);
-    __ Call(stub2.GetCode());
+    __ Call(stub2.GetCode(masm->isolate()));
     __ pop(lr);
   }
   // Fall through to both_loaded_as_doubles.
@@ -2318,7 +2319,7 @@ void UnaryOpStub::GenerateHeapNumberCodeBitNot(
     // WriteInt32ToHeapNumberStub does not trigger GC, so we do not
     // have to set up a frame.
     WriteInt32ToHeapNumberStub stub(r1, r0, r2);
-    __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
+    __ Jump(stub.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);
   }
 
   __ bind(&impossible);
@@ -3874,15 +3875,15 @@ bool CEntryStub::IsPregenerated() {
 }
 
 
-void CodeStub::GenerateStubsAheadOfTime() {
-  CEntryStub::GenerateAheadOfTime();
-  WriteInt32ToHeapNumberStub::GenerateFixedRegStubsAheadOfTime();
-  StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime();
-  RecordWriteStub::GenerateFixedRegStubsAheadOfTime();
+void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
+  CEntryStub::GenerateAheadOfTime(isolate);
+  WriteInt32ToHeapNumberStub::GenerateFixedRegStubsAheadOfTime(isolate);
+  StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
+  RecordWriteStub::GenerateFixedRegStubsAheadOfTime(isolate);
 }
 
 
-void CodeStub::GenerateFPStubs() {
+void CodeStub::GenerateFPStubs(Isolate* isolate) {
   SaveFPRegsMode mode = CpuFeatures::IsSupported(VFP2)
       ? kSaveFPRegs
       : kDontSaveFPRegs;
@@ -3896,11 +3897,11 @@ void CodeStub::GenerateFPStubs() {
   if (!save_doubles.FindCodeInCache(&save_doubles_code, ISOLATE)) {
     if (CpuFeatures::IsSupported(VFP2)) {
       CpuFeatures::Scope scope2(VFP2);
-      save_doubles_code = *save_doubles.GetCode();
-      store_buffer_overflow_code = *stub.GetCode();
+      save_doubles_code = *save_doubles.GetCode(isolate);
+      store_buffer_overflow_code = *stub.GetCode(isolate);
     } else {
-      save_doubles_code = *save_doubles.GetCode();
-      store_buffer_overflow_code = *stub.GetCode();
+      save_doubles_code = *save_doubles.GetCode(isolate);
+      store_buffer_overflow_code = *stub.GetCode(isolate);
     }
     save_doubles_code->set_is_pregenerated(true);
     store_buffer_overflow_code->set_is_pregenerated(true);
@@ -3909,9 +3910,9 @@ void CodeStub::GenerateFPStubs() {
 }
 
 
-void CEntryStub::GenerateAheadOfTime() {
+void CEntryStub::GenerateAheadOfTime(Isolate* isolate) {
   CEntryStub stub(1, kDontSaveFPRegs);
-  Handle<Code> code = stub.GetCode();
+  Handle<Code> code = stub.GetCode(isolate);
   code->set_is_pregenerated(true);
 }
 
@@ -7004,7 +7005,7 @@ void ICCompareStub::GenerateHeapNumbers(MacroAssembler* masm) {
   __ bind(&generic_stub);
   ICCompareStub stub(op_, CompareIC::GENERIC, CompareIC::GENERIC,
                      CompareIC::GENERIC);
-  __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
+  __ Jump(stub.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);
 
   __ bind(&maybe_undefined1);
   if (Token::IsOrderedRelationalCompareOp(op_)) {
@@ -7222,8 +7223,9 @@ void DirectCEntryStub::GenerateCall(MacroAssembler* masm,
 
 void DirectCEntryStub::GenerateCall(MacroAssembler* masm,
                                     Register target) {
-  __ mov(lr, Operand(reinterpret_cast<intptr_t>(GetCode().location()),
-                     RelocInfo::CODE_TARGET));
+  intptr_t code =
+      reinterpret_cast<intptr_t>(GetCode(masm->isolate()).location());
+  __ mov(lr, Operand(code, RelocInfo::CODE_TARGET));
 
   // Prevent literal pool emission during calculation of return address.
   Assembler::BlockConstPoolScope block_const_pool(masm);
@@ -7556,13 +7558,14 @@ bool StoreBufferOverflowStub::IsPregenerated() {
 }
 
 
-void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime() {
+void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(
+    Isolate* isolate) {
  StoreBufferOverflowStub stub1(kDontSaveFPRegs);
-  stub1.GetCode()->set_is_pregenerated(true);
+  stub1.GetCode(isolate)->set_is_pregenerated(true);
 }
 
 
-void RecordWriteStub::GenerateFixedRegStubsAheadOfTime() {
+void RecordWriteStub::GenerateFixedRegStubsAheadOfTime(Isolate* isolate) {
   for (const AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
        !entry->object.is(no_reg);
        entry++) {
@@ -7571,7 +7574,7 @@ void RecordWriteStub::GenerateFixedRegStubsAheadOfTime() {
                          entry->address,
                          entry->action,
                          kDontSaveFPRegs);
-    stub.GetCode()->set_is_pregenerated(true);
+    stub.GetCode(isolate)->set_is_pregenerated(true);
   }
 }
 
@@ -7846,7 +7849,7 @@ void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
   ASSERT(!Serializer::enabled());
   bool save_fp_regs = CpuFeatures::IsSupported(VFP2);
   CEntryStub ces(1, save_fp_regs ? kSaveFPRegs : kDontSaveFPRegs);
-  __ Call(ces.GetCode(), RelocInfo::CODE_TARGET);
+  __ Call(ces.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);
   int parameter_count_offset =
       StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset;
   __ ldr(r1, MemOperand(fp, parameter_count_offset));
src/arm/code-stubs-arm.h

@@ -66,7 +66,7 @@ class StoreBufferOverflowStub: public PlatformCodeStub {
   void Generate(MacroAssembler* masm);
 
   virtual bool IsPregenerated();
-  static void GenerateFixedRegStubsAheadOfTime();
+  static void GenerateFixedRegStubsAheadOfTime(Isolate* isolate);
   virtual bool SometimesSetsUpAFrame() { return false; }
 
  private:
@@ -305,7 +305,7 @@ class WriteInt32ToHeapNumberStub : public PlatformCodeStub {
         scratch_(scratch) { }
 
   bool IsPregenerated();
-  static void GenerateFixedRegStubsAheadOfTime();
+  static void GenerateFixedRegStubsAheadOfTime(Isolate* isolate);
 
  private:
   Register the_int_;
@@ -379,7 +379,7 @@ class RecordWriteStub: public PlatformCodeStub {
   };
 
   virtual bool IsPregenerated();
-  static void GenerateFixedRegStubsAheadOfTime();
+  static void GenerateFixedRegStubsAheadOfTime(Isolate* isolate);
   virtual bool SometimesSetsUpAFrame() { return false; }
 
   static void PatchBranchIntoNop(MacroAssembler* masm, int pos) {
src/arm/full-codegen-arm.cc

@@ -1035,7 +1035,7 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
 
     // Record position before stub call for type feedback.
     SetSourcePosition(clause->position());
-    Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT);
+    Handle<Code> ic = CompareIC::GetUninitialized(isolate(), Token::EQ_STRICT);
     CallIC(ic, RelocInfo::CODE_TARGET, clause->CompareId());
     patch_site.EmitPatchInfo();
 
@@ -1950,7 +1950,7 @@ void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
 
   __ bind(&stub_call);
   BinaryOpStub stub(op, mode);
-  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET,
+  CallIC(stub.GetCode(isolate()), RelocInfo::CODE_TARGET,
         expr->BinaryOperationFeedbackId());
   patch_site.EmitPatchInfo();
   __ jmp(&done);
@@ -2034,7 +2034,7 @@ void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
   __ pop(r1);
   BinaryOpStub stub(op, mode);
   JumpPatchSite patch_site(masm_);    // unbound, signals no inlined smi code.
-  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET,
+  CallIC(stub.GetCode(isolate()), RelocInfo::CODE_TARGET,
         expr->BinaryOperationFeedbackId());
   patch_site.EmitPatchInfo();
   context()->Plug(r0);
@@ -2536,7 +2536,7 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) {
   __ mov(r2, Operand(cell));
 
   CallConstructStub stub(RECORD_CALL_TARGET);
-  __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
+  __ Call(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL);
   PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
   context()->Plug(r0);
 }
@@ -4016,7 +4016,7 @@ void FullCodeGenerator::EmitUnaryOperation(UnaryOperation* expr,
   // accumulator register r0.
   VisitForAccumulatorValue(expr->expression());
   SetSourcePosition(expr->position());
-  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET,
+  CallIC(stub.GetCode(isolate()), RelocInfo::CODE_TARGET,
         expr->UnaryOperationFeedbackId());
   context()->Plug(r0);
 }
@@ -4129,7 +4129,9 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
   SetSourcePosition(expr->position());
 
   BinaryOpStub stub(Token::ADD, NO_OVERWRITE);
-  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->CountBinOpFeedbackId());
+  CallIC(stub.GetCode(isolate()),
+         RelocInfo::CODE_TARGET,
+         expr->CountBinOpFeedbackId());
   patch_site.EmitPatchInfo();
   __ bind(&done);
 
@@ -4364,7 +4366,7 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
 
       // Record position and call the compare IC.
       SetSourcePosition(expr->position());
-      Handle<Code> ic = CompareIC::GetUninitialized(op);
+      Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op);
       CallIC(ic, RelocInfo::CODE_TARGET, expr->CompareOperationFeedbackId());
       patch_site.EmitPatchInfo();
       PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
src/arm/lithium-codegen-arm.cc

@@ -1070,39 +1070,39 @@ void LCodeGen::DoCallStub(LCallStub* instr) {
   switch (instr->hydrogen()->major_key()) {
     case CodeStub::RegExpConstructResult: {
       RegExpConstructResultStub stub;
-      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+      CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
       break;
     }
     case CodeStub::RegExpExec: {
       RegExpExecStub stub;
-      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+      CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
       break;
     }
     case CodeStub::SubString: {
       SubStringStub stub;
-      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+      CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
       break;
     }
     case CodeStub::NumberToString: {
       NumberToStringStub stub;
-      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+      CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
       break;
     }
     case CodeStub::StringAdd: {
       StringAddStub stub(NO_STRING_ADD_FLAGS);
-      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+      CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
       break;
     }
     case CodeStub::StringCompare: {
       StringCompareStub stub;
-      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+      CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
       break;
     }
     case CodeStub::TranscendentalCache: {
       __ ldr(r0, MemOperand(sp, 0));
       TranscendentalCacheStub stub(instr->transcendental_type(),
                                    TranscendentalCacheStub::TAGGED);
-      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+      CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
       break;
     }
     default:
@@ -2124,7 +2124,7 @@ void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
   // Block literal pool emission to ensure nop indicating no inlined smi code
   // is in the correct position.
   Assembler::BlockConstPoolScope block_const_pool(masm());
-  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+  CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
   __ nop();  // Signals no inlined code.
 }
 
@@ -2538,7 +2538,7 @@ void LCodeGen::DoStringCompareAndBranch(LStringCompareAndBranch* instr) {
   int true_block = chunk_->LookupDestination(instr->true_block_id());
   int false_block = chunk_->LookupDestination(instr->false_block_id());
 
-  Handle<Code> ic = CompareIC::GetUninitialized(op);
+  Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op);
   CallCode(ic, RelocInfo::CODE_TARGET, instr);
   // This instruction also signals no smi code inlined.
   __ cmp(r0, Operand::Zero());
@@ -2713,7 +2713,7 @@ void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
   ASSERT(ToRegister(instr->right()).is(r1));  // Function is in r1.
 
   InstanceofStub stub(InstanceofStub::kArgsInRegisters);
-  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+  CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
 
   __ cmp(r0, Operand::Zero());
   __ mov(r0, Operand(factory()->false_value()), LeaveCC, ne);
@@ -2842,7 +2842,7 @@ void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
     __ nop();
   }
   __ StoreToSafepointRegisterSlot(temp, temp);
-  CallCodeGeneric(stub.GetCode(),
+  CallCodeGeneric(stub.GetCode(isolate()),
                   RelocInfo::CODE_TARGET,
                   instr,
                   RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
@@ -2865,7 +2865,7 @@ void LCodeGen::DoInstanceSize(LInstanceSize* instr) {
 void LCodeGen::DoCmpT(LCmpT* instr) {
   Token::Value op = instr->op();
 
-  Handle<Code> ic = CompareIC::GetUninitialized(op);
+  Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op);
   CallCode(ic, RelocInfo::CODE_TARGET, instr);
   // This instruction also signals no smi code inlined.
   __ cmp(r0, Operand::Zero());
@@ -4134,7 +4134,7 @@ void LCodeGen::DoMathLog(LUnaryMathOperation* instr) {
   ASSERT(ToDoubleRegister(instr->result()).is(d2));
   TranscendentalCacheStub stub(TranscendentalCache::LOG,
                                TranscendentalCacheStub::UNTAGGED);
-  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+  CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
 }
 
 
@@ -4142,7 +4142,7 @@ void LCodeGen::DoMathTan(LUnaryMathOperation* instr) {
   ASSERT(ToDoubleRegister(instr->result()).is(d2));
   TranscendentalCacheStub stub(TranscendentalCache::TAN,
                                TranscendentalCacheStub::UNTAGGED);
-  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+  CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
 }
 
 
@@ -4150,7 +4150,7 @@ void LCodeGen::DoMathCos(LUnaryMathOperation* instr) {
   ASSERT(ToDoubleRegister(instr->result()).is(d2));
   TranscendentalCacheStub stub(TranscendentalCache::COS,
                                TranscendentalCacheStub::UNTAGGED);
-  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+  CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
 }
 
 
@@ -4158,7 +4158,7 @@ void LCodeGen::DoMathSin(LUnaryMathOperation* instr) {
   ASSERT(ToDoubleRegister(instr->result()).is(d2));
   TranscendentalCacheStub stub(TranscendentalCache::SIN,
                                TranscendentalCacheStub::UNTAGGED);
-  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+  CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
 }
 
 
@@ -4250,7 +4250,7 @@ void LCodeGen::DoCallFunction(LCallFunction* instr) {
 
   int arity = instr->arity();
   CallFunctionStub stub(arity, NO_CALL_FUNCTION_FLAGS);
-  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+  CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
   __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
 }
 
@@ -4284,7 +4284,7 @@ void LCodeGen::DoCallNew(LCallNew* instr) {
 
   CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
   __ mov(r0, Operand(instr->arity()));
-  CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr);
+  CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
 }
 
 
@@ -4646,7 +4646,7 @@ void LCodeGen::DoStringAdd(LStringAdd* instr) {
   __ push(ToRegister(instr->left()));
   __ push(ToRegister(instr->right()));
   StringAddStub stub(NO_STRING_CHECK_IN_STUB);
-  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+  CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
 }
 
 
@@ -5708,7 +5708,7 @@ void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
     FastCloneShallowArrayStub::Mode mode =
        FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS;
     FastCloneShallowArrayStub stub(mode, DONT_TRACK_ALLOCATION_SITE, length);
-    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+    CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
   } else if (instr->hydrogen()->depth() > 1) {
     CallRuntime(Runtime::kCreateArrayLiteral, 3, instr);
   } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
@@ -5719,7 +5719,7 @@ void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
        ? FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS
        : FastCloneShallowArrayStub::CLONE_ELEMENTS;
     FastCloneShallowArrayStub stub(mode, allocation_site_mode, length);
-    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+    CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
   }
 }
 
@@ -5913,7 +5913,7 @@ void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) {
     CallRuntime(Runtime::kCreateObjectLiteralShallow, 4, instr);
   } else {
     FastCloneShallowObjectStub stub(properties_count);
-    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+    CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
   }
 }
 
@@ -5987,7 +5987,7 @@ void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
     FastNewClosureStub stub(shared_info->language_mode());
     __ mov(r1, Operand(shared_info));
     __ push(r1);
-    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+    CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
   } else {
     __ mov(r2, Operand(shared_info));
     __ mov(r1, Operand(pretenure
@@ -6225,7 +6225,7 @@ void LCodeGen::DoStackCheck(LStackCheck* instr) {
     __ b(hs, &done);
     StackCheckStub stub;
     PredictableCodeSizeScope predictable(masm_, 2 * Assembler::kInstrSize);
-    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+    CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
     EnsureSpaceForLazyDeopt();
     __ bind(&done);
     RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
src/arm/macro-assembler-arm.cc

@@ -1231,7 +1231,7 @@ void MacroAssembler::DebugBreak() {
   mov(r1, Operand(ExternalReference(Runtime::kDebugBreak, isolate())));
   CEntryStub ces(1);
   ASSERT(AllowThisStubCall(&ces));
-  Call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
+  Call(ces.GetCode(isolate()), RelocInfo::DEBUG_BREAK);
 }
 #endif
 
@@ -2223,13 +2223,13 @@ void MacroAssembler::CallStub(CodeStub* stub,
                               TypeFeedbackId ast_id,
                               Condition cond) {
   ASSERT(AllowThisStubCall(stub));  // Stub calls are not allowed in some stubs.
-  Call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id, cond);
+  Call(stub->GetCode(isolate()), RelocInfo::CODE_TARGET, ast_id, cond);
 }
 
 
 void MacroAssembler::TailCallStub(CodeStub* stub, Condition cond) {
   ASSERT(allow_stub_calls_ || stub->CompilingCallsToThisStubIsGCSafe());
-  Jump(stub->GetCode(), RelocInfo::CODE_TARGET, cond);
+  Jump(stub->GetCode(isolate()), RelocInfo::CODE_TARGET, cond);
 }
 
 
@@ -2824,7 +2824,7 @@ void MacroAssembler::JumpToExternalReference(const ExternalReference& builtin) {
 #endif
   mov(r1, Operand(builtin));
   CEntryStub stub(1);
-  Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
+  Jump(stub.GetCode(isolate()), RelocInfo::CODE_TARGET);
 }
src/arm/stub-cache-arm.cc

@@ -3087,11 +3087,11 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadElement(
       receiver_map->has_external_array_elements()) {
     Handle<Code> stub = KeyedLoadFastElementStub(
         receiver_map->instance_type() == JS_ARRAY_TYPE,
-        elements_kind).GetCode();
+        elements_kind).GetCode(isolate());
     __ DispatchMap(r1, r2, receiver_map, stub, DO_SMI_CHECK);
   } else {
     Handle<Code> stub =
-        KeyedLoadDictionaryElementStub().GetCode();
+        KeyedLoadDictionaryElementStub().GetCode(isolate());
     __ DispatchMap(r1, r2, receiver_map, stub, DO_SMI_CHECK);
   }
 
@@ -3184,7 +3184,9 @@ Handle<Code> KeyedStoreStubCompiler::CompileStoreElement(
   ElementsKind elements_kind = receiver_map->elements_kind();
   bool is_js_array = receiver_map->instance_type() == JS_ARRAY_TYPE;
   Handle<Code> stub =
-      KeyedStoreElementStub(is_js_array, elements_kind, grow_mode_).GetCode();
+      KeyedStoreElementStub(is_js_array,
+                            elements_kind,
+                            grow_mode_).GetCode(isolate());
 
   __ DispatchMap(r2, r3, receiver_map, stub, DO_SMI_CHECK);
 
src/code-stubs.cc

@@ -105,8 +105,7 @@ Handle<Code> PlatformCodeStub::GenerateCode() {
 }
 
 
-Handle<Code> CodeStub::GetCode() {
-  Isolate* isolate = Isolate::Current();
+Handle<Code> CodeStub::GetCode(Isolate* isolate) {
   Factory* factory = isolate->factory();
   Heap* heap = isolate->heap();
   Code* code;
@@ -604,10 +603,10 @@ void ElementsTransitionAndStoreStub::Generate(MacroAssembler* masm) {
 }
 
 
-void StubFailureTrampolineStub::GenerateAheadOfTime() {
+void StubFailureTrampolineStub::GenerateAheadOfTime(Isolate* isolate) {
   int i = 0;
   for (; i <= StubFailureTrampolineStub::kMaxExtraExpressionStackCount; ++i) {
-    StubFailureTrampolineStub(i).GetCode();
+    StubFailureTrampolineStub(i).GetCode(isolate);
   }
 }
src/code-stubs.h

@@ -126,7 +126,7 @@ class CodeStub BASE_EMBEDDED {
   };
 
   // Retrieve the code for the stub. Generate the code if needed.
-  Handle<Code> GetCode();
+  Handle<Code> GetCode(Isolate* isolate);
 
   static Major MajorKeyFromKey(uint32_t key) {
     return static_cast<Major>(MajorKeyBits::decode(key));
@@ -154,8 +154,8 @@ class CodeStub BASE_EMBEDDED {
   // See comment above, where Instanceof is defined.
   virtual bool IsPregenerated() { return false; }
 
-  static void GenerateStubsAheadOfTime();
-  static void GenerateFPStubs();
+  static void GenerateStubsAheadOfTime(Isolate* isolate);
+  static void GenerateFPStubs(Isolate* isolate);
 
   // Some stubs put untagged junk on the stack that cannot be scanned by the
   // GC. This means that we must be statically sure that no GC can occur while
@@ -844,7 +844,7 @@ class CEntryStub : public PlatformCodeStub {
   // their code generation. On machines that always have gp registers (x64) we
   // can generate both variants ahead of time.
   virtual bool IsPregenerated();
-  static void GenerateAheadOfTime();
+  static void GenerateAheadOfTime(Isolate* isolate);
 
  private:
   void GenerateCore(MacroAssembler* masm,
@@ -1462,7 +1462,7 @@ class StubFailureTrampolineStub : public PlatformCodeStub {
 
   virtual bool IsPregenerated() { return true; }
 
-  static void GenerateAheadOfTime();
+  static void GenerateAheadOfTime(Isolate* isolate);
 
  private:
   Major MajorKey() { return StubFailureTrampoline; }
src/heap.cc

@@ -2689,13 +2689,13 @@ bool Heap::CreateApiObjects() {
 
 void Heap::CreateJSEntryStub() {
   JSEntryStub stub;
-  set_js_entry_code(*stub.GetCode());
+  set_js_entry_code(*stub.GetCode(isolate()));
 }
 
 
 void Heap::CreateJSConstructEntryStub() {
   JSConstructEntryStub stub;
-  set_js_construct_entry_code(*stub.GetCode());
+  set_js_construct_entry_code(*stub.GetCode(isolate()));
 }
 
 
@@ -2720,7 +2720,7 @@ void Heap::CreateFixedStubs() {
   // create them if we need them during the creation of another stub.
   // Stub creation mixes raw pointers and handles in an unsafe manner so
   // we cannot create stubs while we are creating stubs.
-  CodeStub::GenerateStubsAheadOfTime();
+  CodeStub::GenerateStubsAheadOfTime(isolate());
 }
src/ia32/code-stubs-ia32.cc

@@ -4971,32 +4971,32 @@ bool CEntryStub::IsPregenerated() {
 }
 
 
-void CodeStub::GenerateStubsAheadOfTime() {
-  CEntryStub::GenerateAheadOfTime();
-  StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime();
+void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
+  CEntryStub::GenerateAheadOfTime(isolate);
+  StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
   // It is important that the store buffer overflow stubs are generated first.
-  RecordWriteStub::GenerateFixedRegStubsAheadOfTime();
+  RecordWriteStub::GenerateFixedRegStubsAheadOfTime(isolate);
 }
 
 
-void CodeStub::GenerateFPStubs() {
+void CodeStub::GenerateFPStubs(Isolate* isolate) {
   if (CpuFeatures::IsSupported(SSE2)) {
     CEntryStub save_doubles(1, kSaveFPRegs);
     // Stubs might already be in the snapshot, detect that and don't regenerate,
     // which would lead to code stub initialization state being messed up.
     Code* save_doubles_code;
-    if (!save_doubles.FindCodeInCache(&save_doubles_code, ISOLATE)) {
-      save_doubles_code = *(save_doubles.GetCode());
+    if (!save_doubles.FindCodeInCache(&save_doubles_code, isolate)) {
+      save_doubles_code = *(save_doubles.GetCode(isolate));
     }
     save_doubles_code->set_is_pregenerated(true);
-    save_doubles_code->GetIsolate()->set_fp_stubs_generated(true);
+    isolate->set_fp_stubs_generated(true);
   }
 }
 
 
-void CEntryStub::GenerateAheadOfTime() {
+void CEntryStub::GenerateAheadOfTime(Isolate* isolate) {
   CEntryStub stub(1, kDontSaveFPRegs);
-  Handle<Code> code = stub.GetCode();
+  Handle<Code> code = stub.GetCode(isolate);
   code->set_is_pregenerated(true);
 }
 
@@ -6817,7 +6817,7 @@ void ICCompareStub::GenerateHeapNumbers(MacroAssembler* masm) {
   __ bind(&generic_stub);
   ICCompareStub stub(op_, CompareIC::GENERIC, CompareIC::GENERIC,
                      CompareIC::GENERIC);
-  __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);
+  __ jmp(stub.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);
 
   __ bind(&maybe_undefined1);
   if (Token::IsOrderedRelationalCompareOp(op_)) {
@@ -7322,19 +7322,20 @@ bool RecordWriteStub::IsPregenerated() {
 }
 
 
-void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime() {
+void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(
+    Isolate* isolate) {
   StoreBufferOverflowStub stub1(kDontSaveFPRegs);
-  stub1.GetCode()->set_is_pregenerated(true);
+  stub1.GetCode(isolate)->set_is_pregenerated(true);
 
   CpuFeatures::TryForceFeatureScope scope(SSE2);
   if (CpuFeatures::IsSupported(SSE2)) {
     StoreBufferOverflowStub stub2(kSaveFPRegs);
-    stub2.GetCode()->set_is_pregenerated(true);
+    stub2.GetCode(isolate)->set_is_pregenerated(true);
   }
 }
 
 
-void RecordWriteStub::GenerateFixedRegStubsAheadOfTime() {
+void RecordWriteStub::GenerateFixedRegStubsAheadOfTime(Isolate* isolate) {
   for (const AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
        !entry->object.is(no_reg);
       entry++) {
@@ -7343,7 +7344,7 @@ void RecordWriteStub::GenerateFixedRegStubsAheadOfTime() {
                          entry->address,
                          entry->action,
                          kDontSaveFPRegs);
-    stub.GetCode()->set_is_pregenerated(true);
+    stub.GetCode(isolate)->set_is_pregenerated(true);
   }
 }
 
@@ -7639,7 +7640,7 @@ void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
   ASSERT(!Serializer::enabled());
   bool save_fp_regs = CpuFeatures::IsSupported(SSE2);
   CEntryStub ces(1, save_fp_regs ? kSaveFPRegs : kDontSaveFPRegs);
-  __ call(ces.GetCode(), RelocInfo::CODE_TARGET);
+  __ call(ces.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);
   int parameter_count_offset =
       StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset;
   __ mov(ebx, MemOperand(ebp, parameter_count_offset));
src/ia32/code-stubs-ia32.h

@@ -69,7 +69,7 @@ class StoreBufferOverflowStub: public PlatformCodeStub {
   void Generate(MacroAssembler* masm);
 
   virtual bool IsPregenerated() { return true; }
-  static void GenerateFixedRegStubsAheadOfTime();
+  static void GenerateFixedRegStubsAheadOfTime(Isolate* isolate);
   virtual bool SometimesSetsUpAFrame() { return false; }
 
  private:
@@ -406,7 +406,7 @@ class RecordWriteStub: public PlatformCodeStub {
   };
 
   virtual bool IsPregenerated();
-  static void GenerateFixedRegStubsAheadOfTime();
+  static void GenerateFixedRegStubsAheadOfTime(Isolate* isolate);
   virtual bool SometimesSetsUpAFrame() { return false; }
 
   static const byte kTwoByteNopInstruction = 0x3c;  // Cmpb al, #imm8.
src/ia32/full-codegen-ia32.cc

@@ -995,7 +995,7 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
 
     // Record position before stub call for type feedback.
     SetSourcePosition(clause->position());
-    Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT);
+    Handle<Code> ic = CompareIC::GetUninitialized(isolate(), Token::EQ_STRICT);
     CallIC(ic, RelocInfo::CODE_TARGET, clause->CompareId());
     patch_site.EmitPatchInfo();
     __ test(eax, eax);
@@ -1902,7 +1902,7 @@ void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
   __ bind(&stub_call);
   __ mov(eax, ecx);
   BinaryOpStub stub(op, mode);
-  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET,
+  CallIC(stub.GetCode(isolate()), RelocInfo::CODE_TARGET,
        expr->BinaryOperationFeedbackId());
   patch_site.EmitPatchInfo();
   __ jmp(&done, Label::kNear);
@@ -1988,7 +1988,7 @@ void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
   __ pop(edx);
   BinaryOpStub stub(op, mode);
   JumpPatchSite patch_site(masm_);    // unbound, signals no inlined smi code.
-  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET,
+  CallIC(stub.GetCode(isolate()), RelocInfo::CODE_TARGET,
        expr->BinaryOperationFeedbackId());
   patch_site.EmitPatchInfo();
   context()->Plug(eax);
@@ -2479,7 +2479,7 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) {
   __ mov(ebx, cell);
 
   CallConstructStub stub(RECORD_CALL_TARGET);
-  __ call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
+  __ call(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL);
   PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
   context()->Plug(eax);
 }
@@ -3989,7 +3989,7 @@ void FullCodeGenerator::EmitUnaryOperation(UnaryOperation* expr,
   // accumulator register eax.
   VisitForAccumulatorValue(expr->expression());
   SetSourcePosition(expr->position());
-  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET,
+  CallIC(stub.GetCode(isolate()), RelocInfo::CODE_TARGET,
        expr->UnaryOperationFeedbackId());
   context()->Plug(eax);
 }
@@ -4111,7 +4111,9 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
   __ mov(edx, eax);
   __ mov(eax, Immediate(Smi::FromInt(1)));
   BinaryOpStub stub(expr->binary_op(), NO_OVERWRITE);
-  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->CountBinOpFeedbackId());
+  CallIC(stub.GetCode(isolate()),
+         RelocInfo::CODE_TARGET,
+         expr->CountBinOpFeedbackId());
   patch_site.EmitPatchInfo();
   __ bind(&done);
 
@@ -4347,7 +4349,7 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
 
       // Record position and call the compare IC.
      SetSourcePosition(expr->position());
-      Handle<Code> ic = CompareIC::GetUninitialized(op);
+      Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op);
       CallIC(ic, RelocInfo::CODE_TARGET, expr->CompareOperationFeedbackId());
       patch_site.EmitPatchInfo();
 
src/ia32/lithium-codegen-ia32.cc

@@ -1088,38 +1088,38 @@ void LCodeGen::DoCallStub(LCallStub* instr) {
   switch (instr->hydrogen()->major_key()) {
     case CodeStub::RegExpConstructResult: {
       RegExpConstructResultStub stub;
-      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+      CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
       break;
     }
     case CodeStub::RegExpExec: {
       RegExpExecStub stub;
-      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+      CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
       break;
     }
     case CodeStub::SubString: {
       SubStringStub stub;
-      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+      CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
       break;
     }
     case CodeStub::NumberToString: {
       NumberToStringStub stub;
-      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+      CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
       break;
     }
     case CodeStub::StringAdd: {
       StringAddStub stub(NO_STRING_ADD_FLAGS);
-      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+      CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
       break;
     }
     case CodeStub::StringCompare: {
       StringCompareStub stub;
-      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+      CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
       break;
     }
     case CodeStub::TranscendentalCache: {
       TranscendentalCacheStub stub(instr->transcendental_type(),
                                    TranscendentalCacheStub::TAGGED);
-      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+      CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
       break;
     }
     default:
@@ -1970,7 +1970,7 @@ void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
   ASSERT(ToRegister(instr->result()).is(eax));
 
   BinaryOpStub stub(instr->op(), NO_OVERWRITE);
-  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+  CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
   __ nop();  // Signals no inlined code.
 }
 
@@ -2372,7 +2372,7 @@ void LCodeGen::DoStringCompareAndBranch(LStringCompareAndBranch* instr) {
   int true_block = chunk_->LookupDestination(instr->true_block_id());
   int false_block = chunk_->LookupDestination(instr->false_block_id());
 
-  Handle<Code> ic = CompareIC::GetUninitialized(op);
+  Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op);
   CallCode(ic, RelocInfo::CODE_TARGET, instr);
 
   Condition condition = ComputeCompareCondition(op);
@@ -2540,7 +2540,7 @@ void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
   // Object and function are in fixed registers defined by the stub.
   ASSERT(ToRegister(instr->context()).is(esi));
   InstanceofStub stub(InstanceofStub::kArgsInRegisters);
-  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+  CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
 
   Label true_value, done;
   __ test(eax, Operand(eax));
@@ -2641,7 +2641,7 @@ void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
   int delta = masm_->SizeOfCodeGeneratedSince(map_check) + kAdditionalDelta;
   __ mov(temp, Immediate(delta));
   __ StoreToSafepointRegisterSlot(temp, temp);
-  CallCodeGeneric(stub.GetCode(),
+  CallCodeGeneric(stub.GetCode(isolate()),
                   RelocInfo::CODE_TARGET,
                   instr,
                   RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
@@ -2666,7 +2666,7 @@ void LCodeGen::DoInstanceSize(LInstanceSize* instr) {
 void LCodeGen::DoCmpT(LCmpT* instr) {
   Token::Value op = instr->op();
 
-  Handle<Code> ic = CompareIC::GetUninitialized(op);
+  Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op);
   CallCode(ic, RelocInfo::CODE_TARGET, instr);
 
   Condition condition = ComputeCompareCondition(op);
@@ -3966,7 +3966,7 @@ void LCodeGen::DoMathTan(LUnaryMathOperation* instr) {
   ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
   TranscendentalCacheStub stub(TranscendentalCache::TAN,
                                TranscendentalCacheStub::UNTAGGED);
-  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+  CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
 }
 
 
@@ -3974,7 +3974,7 @@ void LCodeGen::DoMathCos(LUnaryMathOperation* instr) {
   ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
   TranscendentalCacheStub stub(TranscendentalCache::COS,
                                TranscendentalCacheStub::UNTAGGED);
-  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+  CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
 }
 
 
@@ -3982,7 +3982,7 @@ void LCodeGen::DoMathSin(LUnaryMathOperation* instr) {
   ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
   TranscendentalCacheStub stub(TranscendentalCache::SIN,
                                TranscendentalCacheStub::UNTAGGED);
-  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+  CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
 }
 
 
@@ -4073,7 +4073,7 @@ void LCodeGen::DoCallFunction(LCallFunction* instr) {
 
   int arity = instr->arity();
   CallFunctionStub stub(arity, NO_CALL_FUNCTION_FLAGS);
-  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+  CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
 }
 
 
@@ -4107,7 +4107,7 @@ void LCodeGen::DoCallNew(LCallNew* instr) {
 
   CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
   __ Set(eax, Immediate(instr->arity()));
-  CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr);
+  CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
 }
 
 
@@ -4534,7 +4534,7 @@ void LCodeGen::DoStringAdd(LStringAdd* instr) {
   EmitPushTaggedOperand(instr->left());
   EmitPushTaggedOperand(instr->right());
   StringAddStub stub(NO_STRING_CHECK_IN_STUB);
-  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+  CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
 }
 
 
@@ -5544,7 +5544,7 @@ void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
    FastCloneShallowArrayStub::Mode mode =
        FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS;
     FastCloneShallowArrayStub stub(mode, DONT_TRACK_ALLOCATION_SITE, length);
-    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+    CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
   } else if (instr->hydrogen()->depth() > 1) {
     CallRuntime(Runtime::kCreateArrayLiteral, 3, instr);
   } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
@@ -5555,7 +5555,7 @@ void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
        ? FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS
        : FastCloneShallowArrayStub::CLONE_ELEMENTS;
     FastCloneShallowArrayStub stub(mode, allocation_site_mode, length);
-    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+    CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
   }
 }
 
@@ -5767,7 +5767,7 @@ void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) {
     __ mov(ecx, Immediate(constant_properties));
     __ mov(edx, Immediate(Smi::FromInt(flags)));
     FastCloneShallowObjectStub stub(properties_count);
-    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+    CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
   }
 }
 
@@ -5840,7 +5840,7 @@ void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
   if (!pretenure && shared_info->num_literals() == 0) {
     FastNewClosureStub stub(shared_info->language_mode());
     __ push(Immediate(shared_info));
-    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+    CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
   } else {
     __ push(esi);
     __ push(Immediate(shared_info));
@@ -6061,7 +6061,7 @@ void LCodeGen::DoStackCheck(LStackCheck* instr) {
     ASSERT(instr->context()->IsRegister());
     ASSERT(ToRegister(instr->context()).is(esi));
     StackCheckStub stub;
-    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+    CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
     EnsureSpaceForLazyDeopt();
     __ bind(&done);
     RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
src/ia32/macro-assembler-ia32.cc

@@ -385,7 +385,7 @@ void MacroAssembler::DebugBreak() {
   Set(eax, Immediate(0));
   mov(ebx, Immediate(ExternalReference(Runtime::kDebugBreak, isolate())));
   CEntryStub ces(1);
-  call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
+  call(ces.GetCode(isolate()), RelocInfo::DEBUG_BREAK);
 }
 #endif
 
@@ -1802,13 +1802,13 @@ void MacroAssembler::TryGetFunctionPrototype(Register function,
 
 void MacroAssembler::CallStub(CodeStub* stub, TypeFeedbackId ast_id) {
   ASSERT(AllowThisStubCall(stub));  // Calls are not allowed in some stubs.
-  call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id);
+  call(stub->GetCode(isolate()), RelocInfo::CODE_TARGET, ast_id);
 }
 
 
 void MacroAssembler::TailCallStub(CodeStub* stub) {
   ASSERT(allow_stub_calls_ || stub->CompilingCallsToThisStubIsGCSafe());
-  jmp(stub->GetCode(), RelocInfo::CODE_TARGET);
+  jmp(stub->GetCode(isolate()), RelocInfo::CODE_TARGET);
 }
 
 
@@ -2097,7 +2097,7 @@ void MacroAssembler::JumpToExternalReference(const ExternalReference& ext) {
   // Set the entry point and jump to the C entry runtime stub.
   mov(ebx, Immediate(ext));
   CEntryStub ces(1);
-  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
+  jmp(ces.GetCode(isolate()), RelocInfo::CODE_TARGET);
 }
src/ia32/stub-cache-ia32.cc

@@ -2922,7 +2922,9 @@ Handle<Code> KeyedStoreStubCompiler::CompileStoreElement(
   ElementsKind elements_kind = receiver_map->elements_kind();
   bool is_jsarray = receiver_map->instance_type() == JS_ARRAY_TYPE;
   Handle<Code> stub =
-      KeyedStoreElementStub(is_jsarray, elements_kind, grow_mode_).GetCode();
+      KeyedStoreElementStub(is_jsarray,
+                            elements_kind,
+                            grow_mode_).GetCode(isolate());
 
   __ DispatchMap(edx, receiver_map, stub, DO_SMI_CHECK);
 
@@ -3164,11 +3166,11 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadElement(
       receiver_map->has_external_array_elements()) {
     Handle<Code> stub = KeyedLoadFastElementStub(
        receiver_map->instance_type() == JS_ARRAY_TYPE,
-        elements_kind).GetCode();
+        elements_kind).GetCode(isolate());
     __ DispatchMap(edx, receiver_map, stub, DO_SMI_CHECK);
   } else {
     Handle<Code> stub =
-        KeyedLoadDictionaryElementStub().GetCode();
+        KeyedLoadDictionaryElementStub().GetCode(isolate());
     __ DispatchMap(edx, receiver_map, stub, DO_SMI_CHECK);
   }
 
src/ic.cc

@@ -822,10 +822,10 @@ MaybeObject* LoadIC::Load(State state,
       stub = pre_monomorphic_stub();
     } else if (state == PREMONOMORPHIC) {
       StringLengthStub string_length_stub(kind(), !object->IsString());
-      stub = string_length_stub.GetCode();
+      stub = string_length_stub.GetCode(isolate());
     } else if (state == MONOMORPHIC && object->IsStringWrapper()) {
       StringLengthStub string_length_stub(kind(), true);
-      stub = string_length_stub.GetCode();
+      stub = string_length_stub.GetCode(isolate());
     } else if (state != MEGAMORPHIC) {
       ASSERT(state != GENERIC);
       stub = megamorphic_stub();
@@ -851,7 +851,7 @@ MaybeObject* LoadIC::Load(State state,
       stub = pre_monomorphic_stub();
     } else if (state == PREMONOMORPHIC) {
       ArrayLengthStub array_length_stub(kind());
-      stub = array_length_stub.GetCode();
+      stub = array_length_stub.GetCode(isolate());
     } else if (state != MEGAMORPHIC) {
       ASSERT(state != GENERIC);
       stub = megamorphic_stub();
@@ -874,7 +874,7 @@ MaybeObject* LoadIC::Load(State state,
       stub = pre_monomorphic_stub();
     } else if (state == PREMONOMORPHIC) {
       FunctionPrototypeStub function_prototype_stub(kind());
-      stub = function_prototype_stub.GetCode();
+      stub = function_prototype_stub.GetCode(isolate());
     } else if (state != MEGAMORPHIC) {
       ASSERT(state != GENERIC);
       stub = megamorphic_stub();
@@ -1398,7 +1398,8 @@ MaybeObject* StoreIC::Store(State state,
       name->Equals(isolate()->heap()->length_symbol()) &&
       Handle<JSArray>::cast(receiver)->AllowsSetElementsLength() &&
       receiver->HasFastProperties()) {
-    Handle<Code> stub = StoreArrayLengthStub(kind(), strict_mode).GetCode();
+    Handle<Code> stub =
+        StoreArrayLengthStub(kind(), strict_mode).GetCode(isolate());
     set_target(*stub);
     TRACE_IC("StoreIC", name, state, *stub);
     return receiver->SetProperty(*name, *value, NONE, strict_mode, store_mode);
@@ -2147,7 +2148,7 @@ RUNTIME_FUNCTION(MaybeObject*, UnaryOp_Patch) {
   type = UnaryOpIC::ComputeNewType(type, previous_type);
 
   UnaryOpStub stub(op, mode, type);
-  Handle<Code> code = stub.GetCode();
+  Handle<Code> code = stub.GetCode(isolate);
   if (!code.is_null()) {
     if (FLAG_trace_ic) {
       PrintF("[UnaryOpIC in ");
@@ -2275,7 +2276,7 @@ RUNTIME_FUNCTION(MaybeObject*, BinaryOp_Patch) {
   }
 
   BinaryOpStub stub(key, new_left, new_right, result_type);
-  Handle<Code> code = stub.GetCode();
+  Handle<Code> code = stub.GetCode(isolate);
   if (!code.is_null()) {
 #ifdef DEBUG
     if (FLAG_trace_ic) {
@@ -2365,9 +2366,9 @@ Code* CompareIC::GetRawUninitialized(Token::Value op) {
 }
 
 
-Handle<Code> CompareIC::GetUninitialized(Token::Value op) {
+Handle<Code> CompareIC::GetUninitialized(Isolate* isolate, Token::Value op) {
   ICCompareStub stub(op, UNINITIALIZED, UNINITIALIZED, UNINITIALIZED);
-  return stub.GetCode();
+  return stub.GetCode(isolate);
 }
 
 
@@ -2494,7 +2495,7 @@ void CompareIC::UpdateCaches(Handle<Object> x, Handle<Object> y) {
   if (state == KNOWN_OBJECTS) {
     stub.set_known_map(Handle<Map>(Handle<JSObject>::cast(x)->map()));
   }
-  set_target(*stub.GetCode());
+  set_target(*stub.GetCode(isolate()));
 
 #ifdef DEBUG
   if (FLAG_trace_ic) {
@@ -2508,7 +2509,7 @@ void CompareIC::UpdateCaches(Handle<Object> x, Handle<Object> y) {
            GetStateName(new_right),
           GetStateName(state),
           Token::Name(op_),
-           static_cast<void*>(*stub.GetCode()));
+           static_cast<void*>(*stub.GetCode(isolate())));
   }
 #endif
 
@@ -2542,7 +2543,7 @@ RUNTIME_FUNCTION(MaybeObject*, ToBoolean_Patch) {
   old_types.TraceTransition(new_types);
 
   ToBooleanStub stub(tos, new_types);
-  Handle<Code> code = stub.GetCode();
+  Handle<Code> code = stub.GetCode(isolate);
   ToBooleanIC ic(isolate);
   ic.patch(*code);
   return Smi::FromInt(to_boolean_value ? 1 : 0);
src/ic.h

@@ -771,7 +771,7 @@ class CompareIC: public IC {
 
 
   // Factory method for getting an uninitialized compare stub.
-  static Handle<Code> GetUninitialized(Token::Value op);
+  static Handle<Code> GetUninitialized(Isolate* isolate, Token::Value op);
 
   // Helper function for computing the condition for a compare operation.
   static Condition ComputeCondition(Token::Value op);
src/isolate.cc

@@ -2137,8 +2137,8 @@ bool Isolate::Init(Deserializer* des) {
   if (!Serializer::enabled()) {
     // Ensure that the stub failure trampoline has been generated.
     HandleScope scope(this);
-    CodeStub::GenerateFPStubs();
-    StubFailureTrampolineStub::GenerateAheadOfTime();
+    CodeStub::GenerateFPStubs(this);
+    StubFailureTrampolineStub::GenerateAheadOfTime(this);
   }
 
   if (FLAG_parallel_recompilation) optimizing_compiler_thread_.Start();
@ -662,11 +662,11 @@ void FloatingPointHelper::LoadSmis(MacroAssembler* masm,
|
||||
__ mov(scratch1, a0);
|
||||
ConvertToDoubleStub stub1(a3, a2, scratch1, scratch2);
|
||||
__ push(ra);
|
||||
__ Call(stub1.GetCode());
|
||||
__ Call(stub1.GetCode(masm->isolate()));
|
||||
// Write Smi from a1 to a1 and a0 in double format.
|
||||
__ mov(scratch1, a1);
|
||||
ConvertToDoubleStub stub2(a1, a0, scratch1, scratch2);
|
||||
__ Call(stub2.GetCode());
|
||||
__ Call(stub2.GetCode(masm->isolate()));
|
||||
__ pop(ra);
|
||||
}
|
||||
}
|
||||
@ -729,7 +729,7 @@ void FloatingPointHelper::LoadNumber(MacroAssembler* masm,
|
||||
__ mov(scratch1, object);
|
||||
ConvertToDoubleStub stub(dst2, dst1, scratch1, scratch2);
|
||||
__ push(ra);
|
||||
__ Call(stub.GetCode());
|
||||
__ Call(stub.GetCode(masm->isolate()));
|
||||
__ pop(ra);
|
||||
}
|
||||
|
||||
@ -1180,11 +1180,12 @@ bool WriteInt32ToHeapNumberStub::IsPregenerated() {
|
||||
}
|
||||
|
||||
|
||||
void WriteInt32ToHeapNumberStub::GenerateFixedRegStubsAheadOfTime() {
|
||||
void WriteInt32ToHeapNumberStub::GenerateFixedRegStubsAheadOfTime(
|
||||
Isolate* isolate) {
|
||||
WriteInt32ToHeapNumberStub stub1(a1, v0, a2, a3);
|
||||
WriteInt32ToHeapNumberStub stub2(a2, v0, a3, a0);
|
||||
stub1.GetCode()->set_is_pregenerated(true);
|
||||
stub2.GetCode()->set_is_pregenerated(true);
|
||||
stub1.GetCode(isolate)->set_is_pregenerated(true);
|
||||
stub2.GetCode(isolate)->set_is_pregenerated(true);
|
||||
}
|
||||
|
||||
|
||||
@ -1381,7 +1382,7 @@ static void EmitSmiNonsmiComparison(MacroAssembler* masm,
|
||||
__ mov(t6, rhs);
|
||||
ConvertToDoubleStub stub1(a1, a0, t6, t5);
|
||||
__ push(ra);
|
||||
__ Call(stub1.GetCode());
|
||||
__ Call(stub1.GetCode(masm->isolate()));
|
||||
|
||||
__ pop(ra);
|
||||
}
|
||||
@ -1416,7 +1417,7 @@ static void EmitSmiNonsmiComparison(MacroAssembler* masm,
|
||||
__ mov(t6, lhs);
|
||||
ConvertToDoubleStub stub2(a3, a2, t6, t5);
|
||||
__ push(ra);
|
||||
__ Call(stub2.GetCode());
|
||||
__ Call(stub2.GetCode(masm->isolate()));
|
||||
__ pop(ra);
|
||||
// Load rhs to a double in a1, a0.
|
||||
if (rhs.is(a0)) {
|
||||
@ -2383,7 +2384,7 @@ void UnaryOpStub::GenerateHeapNumberCodeBitNot(
|
||||
// WriteInt32ToHeapNumberStub does not trigger GC, so we do not
|
||||
// have to set up a frame.
|
||||
WriteInt32ToHeapNumberStub stub(a1, v0, a2, a3);
|
||||
__ Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
|
||||
__ Jump(stub.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);
|
||||
}
|
||||
|
||||
__ bind(&impossible);
|
||||
@ -3930,15 +3931,15 @@ bool CEntryStub::IsPregenerated() {
|
||||
}
|
||||
|
||||
|
||||
void CodeStub::GenerateStubsAheadOfTime() {
|
||||
CEntryStub::GenerateAheadOfTime();
|
||||
WriteInt32ToHeapNumberStub::GenerateFixedRegStubsAheadOfTime();
|
||||
StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime();
|
||||
void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
|
||||
CEntryStub::GenerateAheadOfTime(isolate);
|
||||
WriteInt32ToHeapNumberStub::GenerateFixedRegStubsAheadOfTime(isolate);
|
||||
StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
|
||||
RecordWriteStub::GenerateFixedRegStubsAheadOfTime();
|
||||
}
|
||||
|
||||
|
||||
void CodeStub::GenerateFPStubs() {
|
||||
void CodeStub::GenerateFPStubs(Isolate* isolate) {
|
||||
SaveFPRegsMode mode = CpuFeatures::IsSupported(FPU)
|
||||
? kSaveFPRegs
|
||||
: kDontSaveFPRegs;
|
||||
@ -3952,11 +3953,11 @@ void CodeStub::GenerateFPStubs() {
|
||||
if (!save_doubles.FindCodeInCache(&save_doubles_code, ISOLATE)) {
|
||||
if (CpuFeatures::IsSupported(FPU)) {
|
||||
CpuFeatures::Scope scope2(FPU);
|
||||
save_doubles_code = *save_doubles.GetCode();
|
||||
store_buffer_overflow_code = *stub.GetCode();
|
||||
save_doubles_code = *save_doubles.GetCode(isolate);
|
||||
store_buffer_overflow_code = *stub.GetCode(isolate);
|
||||
} else {
|
||||
save_doubles_code = *save_doubles.GetCode();
|
||||
store_buffer_overflow_code = *stub.GetCode();
|
||||
save_doubles_code = *save_doubles.GetCode(isolate);
|
||||
store_buffer_overflow_code = *stub.GetCode(isolate);
|
||||
}
|
||||
save_doubles_code->set_is_pregenerated(true);
|
||||
store_buffer_overflow_code->set_is_pregenerated(true);
|
||||
@ -3965,9 +3966,9 @@ void CodeStub::GenerateFPStubs() {
|
||||
}
|
||||
|
||||
|
||||
void CEntryStub::GenerateAheadOfTime() {
|
||||
void CEntryStub::GenerateAheadOfTime(Isolate* isolate) {
|
||||
CEntryStub stub(1, kDontSaveFPRegs);
|
||||
Handle<Code> code = stub.GetCode();
|
||||
Handle<Code> code = stub.GetCode(isolate);
|
||||
code->set_is_pregenerated(true);
|
||||
}
|
||||
|
||||
@ -7114,7 +7115,7 @@ void ICCompareStub::GenerateHeapNumbers(MacroAssembler* masm) {
__ bind(&generic_stub);
ICCompareStub stub(op_, CompareIC::GENERIC, CompareIC::GENERIC,
CompareIC::GENERIC);
__ Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
__ Jump(stub.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);

__ bind(&maybe_undefined1);
if (Token::IsOrderedRelationalCompareOp(op_)) {
@ -7365,10 +7366,9 @@ void DirectCEntryStub::GenerateCall(MacroAssembler* masm,
// Push return address (accessible to GC through exit frame pc).
// This spot for ra was reserved in EnterExitFrame.
masm->sw(ra, MemOperand(sp, kCArgsSlotsSize));
masm->li(ra,
Operand(reinterpret_cast<intptr_t>(GetCode().location()),
RelocInfo::CODE_TARGET),
CONSTANT_SIZE);
intptr_t loc =
reinterpret_cast<intptr_t>(GetCode(masm->isolate()).location());
masm->li(ra, Operand(loc, RelocInfo::CODE_TARGET), CONSTANT_SIZE);
// Call the function.
masm->Jump(t9);
// Make sure the stored 'ra' points to this position.
@ -7699,13 +7699,14 @@ bool StoreBufferOverflowStub::IsPregenerated() {
}


void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime() {
void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(
Isolate* isolate) {
StoreBufferOverflowStub stub1(kDontSaveFPRegs);
stub1.GetCode()->set_is_pregenerated(true);
stub1.GetCode(isolate)->set_is_pregenerated(true);
}


void RecordWriteStub::GenerateFixedRegStubsAheadOfTime() {
void RecordWriteStub::GenerateFixedRegStubsAheadOfTime(Isolate* isolate) {
for (const AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
!entry->object.is(no_reg);
entry++) {
@ -7714,7 +7715,7 @@ void RecordWriteStub::GenerateFixedRegStubsAheadOfTime() {
entry->address,
entry->action,
kDontSaveFPRegs);
stub.GetCode()->set_is_pregenerated(true);
stub.GetCode(isolate)->set_is_pregenerated(true);
}
}

@ -7991,7 +7992,7 @@ void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
ASSERT(!Serializer::enabled());
bool save_fp_regs = CpuFeatures::IsSupported(FPU);
CEntryStub ces(1, save_fp_regs ? kSaveFPRegs : kDontSaveFPRegs);
__ Call(ces.GetCode(), RelocInfo::CODE_TARGET);
__ Call(ces.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);
int parameter_count_offset =
StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset;
__ lw(a1, MemOperand(fp, parameter_count_offset));
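
In DirectCEntryStub::GenerateCall the extra isolate argument no longer fits inside the nested li(...) operand, so the new code hoists the code object's raw address into a local first. The same hoist in a stand-alone sketch (Code here is a stand-in type, not V8's):

// Sketch of the hoist above: materialize the code object's address as a
// register-sized integer once, then hand the assembler a flat operand.
#include <cstdint>
#include <cstdio>

struct Code {
  unsigned char bytes[16] = {0};
  unsigned char* location() { return bytes; }
};

int main() {
  Code code;
  // Equivalent shape to:
  //   intptr_t loc =
  //       reinterpret_cast<intptr_t>(GetCode(masm->isolate()).location());
  intptr_t loc = reinterpret_cast<intptr_t>(code.location());
  std::printf("code entry at %p\n", reinterpret_cast<void*>(loc));
}
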
@ -67,7 +67,7 @@ class StoreBufferOverflowStub: public PlatformCodeStub {
void Generate(MacroAssembler* masm);

virtual bool IsPregenerated();
static void GenerateFixedRegStubsAheadOfTime();
static void GenerateFixedRegStubsAheadOfTime(Isolate* isolate);
virtual bool SometimesSetsUpAFrame() { return false; }

private:
@ -313,7 +313,7 @@ class WriteInt32ToHeapNumberStub : public PlatformCodeStub {
}

bool IsPregenerated();
static void GenerateFixedRegStubsAheadOfTime();
static void GenerateFixedRegStubsAheadOfTime(Isolate* isolate);

private:
Register the_int_;
@ -390,7 +390,7 @@ class RecordWriteStub: public PlatformCodeStub {
};

virtual bool IsPregenerated();
static void GenerateFixedRegStubsAheadOfTime();
static void GenerateFixedRegStubsAheadOfTime(Isolate* isolate);
virtual bool SometimesSetsUpAFrame() { return false; }

static void PatchBranchIntoNop(MacroAssembler* masm, int pos) {

@ -1046,7 +1046,7 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {

// Record position before stub call for type feedback.
SetSourcePosition(clause->position());
Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT);
Handle<Code> ic = CompareIC::GetUninitialized(isolate(), Token::EQ_STRICT);
CallIC(ic, RelocInfo::CODE_TARGET, clause->CompareId());
patch_site.EmitPatchInfo();

@ -1965,7 +1965,7 @@ void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,

__ bind(&stub_call);
BinaryOpStub stub(op, mode);
CallIC(stub.GetCode(), RelocInfo::CODE_TARGET,
CallIC(stub.GetCode(isolate()), RelocInfo::CODE_TARGET,
expr->BinaryOperationFeedbackId());
patch_site.EmitPatchInfo();
__ jmp(&done);
@ -2049,7 +2049,7 @@ void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
__ pop(a1);
BinaryOpStub stub(op, mode);
JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
CallIC(stub.GetCode(), RelocInfo::CODE_TARGET,
CallIC(stub.GetCode(isolate()), RelocInfo::CODE_TARGET,
expr->BinaryOperationFeedbackId());
patch_site.EmitPatchInfo();
context()->Plug(v0);
@ -2557,7 +2557,7 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) {
__ li(a2, Operand(cell));

CallConstructStub stub(RECORD_CALL_TARGET);
__ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
__ Call(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL);
PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
context()->Plug(v0);
}
@ -4043,7 +4043,7 @@ void FullCodeGenerator::EmitUnaryOperation(UnaryOperation* expr,
VisitForAccumulatorValue(expr->expression());
SetSourcePosition(expr->position());
__ mov(a0, result_register());
CallIC(stub.GetCode(), RelocInfo::CODE_TARGET,
CallIC(stub.GetCode(isolate()), RelocInfo::CODE_TARGET,
expr->UnaryOperationFeedbackId());
context()->Plug(v0);
}
@ -4156,7 +4156,9 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
SetSourcePosition(expr->position());

BinaryOpStub stub(Token::ADD, NO_OVERWRITE);
CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->CountBinOpFeedbackId());
CallIC(stub.GetCode(isolate()),
RelocInfo::CODE_TARGET,
expr->CountBinOpFeedbackId());
patch_site.EmitPatchInfo();
__ bind(&done);

@ -4388,7 +4390,7 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
}
// Record position and call the compare IC.
SetSourcePosition(expr->position());
Handle<Code> ic = CompareIC::GetUninitialized(op);
Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op);
CallIC(ic, RelocInfo::CODE_TARGET, expr->CompareOperationFeedbackId());
patch_site.EmitPatchInfo();
PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

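These full-codegen call sites need no extra plumbing because the code generator already holds an isolate and exposes it through an isolate() accessor; only the GetCode and CompareIC::GetUninitialized signatures changed. A reduced model of that shape (stand-in types, not V8 source):

// Sketch: a code generator that owns its isolate hands it to helpers
// instead of letting them consult thread-local state.
struct Isolate {};
struct Code {};

struct StubModel {
  Code code_;
  Code* GetCode(Isolate* isolate) { (void)isolate; return &code_; }
};

class CodeGeneratorModel {
 public:
  explicit CodeGeneratorModel(Isolate* isolate) : isolate_(isolate) {}
  Isolate* isolate() const { return isolate_; }  // mirrors the isolate() accessor
  void EmitStubCall(StubModel* stub) {
    // Explicit, like CallIC(stub.GetCode(isolate()), ...).
    Code* code = stub->GetCode(isolate());
    (void)code;
  }
 private:
  Isolate* isolate_;
};
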
@ -1052,39 +1052,39 @@ void LCodeGen::DoCallStub(LCallStub* instr) {
switch (instr->hydrogen()->major_key()) {
case CodeStub::RegExpConstructResult: {
RegExpConstructResultStub stub;
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
break;
}
case CodeStub::RegExpExec: {
RegExpExecStub stub;
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
break;
}
case CodeStub::SubString: {
SubStringStub stub;
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
break;
}
case CodeStub::NumberToString: {
NumberToStringStub stub;
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
break;
}
case CodeStub::StringAdd: {
StringAddStub stub(NO_STRING_ADD_FLAGS);
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
break;
}
case CodeStub::StringCompare: {
StringCompareStub stub;
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
break;
}
case CodeStub::TranscendentalCache: {
__ lw(a0, MemOperand(sp, 0));
TranscendentalCacheStub stub(instr->transcendental_type(),
TranscendentalCacheStub::TAGGED);
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
break;
}
default:
@ -1747,7 +1747,7 @@ void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
ASSERT(ToRegister(instr->result()).is(v0));

BinaryOpStub stub(instr->op(), NO_OVERWRITE);
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
// Other arch use a nop here, to signal that there is no inlined
// patchable code. Mips does not need the nop, since our marker
// instruction (andi zero_reg) will never be used in normal code.
@ -2185,7 +2185,7 @@ void LCodeGen::DoStringCompareAndBranch(LStringCompareAndBranch* instr) {
int true_block = chunk_->LookupDestination(instr->true_block_id());
int false_block = chunk_->LookupDestination(instr->false_block_id());

Handle<Code> ic = CompareIC::GetUninitialized(op);
Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op);
CallCode(ic, RelocInfo::CODE_TARGET, instr);

Condition condition = ComputeCompareCondition(op);
@ -2363,7 +2363,7 @@ void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
ASSERT(result.is(v0));

InstanceofStub stub(InstanceofStub::kArgsInRegisters);
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);

__ Branch(&true_label, eq, result, Operand(zero_reg));
__ li(result, Operand(factory()->false_value()));
@ -2483,7 +2483,7 @@ void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
__ li(temp, Operand(delta * kPointerSize), CONSTANT_SIZE);
__ StoreToSafepointRegisterSlot(temp, temp);
}
CallCodeGeneric(stub.GetCode(),
CallCodeGeneric(stub.GetCode(isolate()),
RelocInfo::CODE_TARGET,
instr,
RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
@ -2498,7 +2498,7 @@ void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
void LCodeGen::DoCmpT(LCmpT* instr) {
Token::Value op = instr->op();

Handle<Code> ic = CompareIC::GetUninitialized(op);
Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op);
CallCode(ic, RelocInfo::CODE_TARGET, instr);
// On MIPS there is no need for a "no inlined smi code" marker (nop).

@ -3796,7 +3796,7 @@ void LCodeGen::DoMathLog(LUnaryMathOperation* instr) {
ASSERT(ToDoubleRegister(instr->result()).is(f4));
TranscendentalCacheStub stub(TranscendentalCache::LOG,
TranscendentalCacheStub::UNTAGGED);
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
}


@ -3804,7 +3804,7 @@ void LCodeGen::DoMathTan(LUnaryMathOperation* instr) {
ASSERT(ToDoubleRegister(instr->result()).is(f4));
TranscendentalCacheStub stub(TranscendentalCache::TAN,
TranscendentalCacheStub::UNTAGGED);
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
}


@ -3812,7 +3812,7 @@ void LCodeGen::DoMathCos(LUnaryMathOperation* instr) {
ASSERT(ToDoubleRegister(instr->result()).is(f4));
TranscendentalCacheStub stub(TranscendentalCache::COS,
TranscendentalCacheStub::UNTAGGED);
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
}


@ -3820,7 +3820,7 @@ void LCodeGen::DoMathSin(LUnaryMathOperation* instr) {
ASSERT(ToDoubleRegister(instr->result()).is(f4));
TranscendentalCacheStub stub(TranscendentalCache::SIN,
TranscendentalCacheStub::UNTAGGED);
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
}


@ -3912,7 +3912,7 @@ void LCodeGen::DoCallFunction(LCallFunction* instr) {

int arity = instr->arity();
CallFunctionStub stub(arity, NO_CALL_FUNCTION_FLAGS);
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
__ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}

@ -3946,7 +3946,7 @@ void LCodeGen::DoCallNew(LCallNew* instr) {

CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
__ li(a0, Operand(instr->arity()));
CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr);
CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
}


@ -4319,7 +4319,7 @@ void LCodeGen::DoStringAdd(LStringAdd* instr) {
__ push(ToRegister(instr->left()));
__ push(ToRegister(instr->right()));
StringAddStub stub(NO_STRING_CHECK_IN_STUB);
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
}


@ -5357,7 +5357,7 @@ void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
FastCloneShallowArrayStub::Mode mode =
FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS;
FastCloneShallowArrayStub stub(mode, DONT_TRACK_ALLOCATION_SITE, length);
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
} else if (instr->hydrogen()->depth() > 1) {
CallRuntime(Runtime::kCreateArrayLiteral, 3, instr);
} else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
@ -5368,7 +5368,7 @@ void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
? FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS
: FastCloneShallowArrayStub::CLONE_ELEMENTS;
FastCloneShallowArrayStub stub(mode, allocation_site_mode, length);
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
}
}

@ -5562,7 +5562,7 @@ void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) {
CallRuntime(Runtime::kCreateObjectLiteralShallow, 4, instr);
} else {
FastCloneShallowObjectStub stub(properties_count);
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
}
}

@ -5636,7 +5636,7 @@ void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
FastNewClosureStub stub(shared_info->language_mode());
__ li(a1, Operand(shared_info));
__ push(a1);
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
} else {
__ li(a2, Operand(shared_info));
__ li(a1, Operand(pretenure
@ -5909,7 +5909,7 @@ void LCodeGen::DoStackCheck(LStackCheck* instr) {
__ LoadRoot(at, Heap::kStackLimitRootIndex);
__ Branch(&done, hs, sp, Operand(at));
StackCheckStub stub;
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
EnsureSpaceForLazyDeopt();
__ bind(&done);
RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);

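DoCallStub builds a fresh stub object per major key and immediately resolves it to code. GetCode() is essentially a find-or-create lookup in a per-isolate stub cache (the GenerateFPStubs hunk above shows the cache probe as FindCodeInCache), so passing the isolate explicitly removes the one piece of hidden state. A stand-alone sketch of that lookup, with stand-in types:

// Sketch of the per-isolate stub cache that GetCode() consults: with the
// isolate passed in, the find-or-create lookup needs no TLS access.
#include <cstdint>
#include <map>

struct Code {};

struct Isolate {
  std::map<uint32_t, Code> code_stub_cache;  // keyed by the stub's major/minor key
};

struct StubModel {
  uint32_t key;
  Code* GetCode(Isolate* isolate) {
    // Find this stub's code in the isolate's cache, creating it on a miss.
    return &isolate->code_stub_cache[key];
  }
};

void DoCallStubModel(uint32_t major_key, Isolate* isolate) {
  StubModel stub{major_key};
  Code* code = stub.GetCode(isolate);  // mirrors CallCode(stub.GetCode(isolate()), ...)
  (void)code;
}
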
@ -2755,7 +2755,7 @@ void MacroAssembler::DebugBreak() {
PrepareCEntryFunction(ExternalReference(Runtime::kDebugBreak, isolate()));
CEntryStub ces(1);
ASSERT(AllowThisStubCall(&ces));
Call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
Call(ces.GetCode(isolate()), RelocInfo::DEBUG_BREAK);
}

#endif // ENABLE_DEBUGGER_SUPPORT
@ -3941,14 +3941,14 @@ void MacroAssembler::CallStub(CodeStub* stub,
const Operand& r2,
BranchDelaySlot bd) {
ASSERT(AllowThisStubCall(stub)); // Stub calls are not allowed in some stubs.
Call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id,
Call(stub->GetCode(isolate()), RelocInfo::CODE_TARGET, ast_id,
cond, r1, r2, bd);
}


void MacroAssembler::TailCallStub(CodeStub* stub) {
ASSERT(allow_stub_calls_ || stub->CompilingCallsToThisStubIsGCSafe());
Jump(stub->GetCode(), RelocInfo::CODE_TARGET);
Jump(stub->GetCode(isolate()), RelocInfo::CODE_TARGET);
}


@ -4299,7 +4299,7 @@ void MacroAssembler::JumpToExternalReference(const ExternalReference& builtin,
BranchDelaySlot bd) {
PrepareCEntryFunction(builtin);
CEntryStub stub(1);
Jump(stub.GetCode(),
Jump(stub.GetCode(isolate()),
RelocInfo::CODE_TARGET,
al,
zero_reg,

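The MacroAssembler wrappers are the highest-leverage conversions: CallStub, TailCallStub and JumpToExternalReference funnel most stub calls, and the assembler already knows which isolate it emits code for. A compact model (stand-in types, not V8 source):

// Sketch: an assembler that owns an Isolate* forwards it to every stub
// it resolves, so stub resolution never touches thread-local state.
struct Isolate {};
struct Code {};

struct StubModel {
  Code code_;
  Code* GetCode(Isolate* isolate) { (void)isolate; return &code_; }
};

class MacroAssemblerModel {
 public:
  explicit MacroAssemblerModel(Isolate* isolate) : isolate_(isolate) {}
  Isolate* isolate() const { return isolate_; }
  void CallStub(StubModel* stub) { Call(stub->GetCode(isolate())); }
  void TailCallStub(StubModel* stub) { Jump(stub->GetCode(isolate())); }
 private:
  void Call(Code*) { /* emit a call to the code object */ }
  void Jump(Code*) { /* emit a tail jump to the code object */ }
  Isolate* isolate_;
};
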
@ -3079,11 +3079,11 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadElement(
receiver_map->has_external_array_elements()) {
Handle<Code> stub = KeyedLoadFastElementStub(
receiver_map->instance_type() == JS_ARRAY_TYPE,
elements_kind).GetCode();
elements_kind).GetCode(isolate());
__ DispatchMap(a1, a2, receiver_map, stub, DO_SMI_CHECK);
} else {
Handle<Code> stub =
KeyedLoadDictionaryElementStub().GetCode();
KeyedLoadDictionaryElementStub().GetCode(isolate());
__ DispatchMap(a1, a2, receiver_map, stub, DO_SMI_CHECK);
}

@ -3175,7 +3175,9 @@ Handle<Code> KeyedStoreStubCompiler::CompileStoreElement(
ElementsKind elements_kind = receiver_map->elements_kind();
bool is_js_array = receiver_map->instance_type() == JS_ARRAY_TYPE;
Handle<Code> stub =
KeyedStoreElementStub(is_js_array, elements_kind, grow_mode_).GetCode();
KeyedStoreElementStub(is_js_array,
elements_kind,
grow_mode_).GetCode(isolate());

__ DispatchMap(a2, a3, receiver_map, stub, DO_SMI_CHECK);

@ -8051,7 +8051,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_CompileForOnStackReplacement) {
PrintF("]\n");
}
InterruptStub interrupt_stub;
Handle<Code> check_code = interrupt_stub.GetCode();
Handle<Code> check_code = interrupt_stub.GetCode(isolate);
Handle<Code> replacement_code = isolate->builtins()->OnStackReplacement();
Deoptimizer::RevertStackCheckCode(*unoptimized,
*check_code,

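Runtime functions are the easiest call sites to convert, since the isolate is already a local there; the very next line uses isolate->builtins(). A stand-alone sketch of that shape (stand-in types, not V8 source):

// Sketch: inside a runtime entry the isolate is already in scope, so
// resolving a stub to code simply reuses it.
struct Code {};
struct Builtins {
  Code on_stack_replacement;
  Code* OnStackReplacement() { return &on_stack_replacement; }
};
struct Isolate {
  Builtins builtins_;
  Builtins* builtins() { return &builtins_; }
};

struct InterruptStubModel {
  Code code_;
  Code* GetCode(Isolate* isolate) { (void)isolate; return &code_; }
};

void CompileForOnStackReplacementModel(Isolate* isolate) {
  InterruptStubModel interrupt_stub;
  Code* check_code = interrupt_stub.GetCode(isolate);  // explicit isolate
  Code* replacement_code = isolate->builtins()->OnStackReplacement();
  (void)check_code;
  (void)replacement_code;
}
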
@ -1517,10 +1517,11 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadElementPolymorphic(
if (IsFastElementsKind(elements_kind) ||
IsExternalArrayElementsKind(elements_kind)) {
cached_stub =
KeyedLoadFastElementStub(is_js_array, elements_kind).GetCode();
KeyedLoadFastElementStub(is_js_array,
elements_kind).GetCode(isolate());
} else {
ASSERT(elements_kind == DICTIONARY_ELEMENTS);
cached_stub = KeyedLoadDictionaryElementStub().GetCode();
cached_stub = KeyedLoadDictionaryElementStub().GetCode(isolate());
}
}

@ -1584,12 +1585,12 @@ Handle<Code> KeyedStoreStubCompiler::CompileStoreElementPolymorphic(
transitioned_map->elements_kind(),
is_js_array,
strict_mode_,
grow_mode_).GetCode();
grow_mode_).GetCode(isolate());
} else {
cached_stub = KeyedStoreElementStub(
is_js_array,
elements_kind,
grow_mode_).GetCode();
grow_mode_).GetCode(isolate());
}
ASSERT(!cached_stub.is_null());
handler_ics.Add(cached_stub);

@ -4038,23 +4038,23 @@ bool CEntryStub::IsPregenerated() {
}


void CodeStub::GenerateStubsAheadOfTime() {
CEntryStub::GenerateAheadOfTime();
StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime();
void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
CEntryStub::GenerateAheadOfTime(isolate);
StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
// It is important that the store buffer overflow stubs are generated first.
RecordWriteStub::GenerateFixedRegStubsAheadOfTime();
RecordWriteStub::GenerateFixedRegStubsAheadOfTime(isolate);
}


void CodeStub::GenerateFPStubs() {
void CodeStub::GenerateFPStubs(Isolate* isolate) {
}


void CEntryStub::GenerateAheadOfTime() {
void CEntryStub::GenerateAheadOfTime(Isolate* isolate) {
CEntryStub stub(1, kDontSaveFPRegs);
stub.GetCode()->set_is_pregenerated(true);
stub.GetCode(isolate)->set_is_pregenerated(true);
CEntryStub save_doubles(1, kSaveFPRegs);
save_doubles.GetCode()->set_is_pregenerated(true);
save_doubles.GetCode(isolate)->set_is_pregenerated(true);
}


@ -5808,7 +5808,7 @@ void ICCompareStub::GenerateHeapNumbers(MacroAssembler* masm) {
__ bind(&generic_stub);
ICCompareStub stub(op_, CompareIC::GENERIC, CompareIC::GENERIC,
CompareIC::GENERIC);
__ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);
__ jmp(stub.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);

__ bind(&maybe_undefined1);
if (Token::IsOrderedRelationalCompareOp(op_)) {
@ -6299,15 +6299,16 @@ bool RecordWriteStub::IsPregenerated() {
}


void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime() {
void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(
Isolate* isolate) {
StoreBufferOverflowStub stub1(kDontSaveFPRegs);
stub1.GetCode()->set_is_pregenerated(true);
stub1.GetCode(isolate)->set_is_pregenerated(true);
StoreBufferOverflowStub stub2(kSaveFPRegs);
stub2.GetCode()->set_is_pregenerated(true);
stub2.GetCode(isolate)->set_is_pregenerated(true);
}


void RecordWriteStub::GenerateFixedRegStubsAheadOfTime() {
void RecordWriteStub::GenerateFixedRegStubsAheadOfTime(Isolate* isolate) {
for (AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
!entry->object.is(no_reg);
entry++) {
@ -6316,7 +6317,7 @@ void RecordWriteStub::GenerateFixedRegStubsAheadOfTime() {
entry->address,
entry->action,
kDontSaveFPRegs);
stub.GetCode()->set_is_pregenerated(true);
stub.GetCode(isolate)->set_is_pregenerated(true);
}
}

@ -6615,7 +6616,7 @@ void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
ASSERT(!Serializer::enabled());
CEntryStub ces(1, kSaveFPRegs);
__ Call(ces.GetCode(), RelocInfo::CODE_TARGET);
__ Call(ces.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);
int parameter_count_offset =
StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset;
__ movq(rbx, MemOperand(rbp, parameter_count_offset));

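The comment retained in GenerateStubsAheadOfTime above encodes an ordering constraint: the record-write stubs are assumed to embed calls to the store-buffer-overflow stub, so the latter must be pregenerated first. Modeled minimally below (stand-in types; the dependency is the assumption being illustrated):

// Sketch: generating dependents after their dependency, mirroring the
// "store buffer overflow stubs are generated first" comment.
#include <cassert>

struct Isolate {
  bool store_buffer_stub_ready = false;
  bool record_write_stub_ready = false;
};

void GenerateStoreBufferOverflowStub(Isolate* isolate) {
  isolate->store_buffer_stub_ready = true;
}

void GenerateRecordWriteStub(Isolate* isolate) {
  // A record-write stub embeds a call to the store-buffer stub, so that
  // stub must already exist when this one is generated.
  assert(isolate->store_buffer_stub_ready);
  isolate->record_write_stub_ready = true;
}

void GenerateStubsAheadOfTime(Isolate* isolate) {
  GenerateStoreBufferOverflowStub(isolate);  // must come first
  GenerateRecordWriteStub(isolate);
}
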
@ -68,7 +68,7 @@ class StoreBufferOverflowStub: public PlatformCodeStub {
void Generate(MacroAssembler* masm);

virtual bool IsPregenerated() { return true; }
static void GenerateFixedRegStubsAheadOfTime();
static void GenerateFixedRegStubsAheadOfTime(Isolate* isolate);
virtual bool SometimesSetsUpAFrame() { return false; }

private:
@ -402,7 +402,7 @@ class RecordWriteStub: public PlatformCodeStub {
};

virtual bool IsPregenerated();
static void GenerateFixedRegStubsAheadOfTime();
static void GenerateFixedRegStubsAheadOfTime(Isolate* isolate);
virtual bool SometimesSetsUpAFrame() { return false; }

static const byte kTwoByteNopInstruction = 0x3c; // Cmpb al, #imm8.

@ -1007,7 +1007,7 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {

// Record position before stub call for type feedback.
SetSourcePosition(clause->position());
Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT);
Handle<Code> ic = CompareIC::GetUninitialized(isolate(), Token::EQ_STRICT);
CallIC(ic, RelocInfo::CODE_TARGET, clause->CompareId());
patch_site.EmitPatchInfo();

@ -1926,7 +1926,7 @@ void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
__ bind(&stub_call);
__ movq(rax, rcx);
BinaryOpStub stub(op, mode);
CallIC(stub.GetCode(), RelocInfo::CODE_TARGET,
CallIC(stub.GetCode(isolate()), RelocInfo::CODE_TARGET,
expr->BinaryOperationFeedbackId());
patch_site.EmitPatchInfo();
__ jmp(&done, Label::kNear);
@ -1976,7 +1976,7 @@ void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
__ pop(rdx);
BinaryOpStub stub(op, mode);
JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
CallIC(stub.GetCode(), RelocInfo::CODE_TARGET,
CallIC(stub.GetCode(isolate()), RelocInfo::CODE_TARGET,
expr->BinaryOperationFeedbackId());
patch_site.EmitPatchInfo();
context()->Plug(rax);
@ -2457,7 +2457,7 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) {
__ Move(rbx, cell);

CallConstructStub stub(RECORD_CALL_TARGET);
__ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
__ Call(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL);
PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
context()->Plug(rax);
}
@ -3990,7 +3990,7 @@ void FullCodeGenerator::EmitUnaryOperation(UnaryOperation* expr,
// accumulator register rax.
VisitForAccumulatorValue(expr->expression());
SetSourcePosition(expr->position());
CallIC(stub.GetCode(), RelocInfo::CODE_TARGET,
CallIC(stub.GetCode(isolate()), RelocInfo::CODE_TARGET,
expr->UnaryOperationFeedbackId());
context()->Plug(rax);
}
@ -4108,7 +4108,9 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
__ movq(rdx, rax);
__ Move(rax, Smi::FromInt(1));
BinaryOpStub stub(expr->binary_op(), NO_OVERWRITE);
CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->CountBinOpFeedbackId());
CallIC(stub.GetCode(isolate()),
RelocInfo::CODE_TARGET,
expr->CountBinOpFeedbackId());
patch_site.EmitPatchInfo();
__ bind(&done);

@ -4343,7 +4345,7 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {

// Record position and call the compare IC.
SetSourcePosition(expr->position());
Handle<Code> ic = CompareIC::GetUninitialized(op);
Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op);
CallIC(ic, RelocInfo::CODE_TARGET, expr->CompareOperationFeedbackId());
patch_site.EmitPatchInfo();

@ -954,38 +954,38 @@ void LCodeGen::DoCallStub(LCallStub* instr) {
switch (instr->hydrogen()->major_key()) {
case CodeStub::RegExpConstructResult: {
RegExpConstructResultStub stub;
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
break;
}
case CodeStub::RegExpExec: {
RegExpExecStub stub;
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
break;
}
case CodeStub::SubString: {
SubStringStub stub;
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
break;
}
case CodeStub::NumberToString: {
NumberToStringStub stub;
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
break;
}
case CodeStub::StringAdd: {
StringAddStub stub(NO_STRING_ADD_FLAGS);
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
break;
}
case CodeStub::StringCompare: {
StringCompareStub stub;
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
break;
}
case CodeStub::TranscendentalCache: {
TranscendentalCacheStub stub(instr->transcendental_type(),
TranscendentalCacheStub::TAGGED);
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
break;
}
default:
@ -1809,7 +1809,7 @@ void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
ASSERT(ToRegister(instr->result()).is(rax));

BinaryOpStub stub(instr->op(), NO_OVERWRITE);
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
__ nop(); // Signals no inlined code.
}

@ -2203,7 +2203,7 @@ void LCodeGen::DoStringCompareAndBranch(LStringCompareAndBranch* instr) {
int true_block = chunk_->LookupDestination(instr->true_block_id());
int false_block = chunk_->LookupDestination(instr->false_block_id());

Handle<Code> ic = CompareIC::GetUninitialized(op);
Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op);
CallCode(ic, RelocInfo::CODE_TARGET, instr);

Condition condition = TokenToCondition(op, false);
@ -2373,7 +2373,7 @@ void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
InstanceofStub stub(InstanceofStub::kNoFlags);
__ push(ToRegister(instr->left()));
__ push(ToRegister(instr->right()));
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
Label true_value, done;
__ testq(rax, rax);
__ j(zero, &true_value, Label::kNear);
@ -2472,7 +2472,7 @@ void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
// safepoint with two arguments because stub is going to
// remove the third argument from the stack before jumping
// to instanceof builtin on the slow path.
CallCodeGeneric(stub.GetCode(),
CallCodeGeneric(stub.GetCode(isolate()),
RelocInfo::CODE_TARGET,
instr,
RECORD_SAFEPOINT_WITH_REGISTERS,
@ -2507,7 +2507,7 @@ void LCodeGen::DoInstanceSize(LInstanceSize* instr) {
void LCodeGen::DoCmpT(LCmpT* instr) {
Token::Value op = instr->op();

Handle<Code> ic = CompareIC::GetUninitialized(op);
Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op);
CallCode(ic, RelocInfo::CODE_TARGET, instr);

Condition condition = TokenToCondition(op, false);
@ -3741,7 +3741,7 @@ void LCodeGen::DoMathLog(LUnaryMathOperation* instr) {
ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
TranscendentalCacheStub stub(TranscendentalCache::LOG,
TranscendentalCacheStub::UNTAGGED);
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
}


@ -3749,7 +3749,7 @@ void LCodeGen::DoMathTan(LUnaryMathOperation* instr) {
ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
TranscendentalCacheStub stub(TranscendentalCache::TAN,
TranscendentalCacheStub::UNTAGGED);
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
}


@ -3757,7 +3757,7 @@ void LCodeGen::DoMathCos(LUnaryMathOperation* instr) {
ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
TranscendentalCacheStub stub(TranscendentalCache::COS,
TranscendentalCacheStub::UNTAGGED);
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
}


@ -3765,7 +3765,7 @@ void LCodeGen::DoMathSin(LUnaryMathOperation* instr) {
ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
TranscendentalCacheStub stub(TranscendentalCache::SIN,
TranscendentalCacheStub::UNTAGGED);
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
}


@ -3857,7 +3857,7 @@ void LCodeGen::DoCallFunction(LCallFunction* instr) {

int arity = instr->arity();
CallFunctionStub stub(arity, NO_CALL_FUNCTION_FLAGS);
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
__ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
}

@ -3890,7 +3890,7 @@ void LCodeGen::DoCallNew(LCallNew* instr) {

CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
__ Set(rax, instr->arity());
CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr);
CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
}


@ -4258,7 +4258,7 @@ void LCodeGen::DoStringAdd(LStringAdd* instr) {
EmitPushTaggedOperand(instr->left());
EmitPushTaggedOperand(instr->right());
StringAddStub stub(NO_STRING_CHECK_IN_STUB);
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
}


@ -5133,7 +5133,7 @@ void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
FastCloneShallowArrayStub::Mode mode =
FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS;
FastCloneShallowArrayStub stub(mode, DONT_TRACK_ALLOCATION_SITE, length);
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
} else if (instr->hydrogen()->depth() > 1) {
CallRuntime(Runtime::kCreateArrayLiteral, 3, instr);
} else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
@ -5144,7 +5144,7 @@ void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
? FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS
: FastCloneShallowArrayStub::CLONE_ELEMENTS;
FastCloneShallowArrayStub stub(mode, allocation_site_mode, length);
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
}
}

@ -5340,7 +5340,7 @@ void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) {
__ Move(rcx, constant_properties);
__ Move(rdx, Smi::FromInt(flags));
FastCloneShallowObjectStub stub(properties_count);
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
}
}

@ -5410,7 +5410,7 @@ void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
if (!pretenure && shared_info->num_literals() == 0) {
FastNewClosureStub stub(shared_info->language_mode());
__ Push(shared_info);
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
} else {
__ push(rsi);
__ Push(shared_info);
@ -5656,7 +5656,7 @@ void LCodeGen::DoStackCheck(LStackCheck* instr) {
__ CompareRoot(rsp, Heap::kStackLimitRootIndex);
__ j(above_equal, &done, Label::kNear);
StackCheckStub stub;
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
EnsureSpaceForLazyDeopt(Deoptimizer::patch_size());
last_lazy_deopt_pc_ = masm()->pc_offset();
__ bind(&done);

@ -546,13 +546,13 @@ void MacroAssembler::Abort(const char* msg) {

void MacroAssembler::CallStub(CodeStub* stub, TypeFeedbackId ast_id) {
ASSERT(AllowThisStubCall(stub)); // Calls are not allowed in some stubs
Call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id);
Call(stub->GetCode(isolate()), RelocInfo::CODE_TARGET, ast_id);
}


void MacroAssembler::TailCallStub(CodeStub* stub) {
ASSERT(allow_stub_calls_ || stub->CompilingCallsToThisStubIsGCSafe());
Jump(stub->GetCode(), RelocInfo::CODE_TARGET);
Jump(stub->GetCode(isolate()), RelocInfo::CODE_TARGET);
}


@ -834,7 +834,7 @@ void MacroAssembler::JumpToExternalReference(const ExternalReference& ext,
// Set the entry point and jump to the C entry runtime stub.
LoadAddress(rbx, ext);
CEntryStub ces(result_size);
jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
jmp(ces.GetCode(isolate()), RelocInfo::CODE_TARGET);
}


@ -3139,7 +3139,7 @@ void MacroAssembler::DebugBreak() {
LoadAddress(rbx, ExternalReference(Runtime::kDebugBreak, isolate()));
CEntryStub ces(1);
ASSERT(AllowThisStubCall(&ces));
Call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
Call(ces.GetCode(isolate()), RelocInfo::DEBUG_BREAK);
}
#endif // ENABLE_DEBUGGER_SUPPORT

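DebugBreak shows how little new plumbing the change needs: the surrounding line already names isolate() for the runtime ExternalReference, and the stub resolution simply reuses the same pointer. A small model (stand-in types, not V8 source):

// Sketch: the isolate used for the runtime reference is reused to
// resolve the CEntry stub, with no extra lookup.
struct Isolate {};
struct Code {};

struct CEntryStubModel {
  Code code_;
  Code* GetCode(Isolate* isolate) { (void)isolate; return &code_; }
};

struct ExternalReferenceModel {
  ExternalReferenceModel(int runtime_id, Isolate* isolate)
      : id(runtime_id), isolate(isolate) {}
  int id;
  Isolate* isolate;
};

class DebuggerAssemblerModel {
 public:
  explicit DebuggerAssemblerModel(Isolate* isolate) : isolate_(isolate) {}
  Isolate* isolate() const { return isolate_; }
  void DebugBreak() {
    ExternalReferenceModel ref(/*kDebugBreak=*/1, isolate());  // isolate used here...
    CEntryStubModel ces;
    Code* code = ces.GetCode(isolate());                       // ...and reused here
    (void)ref;
    (void)code;
  }
 private:
  Isolate* isolate_;
};
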
@ -2742,7 +2742,9 @@ Handle<Code> KeyedStoreStubCompiler::CompileStoreElement(
ElementsKind elements_kind = receiver_map->elements_kind();
bool is_js_array = receiver_map->instance_type() == JS_ARRAY_TYPE;
Handle<Code> stub =
KeyedStoreElementStub(is_js_array, elements_kind, grow_mode_).GetCode();
KeyedStoreElementStub(is_js_array,
elements_kind,
grow_mode_).GetCode(isolate());

__ DispatchMap(rdx, receiver_map, stub, DO_SMI_CHECK);

@ -2981,11 +2983,11 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadElement(
receiver_map->has_external_array_elements()) {
Handle<Code> stub = KeyedLoadFastElementStub(
receiver_map->instance_type() == JS_ARRAY_TYPE,
elements_kind).GetCode();
elements_kind).GetCode(isolate());
__ DispatchMap(rdx, receiver_map, stub, DO_SMI_CHECK);
} else {
Handle<Code> stub =
KeyedLoadDictionaryElementStub().GetCode();
KeyedLoadDictionaryElementStub().GetCode(isolate());
__ DispatchMap(rdx, receiver_map, stub, DO_SMI_CHECK);
}
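
Taken together, the mechanical rule at every call site in this commit is: find the nearest isolate, whether a RUNTIME_FUNCTION local, masm->isolate() inside a stub generator, or the codegen/assembler isolate() accessor, and pass it through. A before-and-after sketch of the migration (stand-in types, not V8 source):

// Migration pattern applied throughout the commit, in miniature.
struct Isolate {};
struct Code {};

struct SomeStub {
  Code code_;
  // Old shape: Code* GetCode();          // looked up the isolate via TLS
  Code* GetCode(Isolate* isolate) {       // new shape: isolate is explicit
    (void)isolate;
    return &code_;
  }
};

void EmitCallSite(Isolate* isolate) {
  SomeStub stub;
  // Before: Call(stub.GetCode(), ...);
  // After:
  Code* code = stub.GetCode(isolate);
  (void)code;  // Call(code, ...)
}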