MIPS: Drop IsPregenerated() and allow_stub_calls flag.

Port r18167 (1e4b11e0)

Original commit message:
This also removes the fixed register hack that was required to support RecordWriteStub in the snapshot.

BUG=
R=plind44@gmail.com

Review URL: https://codereview.chromium.org/97373002

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@18169 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
palfia@homejinni.com 2013-11-30 01:32:07 +00:00
parent dc32799d10
commit 3733905afa
4 changed files with 1 addition and 117 deletions
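
As context for the diffs below: the guard being simplified is MacroAssembler::AllowThisStubCall. What follows is a rough, self-contained sketch of the before/after behaviour, not the actual V8 code; the types StubLike and MasmState and the Before/After helper names are hypothetical stand-ins for illustration only.

// Sketch only: hypothetical stand-in types, not the real V8 classes.
struct StubLike {
  bool sometimes_sets_up_a_frame;  // what stub->SometimesSetsUpAFrame() reports
  bool gc_safe;                    // roughly what IsPregenerated()/GC-safety used to convey
};

struct MasmState {
  bool has_frame;         // MacroAssembler::has_frame_
  bool allow_stub_calls;  // MacroAssembler::allow_stub_calls_ (removed by this commit)
};

// Old guard: a frame was required if the stub sets one up, and the call also
// needed either the allow_stub_calls flag or a GC-safe (pregenerated) stub.
bool AllowThisStubCallBefore(const MasmState& m, const StubLike& s) {
  if (!m.has_frame && s.sometimes_sets_up_a_frame) return false;
  return m.allow_stub_calls || s.gc_safe;
}

// New guard: only the frame requirement remains.
bool AllowThisStubCallAfter(const MasmState& m, const StubLike& s) {
  return m.has_frame || !s.sometimes_sets_up_a_frame;
}

The practical effect is that a call site no longer depends on a stub being pregenerated into the snapshot (or on the allow_stub_calls flag); the only remaining restriction is whether the stub may set up a frame while none exists, which is why the fixed-register table for RecordWriteStub below can be deleted.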

View File

@@ -714,27 +714,6 @@ void DoubleToIStub::Generate(MacroAssembler* masm) {
 }
-bool WriteInt32ToHeapNumberStub::IsPregenerated(Isolate* isolate) {
-  // These variants are compiled ahead of time. See next method.
-  if (the_int_.is(a1) &&
-      the_heap_number_.is(v0) &&
-      scratch_.is(a2) &&
-      sign_.is(a3)) {
-    return true;
-  }
-  if (the_int_.is(a2) &&
-      the_heap_number_.is(v0) &&
-      scratch_.is(a3) &&
-      sign_.is(a0)) {
-    return true;
-  }
-  // Other register combinations are generated as and when they are needed,
-  // so it is unsafe to call them from stubs (we can't generate a stub while
-  // we are generating a stub).
-  return false;
-}
 void WriteInt32ToHeapNumberStub::GenerateFixedRegStubsAheadOfTime(
     Isolate* isolate) {
   WriteInt32ToHeapNumberStub stub1(a1, v0, a2, a3);
@@ -1749,18 +1728,11 @@ bool CEntryStub::NeedsImmovableCode() {
 }
-bool CEntryStub::IsPregenerated(Isolate* isolate) {
-  return (!save_doubles_ || isolate->fp_stubs_generated()) &&
-      result_size_ == 1;
-}
 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
   CEntryStub::GenerateAheadOfTime(isolate);
   WriteInt32ToHeapNumberStub::GenerateFixedRegStubsAheadOfTime(isolate);
   StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
   StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
-  RecordWriteStub::GenerateFixedRegStubsAheadOfTime(isolate);
   ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate);
   CreateAllocationSiteStub::GenerateAheadOfTime(isolate);
   BinaryOpStub::GenerateAheadOfTime(isolate);
@@ -5506,68 +5478,6 @@ void NameDictionaryLookupStub::Generate(MacroAssembler* masm) {
 }
-struct AheadOfTimeWriteBarrierStubList {
-  Register object, value, address;
-  RememberedSetAction action;
-};
-#define REG(Name) { kRegister_ ## Name ## _Code }
-static const AheadOfTimeWriteBarrierStubList kAheadOfTime[] = {
-  // Used in RegExpExecStub.
-  { REG(s2), REG(s0), REG(t3), EMIT_REMEMBERED_SET },
-  // Used in CompileArrayPushCall.
-  // Also used in StoreIC::GenerateNormal via GenerateDictionaryStore.
-  // Also used in KeyedStoreIC::GenerateGeneric.
-  { REG(a3), REG(t0), REG(t1), EMIT_REMEMBERED_SET },
-  // Used in StoreStubCompiler::CompileStoreField via GenerateStoreField.
-  { REG(a1), REG(a2), REG(a3), EMIT_REMEMBERED_SET },
-  { REG(a3), REG(a2), REG(a1), EMIT_REMEMBERED_SET },
-  // Used in KeyedStoreStubCompiler::CompileStoreField via GenerateStoreField.
-  { REG(a2), REG(a1), REG(a3), EMIT_REMEMBERED_SET },
-  { REG(a3), REG(a1), REG(a2), EMIT_REMEMBERED_SET },
-  // KeyedStoreStubCompiler::GenerateStoreFastElement.
-  { REG(a3), REG(a2), REG(t0), EMIT_REMEMBERED_SET },
-  { REG(a2), REG(a3), REG(t0), EMIT_REMEMBERED_SET },
-  // ElementsTransitionGenerator::GenerateMapChangeElementTransition
-  // and ElementsTransitionGenerator::GenerateSmiToDouble
-  // and ElementsTransitionGenerator::GenerateDoubleToObject
-  { REG(a2), REG(a3), REG(t5), EMIT_REMEMBERED_SET },
-  { REG(a2), REG(a3), REG(t5), OMIT_REMEMBERED_SET },
-  // ElementsTransitionGenerator::GenerateDoubleToObject
-  { REG(t2), REG(a2), REG(a0), EMIT_REMEMBERED_SET },
-  { REG(a2), REG(t2), REG(t5), EMIT_REMEMBERED_SET },
-  // StoreArrayLiteralElementStub::Generate
-  { REG(t1), REG(a0), REG(t2), EMIT_REMEMBERED_SET },
-  // FastNewClosureStub::Generate
-  { REG(a2), REG(t0), REG(a1), EMIT_REMEMBERED_SET },
-  // StringAddStub::Generate
-  { REG(t3), REG(a1), REG(t0), EMIT_REMEMBERED_SET },
-  { REG(t3), REG(a0), REG(t0), EMIT_REMEMBERED_SET },
-  // Null termination.
-  { REG(no_reg), REG(no_reg), REG(no_reg), EMIT_REMEMBERED_SET}
-};
-#undef REG
-bool RecordWriteStub::IsPregenerated(Isolate* isolate) {
-  for (const AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
-       !entry->object.is(no_reg);
-       entry++) {
-    if (object_.is(entry->object) &&
-        value_.is(entry->value) &&
-        address_.is(entry->address) &&
-        remembered_set_action_ == entry->action &&
-        save_fp_regs_mode_ == kDontSaveFPRegs) {
-      return true;
-    }
-  }
-  return false;
-}
 void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(
     Isolate* isolate) {
   StoreBufferOverflowStub stub1(kDontSaveFPRegs);
@@ -5578,20 +5488,6 @@ void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(
 }
-void RecordWriteStub::GenerateFixedRegStubsAheadOfTime(Isolate* isolate) {
-  for (const AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
-       !entry->object.is(no_reg);
-       entry++) {
-    RecordWriteStub stub(entry->object,
-                         entry->value,
-                         entry->address,
-                         entry->action,
-                         kDontSaveFPRegs);
-    stub.GetCode(isolate)->set_is_pregenerated(true);
-  }
-}
 bool CodeStub::CanUseFPRegisters() {
   return true;  // FPU is a base requirement for V8.
 }
@@ -5898,7 +5794,6 @@ void StubFailureTailCallTrampolineStub::Generate(MacroAssembler* masm) {
 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
   if (masm->isolate()->function_entry_hook() != NULL) {
-    AllowStubCallsScope allow_stub_calls(masm, true);
     ProfileEntryHookStub stub;
     __ push(ra);
     __ CallStub(&stub);

View File

@@ -69,7 +69,6 @@ class StoreBufferOverflowStub: public PlatformCodeStub {
   void Generate(MacroAssembler* masm);
-  virtual bool IsPregenerated(Isolate* isolate) V8_OVERRIDE { return true; }
   static void GenerateFixedRegStubsAheadOfTime(Isolate* isolate);
   virtual bool SometimesSetsUpAFrame() { return false; }
@@ -240,7 +239,6 @@ class WriteInt32ToHeapNumberStub : public PlatformCodeStub {
     ASSERT(SignRegisterBits::is_valid(sign_.code()));
   }
-  virtual bool IsPregenerated(Isolate* isolate) V8_OVERRIDE;
   static void GenerateFixedRegStubsAheadOfTime(Isolate* isolate);
  private:
@@ -291,8 +289,6 @@ class RecordWriteStub: public PlatformCodeStub {
     INCREMENTAL_COMPACTION
   };
-  virtual bool IsPregenerated(Isolate* isolate) V8_OVERRIDE;
-  static void GenerateFixedRegStubsAheadOfTime(Isolate* isolate);
   virtual bool SometimesSetsUpAFrame() { return false; }
   static void PatchBranchIntoNop(MacroAssembler* masm, int pos) {

View File

@@ -44,7 +44,6 @@ namespace internal {
 MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size)
     : Assembler(arg_isolate, buffer, size),
       generating_stub_(false),
-      allow_stub_calls_(true),
       has_frame_(false) {
   if (isolate() != NULL) {
     code_object_ = Handle<Object>(isolate()->heap()->undefined_value(),
@@ -3886,8 +3885,6 @@ void MacroAssembler::CallStub(CodeStub* stub,
 void MacroAssembler::TailCallStub(CodeStub* stub) {
-  ASSERT(allow_stub_calls_ ||
-         stub->CompilingCallsToThisStubIsGCSafe(isolate()));
   Jump(stub->GetCode(isolate()), RelocInfo::CODE_TARGET);
 }
@@ -4027,8 +4024,6 @@ void MacroAssembler::CallApiFunctionAndReturn(
 bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
-  if (!has_frame_ && stub->SometimesSetsUpAFrame()) return false;
-  return allow_stub_calls_ || stub->CompilingCallsToThisStubIsGCSafe(isolate());
+  return has_frame_ || !stub->SometimesSetsUpAFrame();
 }

View File

@@ -1348,8 +1348,6 @@ class MacroAssembler: public Assembler {
   // Verify restrictions about code generated in stubs.
   void set_generating_stub(bool value) { generating_stub_ = value; }
   bool generating_stub() { return generating_stub_; }
-  void set_allow_stub_calls(bool value) { allow_stub_calls_ = value; }
-  bool allow_stub_calls() { return allow_stub_calls_; }
   void set_has_frame(bool value) { has_frame_ = value; }
   bool has_frame() { return has_frame_; }
   inline bool AllowThisStubCall(CodeStub* stub);
@@ -1646,7 +1644,6 @@ MemOperand SafepointRegistersAndDoublesSlot(Register reg);
   MemOperand SafepointRegistersAndDoublesSlot(Register reg);
   bool generating_stub_;
-  bool allow_stub_calls_;
   bool has_frame_;
   // This handle will be patched with the code object on installation.
   Handle<Object> code_object_;