X64 Crankshaft: Added GeneratePrologue implementation.

Review URL: http://codereview.chromium.org/6326003

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@6324 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
lrn@chromium.org 2011-01-14 13:16:48 +00:00
parent ea7dde421e
commit 9f5c1bfba1
5 changed files with 71 additions and 9 deletions


@@ -290,6 +290,17 @@ bool LCodeGen::GeneratePrologue() {
       __ j(not_zero, &loop);
     } else {
       __ sub(Operand(esp), Immediate(slots * kPointerSize));
+#ifdef _MSC_VER
+      // On windows, you may not access the stack more than one page below
+      // the most recently mapped page. To make the allocated area randomly
+      // accessible, we write to each page in turn (the value is irrelevant).
+      const int kPageSize = 4 * KB;
+      for (int offset = slots * kPointerSize - kPageSize;
+           offset > 0;
+           offset -= kPageSize) {
+        __ mov(Operand(esp, offset), eax);
+      }
+#endif
     }
   }
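Note on the #ifdef _MSC_VER block above: on Windows, moving the stack pointer down by more than a page in a single sub can jump past the guard page, so the code touches one word per page of the newly reserved area. A stand-alone sketch of the same idea (hypothetical names, not V8 code; assumes 4 KB pages):

// Touch one byte in each page of a freshly reserved region, starting
// next to the already-mapped pages and walking down, so the OS extends
// the stack one guard page at a time. The value written is irrelevant.
const int kPageSizeBytes = 4 * 1024;

void TouchReservedPages(char* new_top, int size_in_bytes) {
  for (int offset = size_in_bytes - kPageSizeBytes;
       offset > 0;
       offset -= kPageSizeBytes) {
    new_top[offset] = 0;
  }
}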


@@ -705,6 +705,7 @@ class LiveRange: public ZoneObject {
   bool HasAllocatedSpillOperand() const {
     return spill_operand_ != NULL && !spill_operand_->IsUnallocated();
   }
+
   LOperand* GetSpillOperand() const { return spill_operand_; }
   void SetSpillOperand(LOperand* operand) {
     ASSERT(!operand->IsUnallocated());
@@ -722,7 +723,6 @@ class LiveRange: public ZoneObject {
   bool Covers(LifetimePosition position);
   LifetimePosition FirstIntersection(LiveRange* other);
 
-
   // Add a new interval or a new use position to this live range.
   void EnsureInterval(LifetimePosition start, LifetimePosition end);
   void AddUseInterval(LifetimePosition start, LifetimePosition end);


@@ -77,7 +77,8 @@ const Address kHandleZapValue =
     reinterpret_cast<Address>(V8_UINT64_C(0x1baddead0baddead));
 const Address kFromSpaceZapValue =
     reinterpret_cast<Address>(V8_UINT64_C(0x1beefdad0beefdad));
-const uint64_t kDebugZapValue = 0xbadbaddbbadbaddb;
+const uint64_t kDebugZapValue = V8_UINT64_C(0xbadbaddbbadbaddb);
+const uint64_t kSlotsZapValue = V8_UINT64_C(0xbeefdeadbeefdeed);
 #else
 const Address kZapValue = reinterpret_cast<Address>(0xdeadbeed);
 const Address kHandleZapValue = reinterpret_cast<Address>(0xbaddead);
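The V8_UINT64_C wrapper added to kDebugZapValue matters because a bare 16-digit hex literal is not portably a 64-bit constant on every toolchain V8 targeted. A minimal sketch of the idea, with a hypothetical MY_UINT64_C stand-in (V8's actual definition lives elsewhere in its headers and is toolchain-dependent):

#include <stdint.h>

// Hypothetical stand-in for V8_UINT64_C: give the literal an explicit
// 64-bit suffix so it keeps all of its bits on 32-bit toolchains.
#define MY_UINT64_C(x) (x##ULL)

static const uint64_t kExampleZapValue = MY_UINT64_C(0xbeefdeadbeefdeed);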


@@ -242,8 +242,52 @@ void LCodeGen::Comment(const char* format, ...) {
 bool LCodeGen::GeneratePrologue() {
-  Abort("Unimplemented: %s", "GeneratePrologue");
-  return false;
+  ASSERT(is_generating());
+
+#ifdef DEBUG
+  if (strlen(FLAG_stop_at) > 0 &&
+      info_->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
+    __ int3();
+  }
+#endif
+
+  __ push(rbp);  // Caller's frame pointer.
+  __ movq(rbp, rsp);
+  __ push(rsi);  // Callee's context.
+  __ push(rdi);  // Callee's JS function.
+
+  // Reserve space for the stack slots needed by the code.
+  int slots = StackSlotCount();
+  if (slots > 0) {
+    if (FLAG_debug_code) {
+      __ movl(rax, Immediate(slots));
+      __ movq(kScratchRegister, kSlotsZapValue, RelocInfo::NONE);
+      Label loop;
+      __ bind(&loop);
+      __ push(kScratchRegister);
+      __ decl(rax);
+      __ j(not_zero, &loop);
+    } else {
+      __ subq(rsp, Immediate(slots * kPointerSize));
+#ifdef _MSC_VER
+      // On windows, you may not access the stack more than one page below
+      // the most recently mapped page. To make the allocated area randomly
+      // accessible, we write to each page in turn (the value is irrelevant).
+      const int kPageSize = 4 * KB;
+      for (int offset = slots * kPointerSize - kPageSize;
+           offset > 0;
+           offset -= kPageSize) {
+        __ movq(Operand(rsp, offset), rax);
+      }
+#endif
+    }
+  }
+
+  // Trace the call.
+  if (FLAG_trace) {
+    __ CallRuntime(Runtime::kTraceEnter, 0);
+  }
+  return !is_aborted();
 }
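The new x64 prologue mirrors the ia32 one: it builds the standard frame (saved rbp, then the callee's context and JS function), reserves the frame's stack slots, and under --debug-code fills every fresh slot with kSlotsZapValue so reads of uninitialized slots stand out in a debugger. A rough C++ analogue of that debug fill (the stack is modeled as a plain array here; not V8 code):

#include <stdint.h>
#include <stddef.h>

const uint64_t kSlotsZapValue = 0xbeefdeadbeefdeedULL;  // value from the globals hunk above

// Write the zap pattern into 'count' fresh 8-byte slots, matching what
// the push loop in the prologue does one slot at a time.
void ZapFreshSlots(uint64_t* slots, size_t count) {
  for (size_t i = 0; i < count; ++i) {
    slots[i] = kSlotsZapValue;
  }
}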


@@ -305,15 +305,20 @@ void LAccessArgumentsAt::PrintDataTo(StringStream* stream) {
 int LChunk::GetNextSpillIndex(bool is_double) {
-  // Need to consider what index means: Is it 32 bit or 64 bit index?
-  UNIMPLEMENTED();
-  return 0;
+  return spill_slot_count_++;
 }
 
 LOperand* LChunk::GetNextSpillSlot(bool is_double) {
-  UNIMPLEMENTED();
-  return NULL;
+  // All stack slots are Double stack slots on x64.
+  // Alternatively, at some point, start using half-size
+  // stack slots for int32 values.
+  int index = GetNextSpillIndex(is_double);
+  if (is_double) {
+    return LDoubleStackSlot::Create(index);
+  } else {
+    return LStackSlot::Create(index);
+  }
 }
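GetNextSpillSlot above hands out indices from a single counter regardless of the requested width, because every x64 spill slot is already double-sized; only the operand type wrapped around the index differs. A compact sketch of that scheme with stand-alone, hypothetical types (the real LChunk and LOperand classes live in the lithium headers):

enum SlotKind { kTaggedSlot, kDoubleSlot };

struct Slot {
  SlotKind kind;
  int index;
};

struct SpillAllocator {
  int spill_slot_count_;
  // Tagged and double values share one index space: every slot is
  // wide enough for a double, so no separate double counter is needed.
  Slot GetNextSpillSlot(bool is_double) {
    Slot s;
    s.kind = is_double ? kDoubleSlot : kTaggedSlot;
    s.index = spill_slot_count_++;
    return s;
  }
};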
@@ -737,6 +742,7 @@ LInstruction* LChunkBuilder::DoArithmeticT(Token::Value op,
   return NULL;
 }
 
+
 void LChunkBuilder::DoBasicBlock(HBasicBlock* block, HBasicBlock* next_block) {
   ASSERT(is_building());
   current_block_ = block;