From 9f5c1bfba188cbb3b92bf514acb22f493f1339d9 Mon Sep 17 00:00:00 2001
From: "lrn@chromium.org" <lrn@chromium.org@ce2b1a6d-e550-0410-aec6-3dcde31c8c00>
Date: Fri, 14 Jan 2011 13:16:48 +0000
Subject: [PATCH] X64 Crankshaft: Added GeneratePrologue implementation.

Review URL: http://codereview.chromium.org/6326003

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@6324 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
---
 src/ia32/lithium-codegen-ia32.cc | 11 ++++++++
 src/lithium-allocator.h          |  2 +-
 src/v8globals.h                  |  3 +-
 src/x64/lithium-codegen-x64.cc   | 48 ++++++++++++++++++++++++++++++--
 src/x64/lithium-x64.cc           | 16 +++++++----
 5 files changed, 71 insertions(+), 9 deletions(-)

diff --git a/src/ia32/lithium-codegen-ia32.cc b/src/ia32/lithium-codegen-ia32.cc
index 85efcc9907..7f9b5bd2cd 100644
--- a/src/ia32/lithium-codegen-ia32.cc
+++ b/src/ia32/lithium-codegen-ia32.cc
@@ -290,6 +290,17 @@ bool LCodeGen::GeneratePrologue() {
       __ j(not_zero, &loop);
     } else {
       __ sub(Operand(esp), Immediate(slots * kPointerSize));
+#ifdef _MSC_VER
+      // On Windows, you may not access the stack more than one page below
+      // the most recently mapped page. To make the allocated area randomly
+      // accessible, we write to each page in turn (the value is irrelevant).
+      const int kPageSize = 4 * KB;
+      for (int offset = slots * kPointerSize - kPageSize;
+           offset > 0;
+           offset -= kPageSize) {
+        __ mov(Operand(esp, offset), eax);
+      }
+#endif
     }
   }
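The _MSC_VER block above is the standard Windows stack-probe idiom: Windows commits stack pages lazily, one guard page at a time, so code may not touch the stack more than one page below the lowest page already mapped. A frame larger than 4 KB therefore has to touch each new page in order, starting next to the already-committed region and walking downward, before the area can be written at arbitrary offsets. The same idiom reappears in the x64 prologue further down. A minimal standalone C++ sketch of what the emitted loop does (hypothetical helper, not part of this patch):

// After the stack pointer has been lowered by `bytes`, touch one byte in
// every 4 KB page, starting just below the already-mapped region and
// walking down one page at a time. The value written is irrelevant; the
// write exists only to trigger the guard page and commit the next page.
void ProbeStackPages(volatile char* new_stack_top, long bytes) {
  const long kPageSize = 4 * 1024;
  for (long offset = bytes - kPageSize; offset > 0; offset -= kPageSize) {
    new_stack_top[offset] = 0;
  }
}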
diff --git a/src/lithium-allocator.h b/src/lithium-allocator.h
index 3cb28a7be1..dfe1953df6 100644
--- a/src/lithium-allocator.h
+++ b/src/lithium-allocator.h
@@ -705,6 +705,7 @@ class LiveRange: public ZoneObject {
   bool HasAllocatedSpillOperand() const {
     return spill_operand_ != NULL && !spill_operand_->IsUnallocated();
   }
+  LOperand* GetSpillOperand() const { return spill_operand_; }
   void SetSpillOperand(LOperand* operand) {
     ASSERT(!operand->IsUnallocated());
 
@@ -722,7 +723,6 @@
   bool Covers(LifetimePosition position);
   LifetimePosition FirstIntersection(LiveRange* other);
 
-  // Add a new interval or a new use position to this live range.
   void EnsureInterval(LifetimePosition start, LifetimePosition end);
   void AddUseInterval(LifetimePosition start, LifetimePosition end);
diff --git a/src/v8globals.h b/src/v8globals.h
index 65bbf6ab24..3f27114bec 100644
--- a/src/v8globals.h
+++ b/src/v8globals.h
@@ -77,7 +77,8 @@
 const Address kHandleZapValue =
     reinterpret_cast<Address>(V8_UINT64_C(0x1baddead0baddead));
 const Address kFromSpaceZapValue =
     reinterpret_cast<Address>(V8_UINT64_C(0x1beefdad0beefdad));
-const uint64_t kDebugZapValue = 0xbadbaddbbadbaddb;
+const uint64_t kDebugZapValue = V8_UINT64_C(0xbadbaddbbadbaddb);
+const uint64_t kSlotsZapValue = V8_UINT64_C(0xbeefdeadbeefdeed);
 #else
 const Address kZapValue = reinterpret_cast<Address>(0xdeadbeed);
 const Address kHandleZapValue = reinterpret_cast<Address>(0xbaddead);
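kSlotsZapValue joins the existing family of zap values: distinctive bit patterns written over memory that must never be read as a live value, so that a stale read is immediately recognizable in a debugger or crash dump. The x64 prologue below uses it to pre-fill freshly reserved stack slots when --debug-code is enabled. A small C++ sketch of the convention (hypothetical helper, not V8 code):

#include <stdint.h>

const uint64_t kSlotsZap = 0xbeefdeadbeefdeedULL;  // Same pattern as kSlotsZapValue.

// Fill fresh stack slots with the zap pattern. Any slot found still
// holding it was read before ever being written by compiled code.
void ZapSlots(uint64_t* slots, int count) {
  for (int i = 0; i < count; i++) {
    slots[i] = kSlotsZap;
  }
}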
diff --git a/src/x64/lithium-codegen-x64.cc b/src/x64/lithium-codegen-x64.cc
index ebc807d3f5..5763d4bbdb 100644
--- a/src/x64/lithium-codegen-x64.cc
+++ b/src/x64/lithium-codegen-x64.cc
@@ -242,8 +242,52 @@ void LCodeGen::Comment(const char* format, ...) {
 
 
 bool LCodeGen::GeneratePrologue() {
-  Abort("Unimplemented: %s", "GeneratePrologue");
-  return false;
+  ASSERT(is_generating());
+
+#ifdef DEBUG
+  if (strlen(FLAG_stop_at) > 0 &&
+      info_->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
+    __ int3();
+  }
+#endif
+
+  __ push(rbp);  // Caller's frame pointer.
+  __ movq(rbp, rsp);
+  __ push(rsi);  // Callee's context.
+  __ push(rdi);  // Callee's JS function.
+
+  // Reserve space for the stack slots needed by the code.
+  int slots = StackSlotCount();
+  if (slots > 0) {
+    if (FLAG_debug_code) {
+      __ movl(rax, Immediate(slots));
+      __ movq(kScratchRegister, kSlotsZapValue, RelocInfo::NONE);
+      Label loop;
+      __ bind(&loop);
+      __ push(kScratchRegister);
+      __ decl(rax);
+      __ j(not_zero, &loop);
+    } else {
+      __ subq(rsp, Immediate(slots * kPointerSize));
+#ifdef _MSC_VER
+      // On Windows, you may not access the stack more than one page below
+      // the most recently mapped page. To make the allocated area randomly
+      // accessible, we write to each page in turn (the value is irrelevant).
+      const int kPageSize = 4 * KB;
+      for (int offset = slots * kPointerSize - kPageSize;
+           offset > 0;
+           offset -= kPageSize) {
+        __ movq(Operand(rsp, offset), rax);
+      }
+#endif
+    }
+  }
+
+  // Trace the call.
+  if (FLAG_trace) {
+    __ CallRuntime(Runtime::kTraceEnter, 0);
+  }
+  return !is_aborted();
 }
 
 
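Two paths reserve the spill area in the new prologue: with --debug-code, every slot is pushed one at a time and pre-filled with kSlotsZapValue (which incidentally touches every page, so that path needs no separate Windows probe); otherwise rsp is dropped in a single subq, with the same _MSC_VER page-touch loop as on ia32. The pushes fix the frame shape: caller's rbp at [rbp], then context and function below it, then the spill slots. A hedged sketch of the implied rbp-relative addressing (the fixed-slot count of 2 reflects the rsi/rdi pushes above; the actual offset computation lives elsewhere in LCodeGen and may differ):

// Offset of spill slot `index` relative to rbp under the frame layout
// built by GeneratePrologue: [rbp - 8] holds the context, [rbp - 16]
// the JS function, and spill slot 0 starts at [rbp - 24].
int SpillSlotOffset(int index) {
  const int kPointerSize = 8;      // Every x64 slot is pointer-sized.
  const int kFixedFrameSlots = 2;  // Saved context (rsi) and function (rdi).
  return -(kFixedFrameSlots + 1 + index) * kPointerSize;
}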
diff --git a/src/x64/lithium-x64.cc b/src/x64/lithium-x64.cc
index 4a7b3aa3ae..f13e690873 100644
--- a/src/x64/lithium-x64.cc
+++ b/src/x64/lithium-x64.cc
@@ -305,15 +305,20 @@ void LAccessArgumentsAt::PrintDataTo(StringStream* stream) {
 
 
 int LChunk::GetNextSpillIndex(bool is_double) {
-  // Need to consider what index means: Is it 32 bit or 64 bit index?
-  UNIMPLEMENTED();
-  return 0;
+  return spill_slot_count_++;
 }
 
 
 LOperand* LChunk::GetNextSpillSlot(bool is_double) {
-  UNIMPLEMENTED();
-  return NULL;
+  // All stack slots are Double stack slots on x64.
+  // Alternatively, at some point, start using half-size
+  // stack slots for int32 values.
+  int index = GetNextSpillIndex(is_double);
+  if (is_double) {
+    return LDoubleStackSlot::Create(index);
+  } else {
+    return LStackSlot::Create(index);
+  }
 }
 
 
@@ -737,6 +742,7 @@ LInstruction* LChunkBuilder::DoArithmeticT(Token::Value op,
   return NULL;
 }
 
+
 void LChunkBuilder::DoBasicBlock(HBasicBlock* block, HBasicBlock* next_block) {
   ASSERT(is_building());
   current_block_ = block;
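GetNextSpillIndex can ignore is_double and simply bump a counter because on x64 every spill slot is already pointer-sized (8 bytes), wide enough for a tagged value or an untagged double alike; on ia32, by contrast, a double needs two consecutive 4-byte slots. A simplified side-by-side sketch of the two policies (illustrative only, not the actual ia32 code):

// x64: one 8-byte slot fits either kind of value, so the index is a
// plain running counter regardless of is_double.
int NextSpillIndexX64(int* count, bool /*is_double*/) {
  return (*count)++;
}

// ia32-style: a double occupies two consecutive 4-byte slots, so the
// allocator must skip ahead and keep doubles 8-byte aligned.
int NextSpillIndexIA32(int* count, bool is_double) {
  if (is_double) {
    *count += *count & 1;  // Round up to an even slot index.
    int index = *count;
    *count += 2;
    return index;
  }
  return (*count)++;
}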