- Make VirtualFrame and JumpTarget zone allocated.
- Compacted the VirtualFrame representation -> improved compiler speed by 10%.

Review URL: http://codereview.chromium.org/115345

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@1947 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
commit c70ec7a3d2 (parent f48a6bf30f)
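For context on what "zone allocated" means here: V8 allocates most compiler-phase objects in a zone (an arena) that is discarded in one step when compilation finishes, rather than doing a malloc/free per object. The sketch below only illustrates that idea; it is not V8's actual Zone/ZoneObject code, and the names, segment size, and placement-new interface are assumptions made for illustration.

#include <cstddef>
#include <vector>

// Hypothetical bump-pointer arena. Objects are carved out of large segments
// and never freed individually; all memory is released when the zone dies.
class Zone {
 public:
  Zone() : position_(kSegmentSize) {}
  ~Zone() {
    // Bulk release: the only place zone memory is ever freed.
    for (size_t i = 0; i < segments_.size(); i++) delete[] segments_[i];
  }

  // Assumes size <= kSegmentSize (enough for the small compiler objects
  // this sketch is about).
  void* New(size_t size) {
    size = (size + 7) & ~static_cast<size_t>(7);  // keep 8-byte alignment
    if (position_ + size > kSegmentSize) {        // current segment is full
      segments_.push_back(new char[kSegmentSize]);
      position_ = 0;
    }
    void* result = segments_.back() + position_;
    position_ += size;                            // bump, no matching free
    return result;
  }

 private:
  static const size_t kSegmentSize = 8 * 1024;
  std::vector<char*> segments_;
  size_t position_;
};

// Anything derived from this lives in a zone; deleting it individually is a
// no-op because the zone reclaims the memory wholesale.
class ZoneObject {
 public:
  void* operator new(size_t size, Zone* zone) { return zone->New(size); }
  void operator delete(void*, Zone*) {}  // placement counterpart
  void operator delete(void*) {}         // per-object delete does nothing
};

Deriving VirtualFrame and JumpTarget from a ZoneObject-style base lets the code generator create and copy frames freely with no matching deletes, which is what the jump-target.cc hunks further down rely on.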
@@ -102,7 +102,7 @@ void VirtualFrame::MergeTo(VirtualFrame* expected) {
   // Fix any sync bit problems from the bottom-up, stopping when we
   // hit the stack pointer or the top of the frame if the stack
   // pointer is floating above the frame.
-  int limit = Min(stack_pointer_, elements_.length() - 1);
+  int limit = Min(static_cast<int>(stack_pointer_), elements_.length() - 1);
   for (int i = 0; i <= limit; i++) {
     FrameElement source = elements_[i];
     FrameElement target = expected->elements_[i];
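The only change in this hunk (and in the similar hunks below) is the cast. Assuming V8's Min is the usual single-type function template from utils.h, the cast becomes necessary once stack_pointer_ is narrowed to int16_t, because length() still returns int and template argument deduction needs both arguments to agree on one T. A minimal sketch of that failure mode, with the Min signature assumed:

#include <stdint.h>

// Assumed shape of the Min helper; one template parameter must be deduced
// identically from both arguments.
template <typename T>
T Min(T a, T b) { return a < b ? a : b; }

void Example() {
  int16_t stack_pointer = 3;  // the frame field is now 16 bits wide
  int length = 10;            // element lists report their length as int
  // Min(stack_pointer, length - 1) would fail to compile: T deduces to
  // int16_t from the first argument and to int from the second.
  int limit = Min(static_cast<int>(stack_pointer), length - 1);  // T = int
  (void)limit;
}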
@@ -134,7 +134,7 @@ void VirtualFrame::MergeMoveRegistersToMemory(VirtualFrame* expected) {
   // On ARM, all elements are in memory.

 #ifdef DEBUG
-  int start = Min(stack_pointer_, elements_.length() - 1);
+  int start = Min(static_cast<int>(stack_pointer_), elements_.length() - 1);
   for (int i = start; i >= 0; i--) {
     ASSERT(elements_[i].is_memory());
     ASSERT(expected->elements_[i].is_memory());
@@ -41,7 +41,7 @@ namespace v8 { namespace internal {
 // as random access to the expression stack elements, locals, and
 // parameters.

-class VirtualFrame : public Malloced {
+class VirtualFrame : public ZoneObject {
  public:
   // A utility class to introduce a scope where the virtual frame is
   // expected to remain spilled. The constructor spills the code
@@ -335,23 +335,23 @@ class VirtualFrame : public Malloced {
   CodeGenerator* cgen_;
   MacroAssembler* masm_;

-  List<FrameElement> elements_;
+  ZoneList<FrameElement> elements_;

   // The number of frame-allocated locals and parameters respectively.
-  int parameter_count_;
-  int local_count_;
+  int16_t parameter_count_;
+  int16_t local_count_;

   // The index of the element that is at the processor's stack pointer
   // (the sp register).
-  int stack_pointer_;
+  int16_t stack_pointer_;

   // The index of the element that is at the processor's frame pointer
   // (the fp register).
-  int frame_pointer_;
+  int16_t frame_pointer_;

   // The index of the register frame element using each register, or
   // kIllegalIndex if a register is not on the frame.
-  int register_locations_[kNumRegisters];
+  int16_t register_locations_[kNumRegisters];

   // The index of the first parameter. The receiver lies below the first
   // parameter.
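This header hunk is the "compacted representation" half of the commit message: the per-frame bookkeeping fields shrink from int to int16_t, which is plenty for frame indices. Since the code generator copies the virtual frame at jump targets, a smaller VirtualFrame makes those copies and their zone allocations cheaper, which is presumably where the quoted 10% comes from. A rough, self-contained illustration of the size effect; the field names mirror the diff, the kNumRegisters value is assumed:

#include <stdint.h>
#include <stdio.h>

static const int kNumRegisters = 16;  // assumed register count for the sketch

struct WideFrameFields {    // before: plain int fields
  int parameter_count_;
  int local_count_;
  int stack_pointer_;
  int frame_pointer_;
  int register_locations_[kNumRegisters];
};

struct NarrowFrameFields {  // after: 16-bit fields still hold any frame index
  int16_t parameter_count_;
  int16_t local_count_;
  int16_t stack_pointer_;
  int16_t frame_pointer_;
  int16_t register_locations_[kNumRegisters];
};

int main() {
  // Typically prints "80 -> 40 bytes" on common ABIs.
  printf("%u -> %u bytes\n",
         static_cast<unsigned>(sizeof(WideFrameFields)),
         static_cast<unsigned>(sizeof(NarrowFrameFields)));
  return 0;
}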
@@ -244,7 +244,7 @@ void VirtualFrame::MergeMoveRegistersToMemory(VirtualFrame* expected) {
   FrameElement memory_element = FrameElement::MemoryElement();
   // Loop downward from the stack pointer or the top of the frame if
   // the stack pointer is floating above the frame.
-  int start = Min(stack_pointer_, elements_.length() - 1);
+  int start = Min(static_cast<int>(stack_pointer_), elements_.length() - 1);
   for (int i = start; i >= 0; i--) {
     FrameElement target = expected->elements_[i];
     if (target.is_memory()) {
@@ -41,7 +41,7 @@ namespace v8 { namespace internal {
 // as random access to the expression stack elements, locals, and
 // parameters.

-class VirtualFrame : public Malloced {
+class VirtualFrame : public ZoneObject {
  public:
   // A utility class to introduce a scope where the virtual frame is
   // expected to remain spilled. The constructor spills the code
@@ -349,23 +349,23 @@ class VirtualFrame : public Malloced {
   CodeGenerator* cgen_;
   MacroAssembler* masm_;

-  List<FrameElement> elements_;
+  ZoneList<FrameElement> elements_;

   // The number of frame-allocated locals and parameters respectively.
-  int parameter_count_;
-  int local_count_;
+  int16_t parameter_count_;
+  int16_t local_count_;

   // The index of the element that is at the processor's stack pointer
   // (the esp register).
-  int stack_pointer_;
+  int16_t stack_pointer_;

   // The index of the element that is at the processor's frame pointer
   // (the ebp register).
-  int frame_pointer_;
+  int16_t frame_pointer_;

   // The index of the register frame element using each register, or
   // kIllegalIndex if a register is not on the frame.
-  int register_locations_[kNumRegisters];
+  int16_t register_locations_[kNumRegisters];

   // The index of the first parameter. The receiver lies below the first
   // parameter.
@@ -41,11 +41,11 @@ bool JumpTarget::compiling_deferred_code_ = false;
 JumpTarget::JumpTarget(CodeGenerator* cgen, Directionality direction)
     : cgen_(cgen),
       direction_(direction),
+      is_bound_(false),
+      is_linked_(false),
       reaching_frames_(0),
       merge_labels_(0),
-      entry_frame_(NULL),
-      is_bound_(false),
-      is_linked_(false) {
+      entry_frame_(NULL) {
   ASSERT(cgen != NULL);
   masm_ = cgen->masm();
 }
@@ -55,11 +55,11 @@ JumpTarget::JumpTarget()
     : cgen_(NULL),
       masm_(NULL),
       direction_(FORWARD_ONLY),
+      is_bound_(false),
+      is_linked_(false),
       reaching_frames_(0),
       merge_labels_(0),
-      entry_frame_(NULL),
-      is_bound_(false),
-      is_linked_(false) {
+      entry_frame_(NULL) {
 }

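The two constructor hunks above only reorder the initializer lists; they mirror the jump-target.h hunk further down, which moves the is_bound_/is_linked_ declarations ahead of the reaching_frames_, merge_labels_, and entry_frame_ members. Keeping the two orders in sync matters because C++ initializes members in declaration order, not in initializer-list order, and compilers warn when the lists disagree (GCC/Clang -Wreorder). A tiny illustration with a hypothetical class, not V8 code:

class Example {
 public:
  // -Wreorder: b_ is listed first, but members are constructed in
  // declaration order, so a_ is actually initialized before b_.
  Example() : b_(1), a_(2) {}
 private:
  int a_;
  int b_;
};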
@@ -73,19 +73,6 @@ void JumpTarget::Initialize(CodeGenerator* cgen, Directionality direction) {


-void JumpTarget::Unuse() {
-  // We should not deallocate jump targets that have unresolved jumps
-  // to them. In the event of a compile-time stack overflow or an
-  // uninitialized jump target, we don't care.
-  ASSERT(!is_linked() || cgen_ == NULL || cgen_->HasStackOverflow());
-  for (int i = 0; i < reaching_frames_.length(); i++) {
-    delete reaching_frames_[i];
-  }
-  delete entry_frame_;
-  Reset();
-}
-
-
 void JumpTarget::Reset() {
   reaching_frames_.Clear();
   merge_labels_.Clear();
   entry_frame_ = NULL;
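The removed Unuse above was the only place that deleted the reaching frames and the entry frame one by one. With VirtualFrame (and JumpTarget itself) now zone allocated, those per-object deletes are unnecessary: dropping the pointers is enough, and the underlying storage is reclaimed in bulk when the compilation zone is disposed, so the Reset-style clearing that remains suffices. A short sketch of the pattern, reusing the hypothetical Zone/ZoneObject from the first example; FrameLike is a stand-in, not a V8 class:

class FrameLike : public ZoneObject {
 public:
  explicit FrameLike(int height) : height_(height) {}
  int height() const { return height_; }
 private:
  int height_;
};

void CompileFunctionBody(Zone* zone) {
  FrameLike* frame = new (zone) FrameLike(3);  // lives in the zone
  // ... jump targets record pointers to frames like this one ...
  (void)frame;
  // No per-frame delete here: clearing the pointer lists is enough, and
  // the memory itself is released when *zone is destroyed.
}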
@@ -662,10 +649,8 @@ ShadowTarget::ShadowTarget(BreakTarget* shadowed) {
   // While shadowing this shadow target saves the state of the original.
   shadowed->CopyTo(this);

-  // The original's state is reset. We do not Unuse it because that
-  // would delete the expected frame and assert that the target is not
-  // linked.
-  shadowed->Reset();
+  // The original's state is reset.
+  shadowed->Unuse();
   ASSERT(cgen_ != NULL);
   ASSERT(cgen_->has_valid_frame());
   shadowed->set_expected_height(cgen_->frame()->height());
@@ -691,7 +676,7 @@ void ShadowTarget::StopShadowing() {
   other_target_->CopyTo(&temp);
   CopyTo(other_target_);
   temp.CopyTo(this);
-  temp.Reset();  // So the destructor does not deallocate virtual frames.
+  temp.Unuse();

 #ifdef DEBUG
   is_shadowing_ = false;
@@ -52,7 +52,7 @@ class VirtualFrame;
 // In particular, this means that at least one of the control-flow
 // graph edges reaching the target must be a forward edge.

-class JumpTarget : public Malloced {  // Shadows are dynamically allocated.
+class JumpTarget : public ZoneObject {  // Shadows are dynamically allocated.
  public:
   // Forward-only jump targets can only be reached by forward CFG edges.
   enum Directionality { FORWARD_ONLY, BIDIRECTIONAL };
@@ -75,18 +75,9 @@ class JumpTarget : public Malloced {  // Shadows are dynamically allocated.
   virtual void Initialize(CodeGenerator* cgen,
                           Directionality direction = FORWARD_ONLY);

-  virtual ~JumpTarget() { Unuse(); }
-
-  // Treat the jump target as a fresh one. The state is reset and
-  // pointed-to virtual frames are deallocated. There should be no
-  // dangling jumps to the target.
+  // Treat the jump target as a fresh one. The state is reset.
   void Unuse();

-  // Reset the internal state of this jump target. Pointed-to virtual
-  // frames are not deallocated and dangling jumps to the target are
-  // left dangling.
-  void Reset();
-
   // Accessors.
   CodeGenerator* code_generator() const { return cgen_; }

@@ -175,11 +166,17 @@ class JumpTarget : public Malloced {  // Shadows are dynamically allocated.
   // Directionality flag set at initialization time.
   Directionality direction_;

+  // A target is bound if its Bind member function has been called.
+  // It is linked if it is not bound but its Jump, Branch, or Call
+  // member functions have been called.
+  bool is_bound_;
+  bool is_linked_;
+
   // A list of frames reaching this block via forward jumps.
-  List<VirtualFrame*> reaching_frames_;
+  ZoneList<VirtualFrame*> reaching_frames_;

   // A parallel list of labels for merge code.
-  List<Label> merge_labels_;
+  ZoneList<Label> merge_labels_;

   // The frame used on entry to the block and expected at backward
   // jumps to the block. Set when the jump target is bound, but may
@@ -189,12 +186,6 @@ class JumpTarget : public Malloced {  // Shadows are dynamically allocated.
   // The actual entry label of the block.
   Label entry_label_;

-  // A target is bound if its Bind member function has been called.
-  // It is linked if it is not bound but its Jump, Branch, or Call
-  // member functions have been called.
-  bool is_bound_;
-  bool is_linked_;
-
   // Implementations of Jump, Branch, and Bind with all arguments and
   // return values using the virtual frame.
   void DoJump();
@@ -296,10 +287,6 @@ class ShadowTarget : public BreakTarget {
   // flow intended for the shadowed one.
   explicit ShadowTarget(BreakTarget* shadowed);

-  virtual ~ShadowTarget() {
-    ASSERT(!is_shadowing_);
-  }
-
   // End shadowing. After shadowing ends, the original jump target
   // again gives access to the formerly shadowed target and the shadow
   // target object gives access to the formerly shadowing target.