Reverting 2768.

Review URL: http://codereview.chromium.org/173561

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@2769 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
sgjesse@chromium.org 2009-08-27 07:41:31 +00:00
parent 1c46ee8c10
commit a6973777bb
7 changed files with 761 additions and 932 deletions

View File

@@ -133,7 +133,14 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
// problem here, because it is always greater than the maximum
// instance size that can be represented in a byte.
ASSERT(Heap::MaxObjectSizeInPagedSpace() >= JSObject::kMaxInstanceSize);
__ AllocateObjectInNewSpace(edi, ebx, edi, no_reg, &rt_call, false);
ExternalReference new_space_allocation_top =
ExternalReference::new_space_allocation_top_address();
__ mov(ebx, Operand::StaticVariable(new_space_allocation_top));
__ add(edi, Operand(ebx)); // Calculate new top
ExternalReference new_space_allocation_limit =
ExternalReference::new_space_allocation_limit_address();
__ cmp(edi, Operand::StaticVariable(new_space_allocation_limit));
__ j(above_equal, &rt_call);
// Allocated the JSObject, now initialize the fields.
// eax: initial map
// ebx: JSObject
@@ -166,6 +173,7 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
// ebx: JSObject
// edi: start of next object
__ or_(Operand(ebx), Immediate(kHeapObjectTag));
__ mov(Operand::StaticVariable(new_space_allocation_top), edi);
// Check if a non-empty properties array is needed.
// Allocate and initialize a FixedArray if it is.
@@ -190,14 +198,10 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
// edx: number of elements in properties array
ASSERT(Heap::MaxObjectSizeInPagedSpace() >
(FixedArray::kHeaderSize + 255*kPointerSize));
__ AllocateObjectInNewSpace(FixedArray::kHeaderSize,
times_pointer_size,
edx,
edi,
ecx,
no_reg,
&undo_allocation,
true);
__ lea(ecx, Operand(edi, edx, times_pointer_size, FixedArray::kHeaderSize));
__ cmp(ecx, Operand::StaticVariable(new_space_allocation_limit));
__ j(above_equal, &undo_allocation);
__ mov(Operand::StaticVariable(new_space_allocation_top), ecx);
// Initialize the FixedArray.
// ebx: JSObject
@@ -241,7 +245,8 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
// allocated objects unused properties.
// ebx: JSObject (previous new top)
__ bind(&undo_allocation);
__ UndoAllocationInNewSpace(ebx);
__ xor_(Operand(ebx), Immediate(kHeapObjectTag)); // clear the heap tag
__ mov(Operand::StaticVariable(new_space_allocation_top), ebx);
}
// Allocate the new receiver object using the runtime call.
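
The restored sequences above replace the AllocateObjectInNewSpace macro with explicit bump-pointer allocation: load the current new-space allocation top, add the requested size, compare the result against the allocation limit, and either publish the new top or branch to the runtime (&rt_call / &undo_allocation). The following standalone C++ sketch models that fast path with assumed names (NewSpaceModel, AllocateRaw and Undo are illustrative only, not V8's API):

// Minimal model of the bump-pointer fast path encoded by the assembly above.
// All names and types here are assumptions made for illustration.
#include <cstddef>
#include <cstdint>

struct NewSpaceModel {
  uintptr_t top;    // current allocation top (new_space_allocation_top)
  uintptr_t limit;  // allocation limit (new_space_allocation_limit)

  // Returns the untagged start of the new object, or 0 if new space is
  // exhausted; the generated code jumps to a runtime label in that case.
  uintptr_t AllocateRaw(size_t object_size) {
    uintptr_t result = top;
    uintptr_t new_top = result + object_size;
    if (new_top >= limit) return 0;   // mirrors j(above_equal, &rt_call)
    top = new_top;                    // mirrors the store of the new top
    return result;
  }

  // Roll back the most recent allocation, as the &undo_allocation path does
  // after clearing the heap tag from the object pointer.
  void Undo(uintptr_t object_start) { top = object_start; }
};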

View File

@@ -1,56 +1,56 @@
// Copyright 2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#ifndef V8_IA32_CODEGEN_IA32_INL_H_
#define V8_IA32_CODEGEN_IA32_INL_H_
namespace v8 {
namespace internal {
#define __ ACCESS_MASM(masm_)
// Platform-specific inline functions.
void DeferredCode::Jump() { __ jmp(&entry_label_); }
void DeferredCode::Branch(Condition cc) { __ j(cc, &entry_label_); }
void CodeGenerator::GenerateMathSin(ZoneList<Expression*>* args) {
GenerateFastMathOp(SIN, args);
}
void CodeGenerator::GenerateMathCos(ZoneList<Expression*>* args) {
GenerateFastMathOp(COS, args);
}
#undef __
} } // namespace v8::internal
#endif // V8_IA32_CODEGEN_IA32_INL_H_

View File

@@ -6947,18 +6947,21 @@ void FloatingPointHelper::AllocateHeapNumber(MacroAssembler* masm,
Register scratch1,
Register scratch2,
Register result) {
// Allocate heap number in new space.
__ AllocateObjectInNewSpace(HeapNumber::kSize,
result,
scratch1,
scratch2,
need_gc,
false);
// Set the map and tag the result.
ExternalReference allocation_top =
ExternalReference::new_space_allocation_top_address();
ExternalReference allocation_limit =
ExternalReference::new_space_allocation_limit_address();
__ mov(Operand(scratch1), Immediate(allocation_top));
__ mov(result, Operand(scratch1, 0));
__ lea(scratch2, Operand(result, HeapNumber::kSize)); // scratch2: new top
__ cmp(scratch2, Operand::StaticVariable(allocation_limit));
__ j(above, need_gc, not_taken);
__ mov(Operand(scratch1, 0), scratch2); // store new top
__ mov(Operand(result, HeapObject::kMapOffset),
Immediate(Factory::heap_number_map()));
__ or_(Operand(result), Immediate(kHeapObjectTag));
// Tag old top and use as result.
__ add(Operand(result), Immediate(kHeapObjectTag));
}
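
The restored AllocateHeapNumber path above ends by tagging the raw allocation pointer with kHeapObjectTag, while the undo path in the builtins clears the tag before resetting the top. A small sketch of that pointer-tagging convention, assuming V8's usual constants (kHeapObjectTag = 1, kHeapObjectTagMask = 3; the values are not stated in this diff):

// Illustrative tag/untag helpers; the constants are assumed, not taken from the diff.
#include <cassert>
#include <cstdint>

constexpr uintptr_t kHeapObjectTag = 1;
constexpr uintptr_t kHeapObjectTagMask = 3;

inline uintptr_t TagHeapObject(uintptr_t raw) {
  assert((raw & kHeapObjectTagMask) == 0);  // raw allocations are aligned
  return raw + kHeapObjectTag;              // matches add_/or_ with kHeapObjectTag
}

inline uintptr_t UntagHeapObject(uintptr_t tagged) {
  return tagged & ~kHeapObjectTagMask;      // matches and_ with ~kHeapObjectTagMask
}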

File diff suppressed because it is too large.

View File

@@ -620,146 +620,6 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
}
void MacroAssembler::LoadAllocationTopHelper(
Register result,
Register result_end,
Register scratch,
bool result_contains_top_on_entry) {
ExternalReference new_space_allocation_top =
ExternalReference::new_space_allocation_top_address();
// Just return if allocation top is already known.
if (result_contains_top_on_entry) {
// No use of scratch if allocation top is provided.
ASSERT(scratch.is(no_reg));
return;
}
// Move address of new object to result. Use scratch register if available.
if (scratch.is(no_reg)) {
mov(result, Operand::StaticVariable(new_space_allocation_top));
} else {
ASSERT(!scratch.is(result_end));
mov(Operand(scratch), Immediate(new_space_allocation_top));
mov(result, Operand(scratch, 0));
}
}
void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
Register scratch) {
ExternalReference new_space_allocation_top =
ExternalReference::new_space_allocation_top_address();
// Update new top. Use scratch if available.
if (scratch.is(no_reg)) {
mov(Operand::StaticVariable(new_space_allocation_top), result_end);
} else {
mov(Operand(scratch, 0), result_end);
}
}
void MacroAssembler::AllocateObjectInNewSpace(
int object_size,
Register result,
Register result_end,
Register scratch,
Label* gc_required,
bool result_contains_top_on_entry) {
ASSERT(!result.is(result_end));
// Load address of new object into result.
ExternalReference new_space_allocation_limit =
ExternalReference::new_space_allocation_limit_address();
LoadAllocationTopHelper(result,
result_end,
scratch,
result_contains_top_on_entry);
// Calculate new top and bail out if new space is exhausted.
lea(result_end, Operand(result, object_size));
cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
j(above, gc_required, not_taken);
// Update allocation top.
UpdateAllocationTopHelper(result_end, scratch);
}
void MacroAssembler::AllocateObjectInNewSpace(
int header_size,
ScaleFactor element_size,
Register element_count,
Register result,
Register result_end,
Register scratch,
Label* gc_required,
bool result_contains_top_on_entry) {
ASSERT(!result.is(result_end));
// Load address of new object into result.
LoadAllocationTopHelper(result,
result_end,
scratch,
result_contains_top_on_entry);
// Calculate new top and bail out if new space is exhausted.
ExternalReference new_space_allocation_limit =
ExternalReference::new_space_allocation_limit_address();
lea(result_end, Operand(result, element_count, element_size, header_size));
cmp(result, Operand::StaticVariable(new_space_allocation_limit));
j(above, gc_required);
// Update allocation top.
UpdateAllocationTopHelper(result_end, scratch);
}
void MacroAssembler::AllocateObjectInNewSpace(
Register object_size,
Register result,
Register result_end,
Register scratch,
Label* gc_required,
bool result_contains_top_on_entry) {
ASSERT(!result.is(result_end));
// Load address of new object into result.
LoadAllocationTopHelper(result,
result_end,
scratch,
result_contains_top_on_entry);
// Calculate new top and bail out if new space is exhausted.
ExternalReference new_space_allocation_limit =
ExternalReference::new_space_allocation_limit_address();
if (!object_size.is(result_end)) {
mov(result_end, object_size);
}
add(result_end, Operand(result));
cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
j(above, gc_required, not_taken);
// Update allocation top.
UpdateAllocationTopHelper(result_end, scratch);
}
void MacroAssembler::UndoAllocationInNewSpace(Register object) {
ExternalReference new_space_allocation_top =
ExternalReference::new_space_allocation_top_address();
// Make sure the object has no tag before resetting top.
and_(Operand(object), Immediate(~kHeapObjectTagMask));
#ifdef DEBUG
cmp(object, Operand::StaticVariable(new_space_allocation_top));
Check(below, "Undo allocation of non allocated memory");
#endif
mov(Operand::StaticVariable(new_space_allocation_top), object);
}
void MacroAssembler::NegativeZeroTest(CodeGenerator* cgen,
Register result,
Register op,

View File

@@ -183,48 +183,6 @@ class MacroAssembler: public Assembler {
Label* miss);
// ---------------------------------------------------------------------------
// Allocation support
// Allocate an object in new space. If the new space is exhausted control
// continues at the gc_required label. The allocated object is returned in
// result and end of the new object is returned in result_end. The register
// scratch can be passed as no_reg in which case an additional object
// reference will be added to the reloc info. The returned pointers in result
// and result_end have not yet been tagged as heap objects. If
// result_contains_top_on_entry is true the content of result is known to be
// the allocation top on entry (could be result_end from a previous call to
// AllocateObjectInNewSpace). If result_contains_top_on_entry is true scratch
// should be no_reg as it is never used.
void AllocateObjectInNewSpace(int object_size,
Register result,
Register result_end,
Register scratch,
Label* gc_required,
bool result_contains_top_on_entry);
void AllocateObjectInNewSpace(int header_size,
ScaleFactor element_size,
Register element_count,
Register result,
Register result_end,
Register scratch,
Label* gc_required,
bool result_contains_top_on_entry);
void AllocateObjectInNewSpace(Register object_size,
Register result,
Register result_end,
Register scratch,
Label* gc_required,
bool result_contains_top_on_entry);
// Undo allocation in new space. The object passed and objects allocated after
// it will no longer be allocated. Make sure that no pointers are left to the
// object(s) no longer allocated as they would be invalid when allocation is
// un-done.
void UndoAllocationInNewSpace(Register object);
// ---------------------------------------------------------------------------
// Support functions.
@@ -345,13 +303,6 @@ class MacroAssembler: public Assembler {
// Activation support.
void EnterFrame(StackFrame::Type type);
void LeaveFrame(StackFrame::Type type);
// Allocation support helpers.
void LoadAllocationTopHelper(Register result,
Register result_end,
Register scratch,
bool result_contains_top_on_entry);
void UpdateAllocationTopHelper(Register result_end, Register scratch);
};
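
For reference, the three removed AllocateObjectInNewSpace overloads above differ only in how the object end address is computed before the limit check. A hedged integer model of those three computations (plain functions with assumed names, not the MacroAssembler interface itself):

// Illustrative model of the end-address computation in the removed overloads.
#include <cstdint>

// Fixed-size overload: lea(result_end, Operand(result, object_size)).
uintptr_t EndForFixedSize(uintptr_t result, int object_size) {
  return result + static_cast<uintptr_t>(object_size);
}

// Header-plus-scaled-elements overload:
// lea(result_end, Operand(result, element_count, element_size, header_size)).
uintptr_t EndForScaledElements(uintptr_t result, int header_size,
                               int element_size_log2, uintptr_t element_count) {
  return result + static_cast<uintptr_t>(header_size) +
         (element_count << element_size_log2);
}

// Register-size overload: mov(result_end, object_size); add(result_end, result).
uintptr_t EndForRegisterSize(uintptr_t result, uintptr_t object_size) {
  return result + object_size;
}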

View File

@@ -1786,7 +1786,17 @@ Object* ConstructStubCompiler::CompileConstructStub(
// Make sure that the maximum heap object size will never cause us
// problems here.
ASSERT(Heap::MaxObjectSizeInPagedSpace() >= JSObject::kMaxInstanceSize);
__ AllocateObjectInNewSpace(ecx, edx, ecx, no_reg, &generic_stub_call, false);
ExternalReference new_space_allocation_top =
ExternalReference::new_space_allocation_top_address();
__ mov(edx, Operand::StaticVariable(new_space_allocation_top));
__ add(ecx, Operand(edx)); // Calculate new top.
ExternalReference new_space_allocation_limit =
ExternalReference::new_space_allocation_limit_address();
__ cmp(ecx, Operand::StaticVariable(new_space_allocation_limit));
__ j(above_equal, &generic_stub_call);
// Update new space top.
__ mov(Operand::StaticVariable(new_space_allocation_top), ecx);
// Allocated the JSObject, now initialize the fields and add the heap tag.
// ebx: initial map
@@ -1850,9 +1860,9 @@ Object* ConstructStubCompiler::CompileConstructStub(
__ mov(ebx, eax);
__ pop(eax);
// Remove caller arguments and receiver from the stack and return.
// Remove caller arguments from the stack and return.
__ pop(ecx);
__ lea(esp, Operand(esp, ebx, times_pointer_size, 1 * kPointerSize));
__ lea(esp, Operand(esp, ebx, times_4, 1 * kPointerSize)); // 1 ~ receiver
__ push(ecx);
__ IncrementCounter(&Counters::constructed_objects, 1);
__ IncrementCounter(&Counters::constructed_objects_stub, 1);
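
In the last hunk, the epilogue pops the return address into ecx and then adjusts esp by the argument count (scaled from ebx by the pointer size) plus one extra slot for the receiver, before pushing the return address back. A quick standalone check of that arithmetic, assuming kPointerSize = 4 on ia32:

// Sanity check of the stack adjustment performed by
// lea(esp, Operand(esp, ebx, times_4, 1 * kPointerSize)).
#include <cassert>
#include <cstdint>

int main() {
  const uintptr_t kPointerSize = 4;  // assumed ia32 pointer size
  uintptr_t esp = 0x1000;            // esp after the return address was popped
  uintptr_t argc = 3;                // example argument count (scaled register)
  uintptr_t new_esp = esp + argc * kPointerSize + 1 * kPointerSize;
  assert(new_esp == esp + (argc + 1) * kPointerSize);  // arguments + receiver
  return 0;
}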