Add hydrogen support for ArrayPop, and remove the handwritten call stubs.

BUG=
R=mvstanton@chromium.org

Review URL: https://codereview.chromium.org/137783023

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@18709 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
Author: verwaest@chromium.org
Date:   2014-01-21 12:42:24 +00:00
Parent: 1864f7388e
Commit: 2d9a4eb355
6 changed files with 48 additions and 288 deletions
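For orientation: the hunks below are, in order, the ARM stub compiler (r0/ldr), the hydrogen graph builder, the ia32 stub compiler (eax), the MIPS stub compiler (a0/lw), the CallStubCompiler declaration list, and the x64 stub compiler (rax/movp). Each per-architecture hunk deletes the same hand-written CompileArrayPopCall fast path, which the new hydrogen code re-expresses as graph instructions. A minimal, self-contained C++ sketch of that shared fast path follows; FastArray and the integer hole sentinel are illustrative stand-ins, not V8 types. When the last slot holds the hole, the real code falls back to the generic Builtins::c_ArrayPop, since the actual pop may then have to read the value from the prototype chain.

#include <cstddef>
#include <vector>

// Illustrative stand-in for V8's JSArray plus its FixedArray backing store.
struct FastArray {
  std::vector<int> elements;  // models the FixedArray elements
  std::size_t length = 0;     // models the JSArray length field
};

constexpr int kTheHole = -1;  // assumed sentinel playing the role of "the hole"

enum class PopOutcome { kValue, kUndefined, kFallBackToBuiltin };

// Mirrors the stub/hydrogen sequence: compute length - 1, read the last slot,
// bail out on a hole, otherwise write the hole back and shrink the length.
PopOutcome FastArrayPop(FastArray* a, int* out) {
  if (a->length == 0) return PopOutcome::kUndefined;  // return_undefined path
  std::size_t new_length = a->length - 1;
  int last = a->elements[new_length];
  if (last == kTheHole) return PopOutcome::kFallBackToBuiltin;  // call_builtin
  a->elements[new_length] = kTheHole;  // fill the vacated slot with the hole
  a->length = new_length;              // store the shrunken length
  *out = last;
  return PopOutcome::kValue;
}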


@@ -1571,79 +1571,6 @@ Handle<Code> CallStubCompiler::CompileCallField(Handle<JSObject> object,
}
Handle<Code> CallStubCompiler::CompileArrayPopCall(
Handle<Object> object,
Handle<JSObject> holder,
Handle<Cell> cell,
Handle<JSFunction> function,
Handle<String> name,
Code::StubType type) {
// If object is not an array or is observed or sealed, bail out to regular
// call.
if (!object->IsJSArray() ||
!cell.is_null() ||
Handle<JSArray>::cast(object)->map()->is_observed() ||
!Handle<JSArray>::cast(object)->map()->is_extensible()) {
return Handle<Code>::null();
}
Label miss, return_undefined, call_builtin;
Register receiver = r0;
Register scratch = r1;
Register elements = r3;
HandlerFrontendHeader(object, holder, name, RECEIVER_MAP_CHECK, &miss);
// Get the elements array of the object.
__ ldr(elements, FieldMemOperand(receiver, JSArray::kElementsOffset));
// Check that the elements are in fast mode and writable.
__ CheckMap(elements,
scratch,
Heap::kFixedArrayMapRootIndex,
&call_builtin,
DONT_DO_SMI_CHECK);
// Get the array's length into r4 and calculate new length.
__ ldr(r4, FieldMemOperand(receiver, JSArray::kLengthOffset));
__ sub(r4, r4, Operand(Smi::FromInt(1)), SetCC);
__ b(lt, &return_undefined);
// Get the last element.
__ LoadRoot(r6, Heap::kTheHoleValueRootIndex);
// We can't address the last element in one operation. Compute the more
// expensive shift first, and use an offset later on.
__ add(elements, elements, Operand::PointerOffsetFromSmiKey(r4));
__ ldr(scratch, FieldMemOperand(elements, FixedArray::kHeaderSize));
__ cmp(scratch, r6);
__ b(eq, &call_builtin);
// Set the array's length.
__ str(r4, FieldMemOperand(receiver, JSArray::kLengthOffset));
// Fill with the hole.
__ str(r6, FieldMemOperand(elements, FixedArray::kHeaderSize));
const int argc = arguments().immediate();
__ Drop(argc + 1);
__ mov(r0, scratch);
__ Ret();
__ bind(&return_undefined);
__ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
__ Drop(argc + 1);
__ Ret();
__ bind(&call_builtin);
__ TailCallExternalReference(
ExternalReference(Builtins::c_ArrayPop, isolate()), argc + 1, 1);
HandlerFrontendFooter(&miss);
// Return the generated code.
return GetCode(type, name);
}
Handle<Code> CallStubCompiler::CompileFastApiCall(
const CallOptimization& optimization,
Handle<Object> object,


@@ -7614,6 +7614,53 @@ bool HOptimizedGraphBuilder::TryInlineBuiltinMethodCall(
return true;
}
break;
case kArrayPop: {
if (!expr->IsMonomorphic() || expr->check_type() != RECEIVER_MAP_CHECK) {
return false;
}
if (receiver_map->instance_type() != JS_ARRAY_TYPE) return false;
ElementsKind elements_kind = receiver_map->elements_kind();
if (!IsFastElementsKind(elements_kind)) return false;
AddCheckConstantFunction(expr->holder(), receiver, receiver_map);
Drop(expr->arguments()->length());
HValue* result;
HValue* checked_object;
HValue* reduced_length;
HValue* receiver = Pop();
{ NoObservableSideEffectsScope scope(this);
checked_object = AddCheckMap(receiver, receiver_map);
HValue* elements = AddLoadElements(checked_object);
// Ensure that we aren't popping from a copy-on-write array.
if (IsFastSmiOrObjectElementsKind(elements_kind)) {
Add<HCheckMaps>(
elements, isolate()->factory()->fixed_array_map(), top_info());
}
HValue* length = Add<HLoadNamedField>(
checked_object, HObjectAccess::ForArrayLength(elements_kind));
reduced_length = AddUncasted<HSub>(length, graph()->GetConstant1());
HValue* bounds_check = Add<HBoundsCheck>(
graph()->GetConstant0(), length);
result = AddElementAccess(elements, reduced_length, NULL,
bounds_check, elements_kind, false);
Factory* factory = isolate()->factory();
double nan_double = FixedDoubleArray::hole_nan_as_double();
HValue* hole = IsFastSmiOrObjectElementsKind(elements_kind)
? Add<HConstant>(factory->the_hole_value())
: Add<HConstant>(nan_double);
if (IsFastSmiOrObjectElementsKind(elements_kind)) {
elements_kind = FAST_HOLEY_ELEMENTS;
}
AddElementAccess(
elements, reduced_length, hole, bounds_check, elements_kind, true);
}
Add<HStoreNamedField>(
checked_object, HObjectAccess::ForArrayLength(elements_kind),
reduced_length);
Add<HSimulate>(expr->id(), REMOVABLE_SIMULATE);
ast_context()->ReturnValue(result);
return true;
}
default:
// Not yet supported for inlining.
break;
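A note on the hole handling above: for Smi and object element kinds the vacated slot is filled with the tagged the_hole_value(), and the store is issued with the elements kind widened to FAST_HOLEY_ELEMENTS so that a hole is a legal value there; for double element kinds there is no tagged hole object, so the slot is filled with FixedDoubleArray::hole_nan_as_double(), a NaN distinguished by its exact bit pattern. The sketch below only illustrates the idea of such a bit-pattern sentinel; the constant used is made up and is not V8's actual hole-NaN encoding.

#include <cstdint>
#include <cstring>

// Hypothetical hole-NaN bit pattern, for illustration only (not V8's value).
constexpr std::uint64_t kIllustrativeHoleNanBits = 0x7FF8DEADDEADBEEFull;

double HoleAsDouble() {
  double d;
  std::uint64_t bits = kIllustrativeHoleNanBits;
  std::memcpy(&d, &bits, sizeof d);  // type-pun via memcpy to avoid UB
  return d;
}

bool IsTheHoleNan(double d) {
  // "d != d" only detects some NaN; the hole must be matched by exact bits,
  // otherwise a NaN stored by user code would look like a missing element.
  std::uint64_t bits;
  std::memcpy(&bits, &d, sizeof bits);
  return bits == kIllustrativeHoleNanBits;
}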


@@ -1666,76 +1666,6 @@ Handle<Code> CallStubCompiler::CompileCallField(Handle<JSObject> object,
}
Handle<Code> CallStubCompiler::CompileArrayPopCall(
Handle<Object> object,
Handle<JSObject> holder,
Handle<Cell> cell,
Handle<JSFunction> function,
Handle<String> name,
Code::StubType type) {
// If object is not an array or is observed or sealed, bail out to regular
// call.
if (!object->IsJSArray() ||
!cell.is_null() ||
Handle<JSArray>::cast(object)->map()->is_observed() ||
!Handle<JSArray>::cast(object)->map()->is_extensible()) {
return Handle<Code>::null();
}
Label miss, return_undefined, call_builtin;
HandlerFrontendHeader(object, holder, name, RECEIVER_MAP_CHECK, &miss);
// Get the elements array of the object.
__ mov(ebx, FieldOperand(edx, JSArray::kElementsOffset));
// Check that the elements are in fast mode and writable.
__ cmp(FieldOperand(ebx, HeapObject::kMapOffset),
Immediate(factory()->fixed_array_map()));
__ j(not_equal, &call_builtin);
// Get the array's length into ecx and calculate new length.
__ mov(ecx, FieldOperand(edx, JSArray::kLengthOffset));
__ sub(ecx, Immediate(Smi::FromInt(1)));
__ j(negative, &return_undefined);
// Get the last element.
STATIC_ASSERT(kSmiTagSize == 1);
STATIC_ASSERT(kSmiTag == 0);
__ mov(eax, FieldOperand(ebx,
ecx, times_half_pointer_size,
FixedArray::kHeaderSize));
__ cmp(eax, Immediate(factory()->the_hole_value()));
__ j(equal, &call_builtin);
// Set the array's length.
__ mov(FieldOperand(edx, JSArray::kLengthOffset), ecx);
// Fill with the hole.
__ mov(FieldOperand(ebx,
ecx, times_half_pointer_size,
FixedArray::kHeaderSize),
Immediate(factory()->the_hole_value()));
const int argc = arguments().immediate();
__ ret((argc + 1) * kPointerSize);
__ bind(&return_undefined);
__ mov(eax, Immediate(factory()->undefined_value()));
__ ret((argc + 1) * kPointerSize);
__ bind(&call_builtin);
__ TailCallExternalReference(
ExternalReference(Builtins::c_ArrayPop, isolate()),
argc + 1,
1);
HandlerFrontendFooter(&miss);
// Return the generated code.
return GetCode(type, name);
}
Handle<Code> CallStubCompiler::CompileFastApiCall(
const CallOptimization& optimization,
Handle<Object> object,


@@ -1556,78 +1556,6 @@ Handle<Code> CallStubCompiler::CompileCallField(Handle<JSObject> object,
}
Handle<Code> CallStubCompiler::CompileArrayPopCall(
Handle<Object> object,
Handle<JSObject> holder,
Handle<Cell> cell,
Handle<JSFunction> function,
Handle<String> name,
Code::StubType type) {
// If object is not an array or is observed or sealed, bail out to regular
// call.
if (!object->IsJSArray() ||
!cell.is_null() ||
Handle<JSArray>::cast(object)->map()->is_observed() ||
!Handle<JSArray>::cast(object)->map()->is_extensible()) {
return Handle<Code>::null();
}
Label miss, return_undefined, call_builtin;
Register receiver = a0;
Register scratch = a1;
Register elements = a3;
HandlerFrontendHeader(object, holder, name, RECEIVER_MAP_CHECK, &miss);
// Get the elements array of the object.
__ lw(elements, FieldMemOperand(receiver, JSArray::kElementsOffset));
// Check that the elements are in fast mode and writable.
__ CheckMap(elements,
scratch,
Heap::kFixedArrayMapRootIndex,
&call_builtin,
DONT_DO_SMI_CHECK);
// Get the array's length into t0 and calculate new length.
__ lw(t0, FieldMemOperand(receiver, JSArray::kLengthOffset));
__ Subu(t0, t0, Operand(Smi::FromInt(1)));
__ Branch(&return_undefined, lt, t0, Operand(zero_reg));
// Get the last element.
__ LoadRoot(t2, Heap::kTheHoleValueRootIndex);
STATIC_ASSERT(kSmiTagSize == 1);
STATIC_ASSERT(kSmiTag == 0);
// We can't address the last element in one operation. Compute the more
// expensive shift first, and use an offset later on.
__ sll(t1, t0, kPointerSizeLog2 - kSmiTagSize);
__ Addu(elements, elements, t1);
__ lw(scratch, FieldMemOperand(elements, FixedArray::kHeaderSize));
__ Branch(&call_builtin, eq, scratch, Operand(t2));
// Set the array's length.
__ sw(t0, FieldMemOperand(receiver, JSArray::kLengthOffset));
// Fill with the hole.
__ sw(t2, FieldMemOperand(elements, FixedArray::kHeaderSize));
const int argc = arguments().immediate();
__ mov(v0, scratch);
__ DropAndRet(argc + 1);
__ bind(&return_undefined);
__ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
__ DropAndRet(argc + 1);
__ bind(&call_builtin);
__ TailCallExternalReference(
ExternalReference(Builtins::c_ArrayPop, isolate()), argc + 1, 1);
HandlerFrontendFooter(&miss);
// Return the generated code.
return GetCode(type, name);
}
Handle<Code> CallStubCompiler::CompileFastApiCall(
const CallOptimization& optimization,
Handle<Object> object,


@@ -888,8 +888,7 @@ class KeyedStoreStubCompiler: public StoreStubCompiler {
// Subset of FUNCTIONS_WITH_ID_LIST with custom constant/global call
// IC stubs.
#define CUSTOM_CALL_IC_GENERATORS(V) \
V(ArrayPush) \
V(ArrayPop)
V(ArrayPush)
class CallStubCompiler: public StubCompiler {
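The header change above edits the X-macro list that drives the custom call-IC machinery: every consumer of CUSTOM_CALL_IC_GENERATORS expands once per V(Name) entry, so removing V(ArrayPop) removes the declaration and dispatch generated for the handwritten stub in one place. Below is a compilable sketch of the pattern, using a hypothetical consumer macro (the real consumer sits inside CallStubCompiler and generates member declarations with a different signature):

#include <iostream>

#define CUSTOM_CALL_IC_GENERATORS(V) \
  V(ArrayPush)

// Hypothetical consumer: expand each V(Name) entry into one function.
#define DEFINE_CALL_GENERATOR(name) \
  void Compile##name##Call() { std::cout << "compile " #name " stub\n"; }

CUSTOM_CALL_IC_GENERATORS(DEFINE_CALL_GENERATOR)
#undef DEFINE_CALL_GENERATOR

int main() {
  CompileArrayPushCall();  // ArrayPop no longer appears in the generated set
  return 0;
}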


@@ -1593,77 +1593,6 @@ Handle<Code> CallStubCompiler::CompileCallField(Handle<JSObject> object,
}
Handle<Code> CallStubCompiler::CompileArrayPopCall(
Handle<Object> object,
Handle<JSObject> holder,
Handle<Cell> cell,
Handle<JSFunction> function,
Handle<String> name,
Code::StubType type) {
// If object is not an array or is observed or sealed, bail out to regular
// call.
if (!object->IsJSArray() ||
!cell.is_null() ||
Handle<JSArray>::cast(object)->map()->is_observed() ||
!Handle<JSArray>::cast(object)->map()->is_extensible()) {
return Handle<Code>::null();
}
Label miss, return_undefined, call_builtin;
HandlerFrontendHeader(object, holder, name, RECEIVER_MAP_CHECK, &miss);
// Get the elements array of the object.
__ movp(rbx, FieldOperand(rdx, JSArray::kElementsOffset));
// Check that the elements are in fast mode and writable.
__ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset),
Heap::kFixedArrayMapRootIndex);
__ j(not_equal, &call_builtin);
// Get the array's length into rcx and calculate new length.
__ SmiToInteger32(rcx, FieldOperand(rdx, JSArray::kLengthOffset));
__ subl(rcx, Immediate(1));
__ j(negative, &return_undefined);
// Get the last element.
__ LoadRoot(r9, Heap::kTheHoleValueRootIndex);
__ movp(rax, FieldOperand(rbx,
rcx, times_pointer_size,
FixedArray::kHeaderSize));
// Check if element is already the hole.
__ cmpq(rax, r9);
// If so, call slow-case to also check prototypes for value.
__ j(equal, &call_builtin);
// Set the array's length.
__ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rcx);
// Fill with the hole and return original value.
__ movp(FieldOperand(rbx,
rcx, times_pointer_size,
FixedArray::kHeaderSize),
r9);
const int argc = arguments().immediate();
__ ret((argc + 1) * kPointerSize);
__ bind(&return_undefined);
__ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
__ ret((argc + 1) * kPointerSize);
__ bind(&call_builtin);
__ TailCallExternalReference(
ExternalReference(Builtins::c_ArrayPop, isolate()),
argc + 1,
1);
HandlerFrontendFooter(&miss);
// Return the generated code.
return GetCode(type, name);
}
Handle<Code> CallStubCompiler::CompileFastApiCall(
const CallOptimization& optimization,
Handle<Object> object,