Remove %_SwapElements. This inlined runtime contained an optimization that was dangerous in the presence of incremental compaction. It also prevented QuickSort from array.js from being optimized by Crankshaft, so it is probably better to do without it. We have high hopes that this will fix bug=117879.

Review URL: https://chromiumcodereview.appspot.com/10392150

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@11588 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
This commit is contained in:
erik.corry@gmail.com 2012-05-17 20:53:32 +00:00
parent 401fbbaef6
commit 3d45e98b5e
8 changed files with 8 additions and 429 deletions

View File

@@ -3466,104 +3466,6 @@ void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
}
// Emits inline ARM code for the %_SwapElements intrinsic: swaps two elements
// of a JSArray in place when the array has fast, writable FixedArray elements
// and both indices are in-bounds smis; any other case falls back to the
// generic Runtime::kSwapElements C++ function. Takes three arguments on the
// stack (object, index1, index2), drops them, and leaves undefined in r0.
void FullCodeGenerator::EmitSwapElements(CallRuntime* expr) {
ZoneList<Expression*>* args = expr->arguments();
ASSERT(args->length() == 3);
VisitForStackValue(args->at(0));
VisitForStackValue(args->at(1));
VisitForStackValue(args->at(2));
Label done;
Label slow_case;
Register object = r0;
Register index1 = r1;
Register index2 = r2;
Register elements = r3;
Register scratch1 = r4;
Register scratch2 = r5;
// Arguments were pushed in order, so the object is deepest on the stack.
__ ldr(object, MemOperand(sp, 2 * kPointerSize));
// Fetch the map and check if array is in fast case.
// Check that object doesn't require security checks and
// has no indexed interceptor.
__ CompareObjectType(object, scratch1, scratch2, JS_ARRAY_TYPE);
__ b(ne, &slow_case);
// Map is now in scratch1.
__ ldrb(scratch2, FieldMemOperand(scratch1, Map::kBitFieldOffset));
__ tst(scratch2, Operand(KeyedLoadIC::kSlowCaseBitFieldMask));
__ b(ne, &slow_case);
// Check the object's elements are in fast case and writable.
__ ldr(elements, FieldMemOperand(object, JSObject::kElementsOffset));
__ ldr(scratch1, FieldMemOperand(elements, HeapObject::kMapOffset));
__ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
__ cmp(scratch1, ip);
__ b(ne, &slow_case);
// Check that both indices are smis.
__ ldr(index1, MemOperand(sp, 1 * kPointerSize));
__ ldr(index2, MemOperand(sp, 0));
__ JumpIfNotBothSmi(index1, index2, &slow_case);
// Check that both indices are valid.
// ARM conditional execution: the second cmp runs only if the first one set
// 'hi' (length > index1), so the single 'ls' branch below is taken unless
// BOTH indices are strictly below the length.
__ ldr(scratch1, FieldMemOperand(object, JSArray::kLengthOffset));
__ cmp(scratch1, index1);
__ cmp(scratch1, index2, hi);
__ b(ls, &slow_case);
// Bring the address of the elements into index1 and index2.
// The indices are smis (value << kSmiTagSize), so shifting left by
// kPointerSizeLog2 - kSmiTagSize converts them directly to byte offsets.
__ add(scratch1, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
__ add(index1,
scratch1,
Operand(index1, LSL, kPointerSizeLog2 - kSmiTagSize));
__ add(index2,
scratch1,
Operand(index2, LSL, kPointerSizeLog2 - kSmiTagSize));
// Swap elements.
__ ldr(scratch1, MemOperand(index1, 0));
__ ldr(scratch2, MemOperand(index2, 0));
__ str(scratch1, MemOperand(index2, 0));
__ str(scratch2, MemOperand(index1, 0));
// The stores above may have written new-space pointers into an old-space
// array; record both slots unless the page is already scanned on scavenge.
Label no_remembered_set;
__ CheckPageFlag(elements,
scratch1,
1 << MemoryChunk::SCAN_ON_SCAVENGE,
ne,
&no_remembered_set);
// Possible optimization: do a check that both values are Smis
// (or them and test against Smi mask.)
// We are swapping two objects in an array and the incremental marker never
// pauses in the middle of scanning a single object. Therefore the
// incremental marker is not disturbed, so we don't need to call the
// RecordWrite stub that notifies the incremental marker.
__ RememberedSetHelper(elements,
index1,
scratch2,
kDontSaveFPRegs,
MacroAssembler::kFallThroughAtEnd);
__ RememberedSetHelper(elements,
index2,
scratch2,
kDontSaveFPRegs,
MacroAssembler::kFallThroughAtEnd);
__ bind(&no_remembered_set);
// We are done. Drop elements from the stack, and return undefined.
__ Drop(3);
__ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
__ jmp(&done);
__ bind(&slow_case);
__ CallRuntime(Runtime::kSwapElements, 3);
__ bind(&done);
context()->Plug(r0);
}
void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
ZoneList<Expression*>* args = expr->arguments();
ASSERT_EQ(2, args->length());

View File

@@ -827,7 +827,8 @@ function ArraySort(comparefn) {
var element = a[i];
var order = %_CallFunction(receiver, element, pivot, comparefn);
if (order < 0) {
%_SwapElements(a, i, low_end);
a[i] = a[low_end];
a[low_end] = element;
low_end++;
} else if (order > 0) {
do {
@@ -836,9 +837,12 @@ function ArraySort(comparefn) {
var top_elem = a[high_start];
order = %_CallFunction(receiver, top_elem, pivot, comparefn);
} while (order > 0);
%_SwapElements(a, i, high_start);
a[i] = a[high_start];
a[high_start] = element;
if (order < 0) {
%_SwapElements(a, i, low_end);
element = a[i];
a[i] = a[low_end];
a[low_end] = element;
low_end++;
}
}

View File

@@ -8135,14 +8135,6 @@ void HGraphBuilder::GenerateNumberToString(CallRuntime* call) {
}
// Fast swapping of elements. Takes three expressions, the object and two
// indices. This should only be used if the indices are known to be
// non-negative and within bounds of the elements array at the call site.
// Crankshaft never inlines %_SwapElements; unconditionally bail out so the
// call is handled by full codegen instead.
void HGraphBuilder::GenerateSwapElements(CallRuntime* call) {
Bailout("inlined runtime function: SwapElements");
}
// Fast call for custom callbacks.
void HGraphBuilder::GenerateCallFunction(CallRuntime* call) {
// 1 ~ The function to call is not itself an argument to the call.

View File

@@ -3405,99 +3405,6 @@ void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
}
// Emits inline ia32 code for the %_SwapElements intrinsic: swaps two elements
// of a JSArray in place when the array has fast, writable FixedArray elements
// and both indices are in-bounds smis; any other case falls back to the
// generic Runtime::kSwapElements C++ function. Takes three arguments on the
// stack (object, index_1, index_2), drops them, and leaves undefined in eax.
void FullCodeGenerator::EmitSwapElements(CallRuntime* expr) {
ZoneList<Expression*>* args = expr->arguments();
ASSERT(args->length() == 3);
VisitForStackValue(args->at(0));
VisitForStackValue(args->at(1));
VisitForStackValue(args->at(2));
Label done;
Label slow_case;
Register object = eax;
Register index_1 = ebx;
Register index_2 = ecx;
Register elements = edi;
Register temp = edx;
// Arguments were pushed in order, so the object is deepest on the stack.
__ mov(object, Operand(esp, 2 * kPointerSize));
// Fetch the map and check if array is in fast case.
// Check that object doesn't require security checks and
// has no indexed interceptor.
__ CmpObjectType(object, JS_ARRAY_TYPE, temp);
__ j(not_equal, &slow_case);
__ test_b(FieldOperand(temp, Map::kBitFieldOffset),
KeyedLoadIC::kSlowCaseBitFieldMask);
__ j(not_zero, &slow_case);
// Check the object's elements are in fast case and writable.
__ mov(elements, FieldOperand(object, JSObject::kElementsOffset));
__ cmp(FieldOperand(elements, HeapObject::kMapOffset),
Immediate(isolate()->factory()->fixed_array_map()));
__ j(not_equal, &slow_case);
// Check that both indices are smis.
// OR the two values together: a set tag bit in the result means at least
// one of them is not a smi.
__ mov(index_1, Operand(esp, 1 * kPointerSize));
__ mov(index_2, Operand(esp, 0));
__ mov(temp, index_1);
__ or_(temp, index_2);
__ JumpIfNotSmi(temp, &slow_case);
// Check that both indices are valid.
// Length and indices are all smis here, so the tagged values can be
// compared directly; below_equal catches length <= index (unsigned).
__ mov(temp, FieldOperand(object, JSArray::kLengthOffset));
__ cmp(temp, index_1);
__ j(below_equal, &slow_case);
__ cmp(temp, index_2);
__ j(below_equal, &slow_case);
// Bring addresses into index1 and index2.
__ lea(index_1, CodeGenerator::FixedArrayElementOperand(elements, index_1));
__ lea(index_2, CodeGenerator::FixedArrayElementOperand(elements, index_2));
// Swap elements. Use object and temp as scratch registers.
__ mov(object, Operand(index_1, 0));
__ mov(temp, Operand(index_2, 0));
__ mov(Operand(index_2, 0), object);
__ mov(Operand(index_1, 0), temp);
// The stores above may have written new-space pointers into an old-space
// array; record both slots unless the page is already scanned on scavenge.
Label no_remembered_set;
__ CheckPageFlag(elements,
temp,
1 << MemoryChunk::SCAN_ON_SCAVENGE,
not_zero,
&no_remembered_set,
Label::kNear);
// Possible optimization: do a check that both values are Smis
// (or them and test against Smi mask.)
// We are swapping two objects in an array and the incremental marker never
// pauses in the middle of scanning a single object. Therefore the
// incremental marker is not disturbed, so we don't need to call the
// RecordWrite stub that notifies the incremental marker.
__ RememberedSetHelper(elements,
index_1,
temp,
kDontSaveFPRegs,
MacroAssembler::kFallThroughAtEnd);
__ RememberedSetHelper(elements,
index_2,
temp,
kDontSaveFPRegs,
MacroAssembler::kFallThroughAtEnd);
__ bind(&no_remembered_set);
// We are done. Drop elements from the stack, and return undefined.
__ add(esp, Immediate(3 * kPointerSize));
__ mov(eax, isolate()->factory()->undefined_value());
__ jmp(&done);
__ bind(&slow_case);
__ CallRuntime(Runtime::kSwapElements, 3);
__ bind(&done);
context()->Plug(eax);
}
void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
ZoneList<Expression*>* args = expr->arguments();
ASSERT_EQ(2, args->length());

View File

@@ -3500,104 +3500,6 @@ void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
}
// Emits inline MIPS code for the %_SwapElements intrinsic: swaps two elements
// of a JSArray in place when the array has fast, writable FixedArray elements
// and both indices are in-bounds smis; any other case falls back to the
// generic Runtime::kSwapElements C++ function. Takes three arguments on the
// stack (object, index1, index2), drops them, and leaves undefined in v0.
void FullCodeGenerator::EmitSwapElements(CallRuntime* expr) {
ZoneList<Expression*>* args = expr->arguments();
ASSERT(args->length() == 3);
VisitForStackValue(args->at(0));
VisitForStackValue(args->at(1));
VisitForStackValue(args->at(2));
Label done;
Label slow_case;
Register object = a0;
Register index1 = a1;
Register index2 = a2;
Register elements = a3;
Register scratch1 = t0;
Register scratch2 = t1;
// Arguments were pushed in order, so the object is deepest on the stack.
__ lw(object, MemOperand(sp, 2 * kPointerSize));
// Fetch the map and check if array is in fast case.
// Check that object doesn't require security checks and
// has no indexed interceptor.
__ GetObjectType(object, scratch1, scratch2);
__ Branch(&slow_case, ne, scratch2, Operand(JS_ARRAY_TYPE));
// Map is now in scratch1.
__ lbu(scratch2, FieldMemOperand(scratch1, Map::kBitFieldOffset));
__ And(scratch2, scratch2, Operand(KeyedLoadIC::kSlowCaseBitFieldMask));
__ Branch(&slow_case, ne, scratch2, Operand(zero_reg));
// Check the object's elements are in fast case and writable.
__ lw(elements, FieldMemOperand(object, JSObject::kElementsOffset));
__ lw(scratch1, FieldMemOperand(elements, HeapObject::kMapOffset));
__ LoadRoot(scratch2, Heap::kFixedArrayMapRootIndex);
__ Branch(&slow_case, ne, scratch1, Operand(scratch2));
// Check that both indices are smis.
__ lw(index1, MemOperand(sp, 1 * kPointerSize));
__ lw(index2, MemOperand(sp, 0));
__ JumpIfNotBothSmi(index1, index2, &slow_case);
// Check that both indices are valid.
// NOTE(review): after the first 'ls' branch falls through, length > index1
// holds, so the NegateCondition(hi) branch to not_hi can never be taken —
// it appears to mirror the ARM conditional-compare sequence mechanically
// and is redundant (but harmless); confirm before relying on it.
Label not_hi;
__ lw(scratch1, FieldMemOperand(object, JSArray::kLengthOffset));
__ Branch(&slow_case, ls, scratch1, Operand(index1));
__ Branch(&not_hi, NegateCondition(hi), scratch1, Operand(index1));
__ Branch(&slow_case, ls, scratch1, Operand(index2));
__ bind(&not_hi);
// Bring the address of the elements into index1 and index2.
// The indices are smis (value << kSmiTagSize), so shifting left by
// kPointerSizeLog2 - kSmiTagSize converts them directly to byte offsets.
__ Addu(scratch1, elements,
Operand(FixedArray::kHeaderSize - kHeapObjectTag));
__ sll(index1, index1, kPointerSizeLog2 - kSmiTagSize);
__ Addu(index1, scratch1, index1);
__ sll(index2, index2, kPointerSizeLog2 - kSmiTagSize);
__ Addu(index2, scratch1, index2);
// Swap elements.
__ lw(scratch1, MemOperand(index1, 0));
__ lw(scratch2, MemOperand(index2, 0));
__ sw(scratch1, MemOperand(index2, 0));
__ sw(scratch2, MemOperand(index1, 0));
// The stores above may have written new-space pointers into an old-space
// array; record both slots unless the page is already scanned on scavenge.
Label no_remembered_set;
__ CheckPageFlag(elements,
scratch1,
1 << MemoryChunk::SCAN_ON_SCAVENGE,
ne,
&no_remembered_set);
// Possible optimization: do a check that both values are Smis
// (or them and test against Smi mask).
// We are swapping two objects in an array and the incremental marker never
// pauses in the middle of scanning a single object. Therefore the
// incremental marker is not disturbed, so we don't need to call the
// RecordWrite stub that notifies the incremental marker.
__ RememberedSetHelper(elements,
index1,
scratch2,
kDontSaveFPRegs,
MacroAssembler::kFallThroughAtEnd);
__ RememberedSetHelper(elements,
index2,
scratch2,
kDontSaveFPRegs,
MacroAssembler::kFallThroughAtEnd);
__ bind(&no_remembered_set);
// We are done. Drop elements from the stack, and return undefined.
__ Drop(3);
__ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
__ jmp(&done);
__ bind(&slow_case);
__ CallRuntime(Runtime::kSwapElements, 3);
__ bind(&done);
context()->Plug(v0);
}
void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
ZoneList<Expression*>* args = expr->arguments();
ASSERT_EQ(2, args->length());

View File

@@ -10026,36 +10026,6 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_EstimateNumberOfElements) {
}
// %SwapElements(object, key1, key2): generic slow path for swapping two
// elements of a JSObject. Reads both elements, then writes them back in
// swapped order with strict-mode stores. Throws IllegalOperation if either
// key is not a valid array index; returns undefined on success.
// Note: element accessors, if present, fire in load-load/store-store order.
RUNTIME_FUNCTION(MaybeObject*, Runtime_SwapElements) {
HandleScope handle_scope(isolate);
ASSERT_EQ(3, args.length());
// CONVERT_ARG_HANDLE_CHECKED already yields a Handle<JSObject>, so no
// further cast of the receiver is needed below.
CONVERT_ARG_HANDLE_CHECKED(JSObject, object, 0);
Handle<Object> key1 = args.at<Object>(1);
Handle<Object> key2 = args.at<Object>(2);
uint32_t index1, index2;
if (!key1->ToArrayIndex(&index1)
|| !key2->ToArrayIndex(&index2)) {
return isolate->ThrowIllegalOperation();
}
Handle<Object> tmp1 = Object::GetElement(object, index1);
RETURN_IF_EMPTY_HANDLE(isolate, tmp1);
Handle<Object> tmp2 = Object::GetElement(object, index2);
RETURN_IF_EMPTY_HANDLE(isolate, tmp2);
RETURN_IF_EMPTY_HANDLE(
isolate, JSObject::SetElement(object, index1, tmp2, NONE, kStrictMode));
RETURN_IF_EMPTY_HANDLE(
isolate, JSObject::SetElement(object, index2, tmp1, NONE, kStrictMode));
return isolate->heap()->undefined_value();
}
// Returns an array that tells you where in the [0, length) interval an array
// might have elements. Can either return keys (positive integers) or
// intervals (pair of a negative integer (-start-1) followed by a

View File

@@ -272,7 +272,6 @@ namespace internal {
F(GetArrayKeys, 2, 1) \
F(MoveArrayContents, 2, 1) \
F(EstimateNumberOfElements, 1, 1) \
F(SwapElements, 3, 1) \
\
/* Getters and Setters */ \
F(LookupAccessor, 3, 1) \
@@ -536,8 +535,7 @@ namespace internal {
F(RegExpExec, 4, 1) \
F(RegExpConstructResult, 3, 1) \
F(GetFromCache, 2, 1) \
F(NumberToString, 1, 1) \
F(SwapElements, 3, 1)
F(NumberToString, 1, 1)
//---------------------------------------------------------------------------

View File

@@ -3360,102 +3360,6 @@ void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
}
// Emits inline x64 code for the %_SwapElements intrinsic: swaps two elements
// of a JSArray in place when the array has fast, writable FixedArray elements
// and both indices are in-bounds smis; any other case falls back to the
// generic Runtime::kSwapElements C++ function. Takes three arguments on the
// stack (object, index_1, index_2), drops them, and leaves undefined in rax.
void FullCodeGenerator::EmitSwapElements(CallRuntime* expr) {
ZoneList<Expression*>* args = expr->arguments();
ASSERT(args->length() == 3);
VisitForStackValue(args->at(0));
VisitForStackValue(args->at(1));
VisitForStackValue(args->at(2));
Label done;
Label slow_case;
Register object = rax;
Register index_1 = rbx;
Register index_2 = rcx;
Register elements = rdi;
Register temp = rdx;
// Arguments were pushed in order, so the object is deepest on the stack.
__ movq(object, Operand(rsp, 2 * kPointerSize));
// Fetch the map and check if array is in fast case.
// Check that object doesn't require security checks and
// has no indexed interceptor.
__ CmpObjectType(object, JS_ARRAY_TYPE, temp);
__ j(not_equal, &slow_case);
__ testb(FieldOperand(temp, Map::kBitFieldOffset),
Immediate(KeyedLoadIC::kSlowCaseBitFieldMask));
__ j(not_zero, &slow_case);
// Check the object's elements are in fast case and writable.
__ movq(elements, FieldOperand(object, JSObject::kElementsOffset));
__ CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
Heap::kFixedArrayMapRootIndex);
__ j(not_equal, &slow_case);
// Check that both indices are smis.
__ movq(index_1, Operand(rsp, 1 * kPointerSize));
__ movq(index_2, Operand(rsp, 0 * kPointerSize));
__ JumpIfNotBothSmi(index_1, index_2, &slow_case);
// Check that both indices are valid.
// The JSArray length field is a smi since the array is in fast case mode.
__ movq(temp, FieldOperand(object, JSArray::kLengthOffset));
__ SmiCompare(temp, index_1);
__ j(below_equal, &slow_case);
__ SmiCompare(temp, index_2);
__ j(below_equal, &slow_case);
// Untag the indices so they can be used with a scaled addressing mode.
__ SmiToInteger32(index_1, index_1);
__ SmiToInteger32(index_2, index_2);
// Bring addresses into index1 and index2.
__ lea(index_1, FieldOperand(elements, index_1, times_pointer_size,
FixedArray::kHeaderSize));
__ lea(index_2, FieldOperand(elements, index_2, times_pointer_size,
FixedArray::kHeaderSize));
// Swap elements. Use object and temp as scratch registers.
__ movq(object, Operand(index_1, 0));
__ movq(temp, Operand(index_2, 0));
__ movq(Operand(index_2, 0), object);
__ movq(Operand(index_1, 0), temp);
// The stores above may have written new-space pointers into an old-space
// array; record both slots unless the page is already scanned on scavenge.
Label no_remembered_set;
__ CheckPageFlag(elements,
temp,
1 << MemoryChunk::SCAN_ON_SCAVENGE,
not_zero,
&no_remembered_set,
Label::kNear);
// Possible optimization: do a check that both values are Smis
// (or them and test against Smi mask.)
// We are swapping two objects in an array and the incremental marker never
// pauses in the middle of scanning a single object. Therefore the
// incremental marker is not disturbed, so we don't need to call the
// RecordWrite stub that notifies the incremental marker.
__ RememberedSetHelper(elements,
index_1,
temp,
kDontSaveFPRegs,
MacroAssembler::kFallThroughAtEnd);
__ RememberedSetHelper(elements,
index_2,
temp,
kDontSaveFPRegs,
MacroAssembler::kFallThroughAtEnd);
__ bind(&no_remembered_set);
// We are done. Drop elements from the stack, and return undefined.
__ addq(rsp, Immediate(3 * kPointerSize));
__ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
__ jmp(&done);
__ bind(&slow_case);
__ CallRuntime(Runtime::kSwapElements, 3);
__ bind(&done);
context()->Plug(rax);
}
void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
ZoneList<Expression*>* args = expr->arguments();
ASSERT_EQ(2, args->length());