Revert 20348 - "Fix LoadFieldByIndex to take mutable heap-numbers into account."

Reason for revert: crashes benchmarks/sunspider/string-fasta on ia32.debug

This also reverts r20350 and r20352

> Fix LoadFieldByIndex to take mutable heap-numbers into account.
>
> BUG=
> R=ishell@chromium.org
>
> Review URL: https://codereview.chromium.org/213213002

BUG=none
LOG=n
TBR=verwaest@chromium.org

Revert "Use sarq on x64"

This reverts commit e2a8ef9321345c6bc091054443bf2b9535ff6b1c.

Revert "Don't | int and bool"

This reverts commit c90d713d3a8ceba4fec41933a63beb6e50a3d7c0.

Review URL: https://codereview.chromium.org/219393002

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@20354 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
jochen@chromium.org 2014-03-31 13:23:32 +00:00
parent b7039334ae
commit 163044e7ba
20 changed files with 4 additions and 270 deletions

@@ -2539,9 +2539,7 @@ LInstruction* LChunkBuilder::DoCheckMapValue(HCheckMapValue* instr) {
 LInstruction* LChunkBuilder::DoLoadFieldByIndex(HLoadFieldByIndex* instr) {
   LOperand* object = UseRegister(instr->object());
   LOperand* index = UseRegister(instr->index());
-  LLoadFieldByIndex* load = new(zone()) LLoadFieldByIndex(object, index);
-  LInstruction* result = DefineSameAsFirst(load);
-  return AssignPointerMap(result);
+  return DefineAsRegister(new(zone()) LLoadFieldByIndex(object, index));
 }
 } }  // namespace v8::internal

@@ -5716,61 +5716,13 @@ void LCodeGen::DoCheckMapValue(LCheckMapValue* instr) {
 }
-void LCodeGen::DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr,
-                                           Register result,
-                                           Register object,
-                                           Register index) {
-  PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
-  __ Push(object);
-  __ Push(index);
-  __ mov(cp, Operand::Zero());
-  __ CallRuntimeSaveDoubles(Runtime::kLoadMutableDouble);
-  RecordSafepointWithRegisters(
-      instr->pointer_map(), 1, Safepoint::kNoLazyDeopt);
-  __ StoreToSafepointRegisterSlot(r0, result);
-}
 void LCodeGen::DoLoadFieldByIndex(LLoadFieldByIndex* instr) {
-  class DeferredLoadMutableDouble V8_FINAL : public LDeferredCode {
-   public:
-    DeferredLoadMutableDouble(LCodeGen* codegen,
-                              LLoadFieldByIndex* instr,
-                              Register result,
-                              Register object,
-                              Register index)
-        : LDeferredCode(codegen),
-          instr_(instr),
-          result_(result),
-          object_(object),
-          index_(index) {
-    }
-    virtual void Generate() V8_OVERRIDE {
-      codegen()->DoDeferredLoadMutableDouble(instr_, result_, object_, index_);
-    }
-    virtual LInstruction* instr() V8_OVERRIDE { return instr_; }
-   private:
-    LLoadFieldByIndex* instr_;
-    Register result_;
-    Register object_;
-    Register index_;
-  };
   Register object = ToRegister(instr->object());
   Register index = ToRegister(instr->index());
   Register result = ToRegister(instr->result());
   Register scratch = scratch0();
-  DeferredLoadMutableDouble* deferred;
-  deferred = new(zone()) DeferredLoadMutableDouble(
-      this, instr, result, object, index);
   Label out_of_object, done;
-  __ tst(index, Operand(Smi::FromInt(1)));
-  __ b(ne, deferred->entry());
-  __ mov(index, Operand(index, ASR, 1));
   __ cmp(index, Operand::Zero());
   __ b(lt, &out_of_object);
@@ -5786,7 +5738,6 @@ void LCodeGen::DoLoadFieldByIndex(LLoadFieldByIndex* instr) {
   __ sub(scratch, result, Operand::PointerOffsetFromSmiKey(index));
   __ ldr(result, FieldMemOperand(scratch,
                                  FixedArray::kHeaderSize - kPointerSize));
-  __ bind(deferred->exit());
   __ bind(&done);
 }

@@ -141,10 +141,6 @@ class LCodeGen: public LCodeGenBase {
   void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
                                        Label* map_check);
   void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);
-  void DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr,
-                                   Register result,
-                                   Register object,
-                                   Register index);
   // Parallel move support.
   void DoParallelMove(LParallelMove* move);

@@ -2562,9 +2562,7 @@ LInstruction* LChunkBuilder::DoCheckMapValue(HCheckMapValue* instr) {
 LInstruction* LChunkBuilder::DoLoadFieldByIndex(HLoadFieldByIndex* instr) {
   LOperand* object = UseRegisterAtStart(instr->object());
   LOperand* index = UseRegister(instr->index());
-  LLoadFieldByIndex* load = new(zone()) LLoadFieldByIndex(object, index);
-  LInstruction* result = DefineSameAsFirst(load);
-  return AssignPointerMap(result);
+  return DefineAsRegister(new(zone()) LLoadFieldByIndex(object, index));
 }

@@ -5879,61 +5879,14 @@ void LCodeGen::DoWrapReceiver(LWrapReceiver* instr) {
 }
-void LCodeGen::DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr,
-                                           Register result,
-                                           Register object,
-                                           Register index) {
-  PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
-  __ Push(object);
-  __ Push(index);
-  __ Mov(cp, 0);
-  __ CallRuntimeSaveDoubles(Runtime::kLoadMutableDouble);
-  RecordSafepointWithRegisters(
-      instr->pointer_map(), 1, Safepoint::kNoLazyDeopt);
-  __ StoreToSafepointRegisterSlot(x0, result);
-}
 void LCodeGen::DoLoadFieldByIndex(LLoadFieldByIndex* instr) {
-  class DeferredLoadMutableDouble V8_FINAL : public LDeferredCode {
-   public:
-    DeferredLoadMutableDouble(LCodeGen* codegen,
-                              LLoadFieldByIndex* instr,
-                              Register result,
-                              Register object,
-                              Register index)
-        : LDeferredCode(codegen),
-          instr_(instr),
-          result_(result),
-          object_(object),
-          index_(index) {
-    }
-    virtual void Generate() V8_OVERRIDE {
-      codegen()->DoDeferredLoadMutableDouble(instr_, result_, object_, index_);
-    }
-    virtual LInstruction* instr() V8_OVERRIDE { return instr_; }
-   private:
-    LLoadFieldByIndex* instr_;
-    Register result_;
-    Register object_;
-    Register index_;
-  };
   Register object = ToRegister(instr->object());
   Register index = ToRegister(instr->index());
   Register result = ToRegister(instr->result());
   __ AssertSmi(index);
-  DeferredLoadMutableDouble* deferred;
-  deferred = new(zone()) DeferredLoadMutableDouble(
-      this, instr, result, object, index);
   Label out_of_object, done;
-  __ TestAndBranchIfAnySet(
-      index, reinterpret_cast<uint64_t>(Smi::FromInt(1)), deferred->entry());
-  __ Mov(index, Operand(index, ASR, 1));
   __ Cmp(index, Smi::FromInt(0));
   __ B(lt, &out_of_object);
@@ -5949,7 +5902,6 @@ void LCodeGen::DoLoadFieldByIndex(LLoadFieldByIndex* instr) {
   __ Sub(result, result, Operand::UntagSmiAndScale(index, kPointerSizeLog2));
   __ Ldr(result, FieldMemOperand(result,
                                  FixedArray::kHeaderSize - kPointerSize));
-  __ Bind(deferred->exit());
   __ Bind(&done);
 }

@@ -149,10 +149,6 @@ class LCodeGen: public LCodeGenBase {
   void DoDeferredAllocate(LAllocate* instr);
   void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr);
   void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);
-  void DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr,
-                                   Register result,
-                                   Register object,
-                                   Register index);
   Operand ToOperand32(LOperand* op, IntegerSignedness signedness);

@@ -682,10 +682,6 @@ Handle<FixedArray> GetEnumPropertyKeys(Handle<JSObject> object,
         if (field_index >= map->inobject_properties()) {
           field_index = -(field_index - map->inobject_properties() + 1);
         }
-        field_index = field_index << 1;
-        if (details.representation().IsDouble()) {
-          field_index |= 1;
-        }
         indices->set(index, Smi::FromInt(field_index));
       }
     }

@@ -7504,7 +7504,6 @@ class HLoadFieldByIndex V8_FINAL : public HTemplateInstruction<2> {
                     HValue* index) {
     SetOperandAt(0, object);
     SetOperandAt(1, index);
-    SetChangesFlag(kNewSpacePromotion);
     set_representation(Representation::Tagged());
   }

@@ -6323,56 +6323,11 @@ void LCodeGen::DoCheckMapValue(LCheckMapValue* instr) {
 }
-void LCodeGen::DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr,
-                                           Register object,
-                                           Register index) {
-  PushSafepointRegistersScope scope(this);
-  __ push(object);
-  __ push(index);
-  __ xor_(esi, esi);
-  __ CallRuntimeSaveDoubles(Runtime::kLoadMutableDouble);
-  RecordSafepointWithRegisters(
-      instr->pointer_map(), 1, Safepoint::kNoLazyDeopt);
-  __ StoreToSafepointRegisterSlot(object, eax);
-}
 void LCodeGen::DoLoadFieldByIndex(LLoadFieldByIndex* instr) {
-  class DeferredLoadMutableDouble V8_FINAL : public LDeferredCode {
-   public:
-    DeferredLoadMutableDouble(LCodeGen* codegen,
-                              LLoadFieldByIndex* instr,
-                              Register object,
-                              Register index,
-                              const X87Stack& x87_stack)
-        : LDeferredCode(codegen, x87_stack),
-          instr_(instr),
-          object_(object),
-          index_(index) {
-    }
-    virtual void Generate() V8_OVERRIDE {
-      codegen()->DoDeferredLoadMutableDouble(instr_, object_, index_);
-    }
-    virtual LInstruction* instr() V8_OVERRIDE { return instr_; }
-   private:
-    LLoadFieldByIndex* instr_;
-    Register object_;
-    Register index_;
-  };
   Register object = ToRegister(instr->object());
   Register index = ToRegister(instr->index());
-  DeferredLoadMutableDouble* deferred;
-  deferred = new(zone()) DeferredLoadMutableDouble(
-      this, instr, object, index, x87_stack_);
   Label out_of_object, done;
-  __ test(index, Immediate(Smi::FromInt(1)));
-  __ j(not_zero, deferred->entry());
-  __ sar(index, 1);
   __ cmp(index, Immediate(0));
   __ j(less, &out_of_object, Label::kNear);
   __ mov(object, FieldOperand(object,
@@ -6389,7 +6344,6 @@ void LCodeGen::DoLoadFieldByIndex(LLoadFieldByIndex* instr) {
                               index,
                               times_half_pointer_size,
                               FixedArray::kHeaderSize - kPointerSize));
-  __ bind(deferred->exit());
   __ bind(&done);
 }

@@ -163,9 +163,6 @@ class LCodeGen: public LCodeGenBase {
   void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
                                        Label* map_check);
   void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);
-  void DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr,
-                                   Register object,
-                                   Register index);
   // Parallel move support.
   void DoParallelMove(LParallelMove* move);

@@ -2702,9 +2702,7 @@ LInstruction* LChunkBuilder::DoCheckMapValue(HCheckMapValue* instr) {
 LInstruction* LChunkBuilder::DoLoadFieldByIndex(HLoadFieldByIndex* instr) {
   LOperand* object = UseRegister(instr->object());
   LOperand* index = UseTempRegister(instr->index());
-  LLoadFieldByIndex* load = new(zone()) LLoadFieldByIndex(object, index);
-  LInstruction* result = DefineSameAsFirst(load);
-  return AssignPointerMap(result);
+  return DefineSameAsFirst(new(zone()) LLoadFieldByIndex(object, index));
 }

@@ -5641,15 +5641,6 @@ Handle<JSObject> JSObject::Copy(Handle<JSObject> object) {
 }
-Handle<Object> JSObject::FastPropertyAt(Handle<JSObject> object,
-                                        Representation representation,
-                                        int index) {
-  Isolate* isolate = object->GetIsolate();
-  CALL_HEAP_FUNCTION(isolate,
-                     object->FastPropertyAt(representation, index), Object);
-}
 template<class ContextObject>
 class JSObjectWalkVisitor {
  public:

@@ -2616,9 +2616,6 @@ class JSObject: public JSReceiver {
   MUST_USE_RESULT inline MaybeObject* FastPropertyAt(
       Representation representation,
       int index);
-  static Handle<Object> FastPropertyAt(Handle<JSObject> object,
-                                       Representation representation,
-                                       int index);
   inline Object* RawFastPropertyAt(int index);
   inline void FastPropertyAtPut(int index, Object* value);

@@ -14508,19 +14508,6 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_NotifyContextDisposed) {
 }
-RUNTIME_FUNCTION(MaybeObject*, Runtime_LoadMutableDouble) {
-  HandleScope scope(isolate);
-  ASSERT(args.length() == 2);
-  CONVERT_ARG_HANDLE_CHECKED(JSObject, object, 0);
-  CONVERT_ARG_HANDLE_CHECKED(Smi, index, 1);
-  int idx = index->value() >> 1;
-  if (idx < 0) {
-    idx = -idx + object->map()->inobject_properties() - 1;
-  }
-  return *JSObject::FastPropertyAt(object, Representation::Double(), idx);
-}
 RUNTIME_FUNCTION(MaybeObject*, Runtime_TryMigrateInstance) {
   HandleScope scope(isolate);
   ASSERT(args.length() == 1);

@@ -100,7 +100,6 @@ namespace internal {
   F(DebugCallbackSupportsStepping, 1, 1) \
   F(DebugPrepareStepInIfStepping, 1, 1) \
   F(FlattenString, 1, 1) \
-  F(LoadMutableDouble, 2, 1) \
   F(TryMigrateInstance, 1, 1) \
   F(NotifyContextDisposed, 0, 1) \
   \

@@ -5564,55 +5564,11 @@ void LCodeGen::DoCheckMapValue(LCheckMapValue* instr) {
 }
-void LCodeGen::DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr,
-                                           Register object,
-                                           Register index) {
-  PushSafepointRegistersScope scope(this);
-  __ Push(object);
-  __ Push(index);
-  __ xorp(rsi, rsi);
-  __ CallRuntimeSaveDoubles(Runtime::kLoadMutableDouble);
-  RecordSafepointWithRegisters(
-      instr->pointer_map(), 1, Safepoint::kNoLazyDeopt);
-  __ StoreToSafepointRegisterSlot(object, rax);
-}
 void LCodeGen::DoLoadFieldByIndex(LLoadFieldByIndex* instr) {
-  class DeferredLoadMutableDouble V8_FINAL : public LDeferredCode {
-   public:
-    DeferredLoadMutableDouble(LCodeGen* codegen,
-                              LLoadFieldByIndex* instr,
-                              Register object,
-                              Register index)
-        : LDeferredCode(codegen),
-          instr_(instr),
-          object_(object),
-          index_(index) {
-    }
-    virtual void Generate() V8_OVERRIDE {
-      codegen()->DoDeferredLoadMutableDouble(instr_, object_, index_);
-    }
-    virtual LInstruction* instr() V8_OVERRIDE { return instr_; }
-   private:
-    LLoadFieldByIndex* instr_;
-    Register object_;
-    Register index_;
-  };
   Register object = ToRegister(instr->object());
   Register index = ToRegister(instr->index());
-  DeferredLoadMutableDouble* deferred;
-  deferred = new(zone()) DeferredLoadMutableDouble(this, instr, object, index);
   Label out_of_object, done;
-  __ Move(kScratchRegister, Smi::FromInt(1));
-  __ testp(index, kScratchRegister);
-  __ j(not_zero, deferred->entry());
-  __ sarp(index, Immediate(1));
   __ SmiToInteger32(index, index);
   __ cmpl(index, Immediate(0));
   __ j(less, &out_of_object, Label::kNear);
@@ -5630,7 +5586,6 @@ void LCodeGen::DoLoadFieldByIndex(LLoadFieldByIndex* instr) {
                                index,
                                times_pointer_size,
                                FixedArray::kHeaderSize - kPointerSize));
-  __ bind(deferred->exit());
   __ bind(&done);
 }

@@ -116,9 +116,6 @@ class LCodeGen: public LCodeGenBase {
   void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
                                        Label* map_check);
   void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);
-  void DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr,
-                                   Register object,
-                                   Register index);
   // Parallel move support.
   void DoParallelMove(LParallelMove* move);

@@ -2589,9 +2589,7 @@ LInstruction* LChunkBuilder::DoCheckMapValue(HCheckMapValue* instr) {
 LInstruction* LChunkBuilder::DoLoadFieldByIndex(HLoadFieldByIndex* instr) {
   LOperand* object = UseRegister(instr->object());
   LOperand* index = UseTempRegister(instr->index());
-  LLoadFieldByIndex* load = new(zone()) LLoadFieldByIndex(object, index);
-  LInstruction* result = DefineSameAsFirst(load);
-  return AssignPointerMap(result);
+  return DefineSameAsFirst(new(zone()) LLoadFieldByIndex(object, index));
 }

@@ -182,9 +182,6 @@ var knownProblems = {
   // Only applicable to TypedArrays.
   "_TypedArrayInitialize": true,
-  // Only applicable to loading mutable doubles.
-  "LoadMutableDouble": true,
   // Only applicable to generators.
   "_GeneratorNext": true,
   "_GeneratorThrow": true,

@@ -1,22 +0,0 @@
-// Copyright 2014 the V8 project authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-// Flags: --allow-natives-syntax
-
-var o = {a:1.5, b:{}};
-
-function f(o) {
-  var result = [];
-  for (var k in o) {
-    result[result.length] = o[k];
-  }
-  return result;
-}
-
-f(o);
-f(o);
-%OptimizeFunctionOnNextCall(f);
-var array = f(o);
-o.a = 1.7;
-assertEquals(1.5, array[0]);