From 6115a006fdfc285e2a8b76590b95648bfd303474 Mon Sep 17 00:00:00 2001
From: "mvstanton@chromium.org" <mvstanton@chromium.org@ce2b1a6d-e550-0410-aec6-3dcde31c8c00>
Date: Thu, 6 Mar 2014 13:07:51 +0000
Subject: [PATCH] Bugfix for 349874: we incorrectly believed we saw a growing
 store

When we store to an out-of-bounds array index, the index might be so
large that it causes the array to go to dictionary mode. It's better to
avoid "learning" that this was a growing store in that case.

This fix also partially reverts a fix for bug 347543, as this fix is
comprehensive and satisfies that repro case as well (partial revert of
r19591).

BUG=349874
LOG=N
R=verwaest@chromium.org

Review URL: https://codereview.chromium.org/188643002

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@19691 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
---
 src/a64/lithium-codegen-a64.cc         |  6 +-----
 src/arm/lithium-codegen-arm.cc         |  6 +-----
 src/ia32/lithium-codegen-ia32.cc       |  6 +-----
 src/ic.cc                              |  7 ++++++-
 src/mips/lithium-codegen-mips.cc       |  6 +-----
 src/objects.cc                         | 15 +++++++++++++++
 src/objects.h                          |  3 +++
 src/x64/lithium-codegen-x64.cc         |  6 +-----
 test/mjsunit/regress/regress-349885.js | 15 +++++++++++++++
 9 files changed, 44 insertions(+), 26 deletions(-)
 create mode 100644 test/mjsunit/regress/regress-349885.js

diff --git a/src/a64/lithium-codegen-a64.cc b/src/a64/lithium-codegen-a64.cc
index 0148349a14..7a93754857 100644
--- a/src/a64/lithium-codegen-a64.cc
+++ b/src/a64/lithium-codegen-a64.cc
@@ -1489,11 +1489,7 @@ void LCodeGen::DoAllocate(LAllocate* instr) {
 
   if (instr->size()->IsConstantOperand()) {
     int32_t size = ToInteger32(LConstantOperand::cast(instr->size()));
-    if (size <= Page::kMaxRegularHeapObjectSize) {
-      __ Allocate(size, result, temp1, temp2, deferred->entry(), flags);
-    } else {
-      __ B(deferred->entry());
-    }
+    __ Allocate(size, result, temp1, temp2, deferred->entry(), flags);
   } else {
     Register size = ToRegister32(instr->size());
     __ Sxtw(size.X(), size);
diff --git a/src/arm/lithium-codegen-arm.cc b/src/arm/lithium-codegen-arm.cc
index dd012af579..2edf4fcefb 100644
--- a/src/arm/lithium-codegen-arm.cc
+++ b/src/arm/lithium-codegen-arm.cc
@@ -5262,11 +5262,7 @@ void LCodeGen::DoAllocate(LAllocate* instr) {
 
   if (instr->size()->IsConstantOperand()) {
     int32_t size = ToInteger32(LConstantOperand::cast(instr->size()));
-    if (size <= Page::kMaxRegularHeapObjectSize) {
-      __ Allocate(size, result, scratch, scratch2, deferred->entry(), flags);
-    } else {
-      __ jmp(deferred->entry());
-    }
+    __ Allocate(size, result, scratch, scratch2, deferred->entry(), flags);
   } else {
     Register size = ToRegister(instr->size());
     __ Allocate(size,
diff --git a/src/ia32/lithium-codegen-ia32.cc b/src/ia32/lithium-codegen-ia32.cc
index 3e8224b24f..54bc3382e9 100644
--- a/src/ia32/lithium-codegen-ia32.cc
+++ b/src/ia32/lithium-codegen-ia32.cc
@@ -5789,11 +5789,7 @@ void LCodeGen::DoAllocate(LAllocate* instr) {
 
   if (instr->size()->IsConstantOperand()) {
     int32_t size = ToInteger32(LConstantOperand::cast(instr->size()));
-    if (size <= Page::kMaxRegularHeapObjectSize) {
-      __ Allocate(size, result, temp, no_reg, deferred->entry(), flags);
-    } else {
-      __ jmp(deferred->entry());
-    }
+    __ Allocate(size, result, temp, no_reg, deferred->entry(), flags);
   } else {
     Register size = ToRegister(instr->size());
     __ Allocate(size, result, temp, no_reg, deferred->entry(), flags);
diff --git a/src/ic.cc b/src/ic.cc
index 3c9f1b6692..a5a7ba2a96 100644
--- a/src/ic.cc
+++ b/src/ic.cc
@@ -1701,7 +1701,12 @@ MaybeObject* KeyedStoreIC::Store(Handle<Object> object,
         if (!(receiver->map()->DictionaryElementsInPrototypeChainOnly())) {
           KeyedAccessStoreMode store_mode = GetStoreMode(receiver, key, value);
-          stub = StoreElementStub(receiver, store_mode);
+          // Use the generic stub if the store would send the receiver to
+          // dictionary mode.
+          if (!IsGrowStoreMode(store_mode) ||
+              !receiver->WouldConvertToSlowElements(key)) {
+            stub = StoreElementStub(receiver, store_mode);
+          }
         }
       }
     }
diff --git a/src/mips/lithium-codegen-mips.cc b/src/mips/lithium-codegen-mips.cc
index b9607dea7d..72f3296f54 100644
--- a/src/mips/lithium-codegen-mips.cc
+++ b/src/mips/lithium-codegen-mips.cc
@@ -5217,11 +5217,7 @@ void LCodeGen::DoAllocate(LAllocate* instr) {
   }
   if (instr->size()->IsConstantOperand()) {
     int32_t size = ToInteger32(LConstantOperand::cast(instr->size()));
-    if (size <= Page::kMaxRegularHeapObjectSize) {
-      __ Allocate(size, result, scratch, scratch2, deferred->entry(), flags);
-    } else {
-      __ jmp(deferred->entry());
-    }
+    __ Allocate(size, result, scratch, scratch2, deferred->entry(), flags);
   } else {
     Register size = ToRegister(instr->size());
     __ Allocate(size,
diff --git a/src/objects.cc b/src/objects.cc
index 2a4cfe30a6..d06f916d20 100644
--- a/src/objects.cc
+++ b/src/objects.cc
@@ -13113,6 +13113,21 @@ void JSObject::GetElementsCapacityAndUsage(int* capacity, int* used) {
 }
 
 
+bool JSObject::WouldConvertToSlowElements(Handle<Object> key) {
+  uint32_t index;
+  if (HasFastElements() && key->ToArrayIndex(&index)) {
+    Handle<FixedArrayBase> backing_store(FixedArrayBase::cast(elements()));
+    uint32_t capacity = static_cast<uint32_t>(backing_store->length());
+    if (index >= capacity) {
+      if ((index - capacity) >= kMaxGap) return true;
+      uint32_t new_capacity = NewElementsCapacity(index + 1);
+      return ShouldConvertToSlowElements(new_capacity);
+    }
+  }
+  return false;
+}
+
+
 bool JSObject::ShouldConvertToSlowElements(int new_capacity) {
   STATIC_ASSERT(kMaxUncheckedOldFastElementsLength <=
                 kMaxUncheckedFastElementsLength);
diff --git a/src/objects.h b/src/objects.h
index f90662fc6c..17a60fb4e2 100644
--- a/src/objects.h
+++ b/src/objects.h
@@ -2412,6 +2412,9 @@ class JSObject: public JSReceiver {
                                       uint32_t arg_count,
                                       EnsureElementsMode mode);
 
+  // Would we convert a fast elements array to dictionary mode given
+  // an access at key?
+  bool WouldConvertToSlowElements(Handle<Object> key);
   // Do we want to keep the elements in fast case when increasing the
   // capacity?
   bool ShouldConvertToSlowElements(int new_capacity);
diff --git a/src/x64/lithium-codegen-x64.cc b/src/x64/lithium-codegen-x64.cc
index 36ebcad53c..383cf3783f 100644
--- a/src/x64/lithium-codegen-x64.cc
+++ b/src/x64/lithium-codegen-x64.cc
@@ -5085,11 +5085,7 @@ void LCodeGen::DoAllocate(LAllocate* instr) {
 
   if (instr->size()->IsConstantOperand()) {
     int32_t size = ToInteger32(LConstantOperand::cast(instr->size()));
-    if (size <= Page::kMaxRegularHeapObjectSize) {
-      __ Allocate(size, result, temp, no_reg, deferred->entry(), flags);
-    } else {
-      __ jmp(deferred->entry());
-    }
+    __ Allocate(size, result, temp, no_reg, deferred->entry(), flags);
   } else {
     Register size = ToRegister(instr->size());
     __ Allocate(size, result, temp, no_reg, deferred->entry(), flags);
diff --git a/test/mjsunit/regress/regress-349885.js b/test/mjsunit/regress/regress-349885.js
new file mode 100644
index 0000000000..dd3e795260
--- /dev/null
+++ b/test/mjsunit/regress/regress-349885.js
@@ -0,0 +1,15 @@
+// Copyright 2014 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Flags: --allow-natives-syntax
+
+// The bug 349874
+
+function foo(a) {
+  a[292755462] = new Object();
+}
+foo(new Array(5));
+foo(new Array(5));
+%OptimizeFunctionOnNextCall(foo);
+foo(new Array(10));
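
For context on the new check, the sketch below is a standalone illustration
(plain C++, not V8 source) of the decision KeyedStoreIC::Store now makes
before caching a grow-mode StoreElementStub. The value of kMaxGap and the
sparseness formula inside ShouldConvertToSlowElements() are simplified,
assumed stand-ins for V8's real heuristics; only the shape of the check
mirrors the new JSObject::WouldConvertToSlowElements().

// Standalone sketch, not V8 source: models the check the IC now performs
// before "learning" that a keyed store is a growing store.
#include <stdint.h>
#include <iostream>

namespace {

const uint32_t kMaxGap = 1024;  // assumed: largest hole tolerated past the end

// Rough stand-in for JSObject::ShouldConvertToSlowElements(new_capacity):
// keep fast elements only while the backing store is not far larger than the
// number of elements actually in use.
bool ShouldConvertToSlowElements(uint32_t used, uint32_t new_capacity) {
  return new_capacity > 2 * (used + 1) + kMaxGap;
}

// Shape of the new JSObject::WouldConvertToSlowElements(key): an out-of-bounds
// store converts the receiver to dictionary mode if it leaves a gap of kMaxGap
// or more, or if the grown backing store would be too sparse.
bool WouldConvertToSlowElements(uint32_t index, uint32_t capacity,
                                uint32_t used) {
  if (index < capacity) return false;            // in-bounds: never converts
  if (index - capacity >= kMaxGap) return true;  // huge hole -> dictionary
  uint32_t new_capacity = index + 1;             // simplified growth policy
  return ShouldConvertToSlowElements(used, new_capacity);
}

}  // namespace

int main() {
  // a[7] on a length-5 array: an ordinary growing store, so caching a
  // grow-mode stub is fine.
  std::cout << WouldConvertToSlowElements(7, 5, 5) << "\n";          // 0
  // a[292755462], the regression test's index: the receiver would go to
  // dictionary mode, so the IC should fall back to the generic stub instead
  // of recording a growing store.
  std::cout << WouldConvertToSlowElements(292755462, 5, 5) << "\n";  // 1
  return 0;
}

With the regression test's index the gap check fires immediately, which is why
the IC keeps the generic stub. The lithium-codegen-*.cc hunks drop the size
guard added for bug 347543; per the commit message, this check makes that
earlier, narrower fix unnecessary.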