[test] Move cctest/test-dictionary to unittests/

... objects/dictionary-unittest.

Bug: v8:12781
Change-Id: I7faeb9fedf7857b25a85bf32f14323ba3da207c0
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3706968
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Commit-Queue: 王澳 <wangao.james@bytedance.com>
Cr-Commit-Position: refs/heads/main@{#81395}
Authored by jameslahm on 2022-06-20 08:42:38 +08:00; committed by V8 LUCI CQ
parent 471e739b93
commit 9f783f229b
6 changed files with 437 additions and 325 deletions
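
The heart of the port is the pattern repeated across the diffs below: cctest's free-standing TEST macros, which pull the isolate out of CcTest statics, become gtest TEST_F methods on a fixture that provides isolate(), heap(), and a context. A side-by-side sketch, assembled from the two versions that appear verbatim in this commit:

    // Before (cctest): free function; isolate comes from CcTest statics.
    TEST(HashMap) {
      LocalContext context;
      v8::HandleScope scope(context->GetIsolate());
      Isolate* isolate = CcTest::i_isolate();
      TestHashMap(ObjectHashTable::New(isolate, 23));
    }

    // After (unittests): fixture method; TestWithHeapInternalsAndContext
    // supplies isolate(), heap(), and the context, so no manual scoping.
    TEST_F(DictionaryTest, HashMap) {
      TestHashMap(ObjectHashTable::New(isolate(), 23));
    }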

File: test/cctest/BUILD.gn

@@ -179,7 +179,6 @@ v8_source_set("cctest_sources") {
"test-debug-helper.cc",
"test-debug.cc",
"test-descriptor-array.cc",
"test-dictionary.cc",
"test-disasm-regex-helper.cc",
"test-disasm-regex-helper.h",
"test-feedback-vector.cc",

File: test/cctest/test-dictionary.cc

@@ -1,324 +0,0 @@
// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include "src/init/v8.h"
#include "test/cctest/cctest.h"
#include "src/builtins/builtins-constructor.h"
#include "src/debug/debug.h"
#include "src/execution/execution.h"
#include "src/handles/global-handles.h"
#include "src/heap/factory.h"
#include "src/heap/spaces.h"
#include "src/objects/hash-table-inl.h"
#include "src/objects/objects-inl.h"
#include "src/roots/roots.h"
#include "test/cctest/heap/heap-utils.h"
namespace v8 {
namespace internal {
namespace {
template<typename HashMap>
static void TestHashMap(Handle<HashMap> table) {
Isolate* isolate = CcTest::i_isolate();
Factory* factory = isolate->factory();
Handle<JSObject> a = factory->NewJSArray(7);
Handle<JSObject> b = factory->NewJSArray(11);
table = HashMap::Put(table, a, b);
CHECK_EQ(1, table->NumberOfElements());
CHECK_EQ(table->Lookup(a), *b);
// When the key does not exist in the map, Lookup returns the hole.
ReadOnlyRoots roots(CcTest::heap());
CHECK_EQ(table->Lookup(b), roots.the_hole_value());
// Keys still have to be valid after objects were moved.
CcTest::CollectGarbage(NEW_SPACE);
CHECK_EQ(1, table->NumberOfElements());
CHECK_EQ(table->Lookup(a), *b);
CHECK_EQ(table->Lookup(b), roots.the_hole_value());
// Keys that are overwritten should not change the number of elements.
table = HashMap::Put(table, a, factory->NewJSArray(13));
CHECK_EQ(1, table->NumberOfElements());
CHECK_NE(table->Lookup(a), *b);
// Keys that have been removed are mapped to the hole.
bool was_present = false;
table = HashMap::Remove(isolate, table, a, &was_present);
CHECK(was_present);
CHECK_EQ(0, table->NumberOfElements());
CHECK_EQ(table->Lookup(a), roots.the_hole_value());
// Keys should map back to their respective values and also should get
// an identity hash code generated.
for (int i = 0; i < 100; i++) {
Handle<JSReceiver> key = factory->NewJSArray(7);
Handle<JSObject> value = factory->NewJSArray(11);
table = HashMap::Put(table, key, value);
CHECK_EQ(table->NumberOfElements(), i + 1);
CHECK(table->FindEntry(isolate, key).is_found());
CHECK_EQ(table->Lookup(key), *value);
CHECK(key->GetIdentityHash().IsSmi());
}
// Keys that were never added to the map but already have an identity
// hash code should not be found.
for (int i = 0; i < 100; i++) {
Handle<JSReceiver> key = factory->NewJSArray(7);
CHECK(key->GetOrCreateIdentityHash(isolate).IsSmi());
CHECK(table->FindEntry(isolate, key).is_not_found());
CHECK_EQ(table->Lookup(key), roots.the_hole_value());
CHECK(key->GetIdentityHash().IsSmi());
}
// Keys that don't have an identity hash should not be found and also
// should not get an identity hash code generated.
for (int i = 0; i < 100; i++) {
Handle<JSReceiver> key = factory->NewJSArray(7);
CHECK_EQ(table->Lookup(key), roots.the_hole_value());
Object identity_hash = key->GetIdentityHash();
CHECK_EQ(roots.undefined_value(), identity_hash);
}
}
TEST(HashMap) {
LocalContext context;
v8::HandleScope scope(context->GetIsolate());
Isolate* isolate = CcTest::i_isolate();
TestHashMap(ObjectHashTable::New(isolate, 23));
}
template <typename HashSet>
static void TestHashSet(Handle<HashSet> table) {
Isolate* isolate = CcTest::i_isolate();
Factory* factory = isolate->factory();
Handle<JSObject> a = factory->NewJSArray(7);
Handle<JSObject> b = factory->NewJSArray(11);
table = HashSet::Add(isolate, table, a);
CHECK_EQ(1, table->NumberOfElements());
CHECK(table->Has(isolate, a));
CHECK(!table->Has(isolate, b));
// Keys still have to be valid after objects were moved.
CcTest::CollectGarbage(NEW_SPACE);
CHECK_EQ(1, table->NumberOfElements());
CHECK(table->Has(isolate, a));
CHECK(!table->Has(isolate, b));
// Keys that are overwritten should not change the number of elements.
table = HashSet::Add(isolate, table, a);
CHECK_EQ(1, table->NumberOfElements());
CHECK(table->Has(isolate, a));
CHECK(!table->Has(isolate, b));
// Keys that have been removed are mapped to the hole.
// TODO(cbruni): not implemented yet.
// bool was_present = false;
// table = HashSet::Remove(table, a, &was_present);
// CHECK(was_present);
// CHECK_EQ(0, table->NumberOfElements());
// CHECK(!table->Has(a));
// CHECK(!table->Has(b));
// Keys should be found in the set and should also get an identity
// hash code generated.
for (int i = 0; i < 100; i++) {
Handle<JSReceiver> key = factory->NewJSArray(7);
table = HashSet::Add(isolate, table, key);
CHECK_EQ(table->NumberOfElements(), i + 2);
CHECK(table->Has(isolate, key));
CHECK(key->GetIdentityHash().IsSmi());
}
// Keys that were never added to the set but already have an identity
// hash code should not be found.
for (int i = 0; i < 100; i++) {
Handle<JSReceiver> key = factory->NewJSArray(7);
CHECK(key->GetOrCreateIdentityHash(isolate).IsSmi());
CHECK(!table->Has(isolate, key));
CHECK(key->GetIdentityHash().IsSmi());
}
// Keys that don't have an identity hash should not be found and also
// should not get an identity hash code generated.
for (int i = 0; i < 100; i++) {
Handle<JSReceiver> key = factory->NewJSArray(7);
CHECK(!table->Has(isolate, key));
Object identity_hash = key->GetIdentityHash();
CHECK_EQ(ReadOnlyRoots(CcTest::heap()).undefined_value(), identity_hash);
}
}
TEST(HashSet) {
LocalContext context;
v8::HandleScope scope(context->GetIsolate());
Isolate* isolate = CcTest::i_isolate();
TestHashSet(ObjectHashSet::New(isolate, 23));
}
class ObjectHashTableTest: public ObjectHashTable {
public:
explicit ObjectHashTableTest(ObjectHashTable o) : ObjectHashTable(o) {}
void insert(InternalIndex entry, int key, int value) {
set(EntryToIndex(entry), Smi::FromInt(key));
set(EntryToIndex(entry) + 1, Smi::FromInt(value));
}
int lookup(int key) {
Handle<Object> key_obj(Smi::FromInt(key), CcTest::i_isolate());
return Smi::ToInt(Lookup(key_obj));
}
int capacity() {
return Capacity();
}
};
TEST(HashTableRehash) {
LocalContext context;
Isolate* isolate = CcTest::i_isolate();
v8::HandleScope scope(context->GetIsolate());
// Test almost filled table.
{
Handle<ObjectHashTable> table = ObjectHashTable::New(isolate, 100);
ObjectHashTableTest t(*table);
int capacity = t.capacity();
for (int i = 0; i < capacity - 1; i++) {
t.insert(InternalIndex(i), i * i, i);
}
t.Rehash(isolate);
for (int i = 0; i < capacity - 1; i++) {
CHECK_EQ(i, t.lookup(i * i));
}
}
// Test half-filled table.
{
Handle<ObjectHashTable> table = ObjectHashTable::New(isolate, 100);
ObjectHashTableTest t(*table);
int capacity = t.capacity();
for (int i = 0; i < capacity / 2; i++) {
t.insert(InternalIndex(i), i * i, i);
}
t.Rehash(isolate);
for (int i = 0; i < capacity / 2; i++) {
CHECK_EQ(i, t.lookup(i * i));
}
}
}
#ifdef DEBUG
template<class HashSet>
static void TestHashSetCausesGC(Handle<HashSet> table) {
Isolate* isolate = CcTest::i_isolate();
Factory* factory = isolate->factory();
Handle<JSObject> key = factory->NewJSArray(0);
// Simulate a full heap so that generating an identity hash code
// in subsequent calls will request GC.
heap::SimulateFullSpace(CcTest::heap()->new_space());
heap::SimulateFullSpace(CcTest::heap()->old_space());
// Calling Contains() should not cause GC ever.
int gc_count = isolate->heap()->gc_count();
CHECK(!table->Contains(key));
CHECK(gc_count == isolate->heap()->gc_count());
// Calling Remove() will not cause GC in this case.
bool was_present = false;
table = HashSet::Remove(table, key, &was_present);
CHECK(!was_present);
CHECK(gc_count == isolate->heap()->gc_count());
// Calling Add() should cause GC.
table = HashSet::Add(table, key);
CHECK(gc_count < isolate->heap()->gc_count());
}
#endif
#ifdef DEBUG
template <class HashMap>
static void TestHashMapDoesNotCauseGC(Handle<HashMap> table) {
Isolate* isolate = CcTest::i_isolate();
Factory* factory = isolate->factory();
Handle<JSObject> key = factory->NewJSArray(0);
// Even though we simulate a full heap, generating an identity hash
// code in subsequent calls will not request GC.
if (!FLAG_single_generation) {
heap::SimulateFullSpace(CcTest::heap()->new_space());
}
heap::SimulateFullSpace(CcTest::heap()->old_space());
// Calling Lookup() should not cause GC ever.
CHECK(table->Lookup(key).IsTheHole(isolate));
// Calling Put() should not cause GC in this case either.
int gc_count = isolate->heap()->gc_count();
HashMap::Put(table, key, key);
CHECK(gc_count == isolate->heap()->gc_count());
}
TEST(ObjectHashTableCausesGC) {
i::FLAG_stress_compaction = false;
// For SimulateFullSpace in TestHashMapDoesNotCauseGC.
i::FLAG_stress_concurrent_allocation = false;
LocalContext context;
v8::HandleScope scope(context->GetIsolate());
Isolate* isolate = CcTest::i_isolate();
TestHashMapDoesNotCauseGC(ObjectHashTable::New(isolate, 1));
}
#endif
TEST(MaximumClonedShallowObjectProperties) {
// Assert that a NameDictionary with kMaximumClonedShallowObjectProperties is
// not in large-object space.
const int max_capacity = NameDictionary::ComputeCapacity(
ConstructorBuiltins::kMaximumClonedShallowObjectProperties);
const InternalIndex max_literal_entry(max_capacity /
NameDictionary::kEntrySize);
const int max_literal_index = NameDictionary::EntryToIndex(max_literal_entry);
CHECK_LE(NameDictionary::OffsetOfElementAt(max_literal_index),
kMaxRegularHeapObjectSize);
}
} // namespace
} // namespace internal
} // namespace v8
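
One substantive signature change in the port is worth noting before the new files: the helpers can no longer reach the isolate through CcTest statics, so ObjectHashTableTest::lookup() now takes one explicitly (both versions appear verbatim in this commit):

    // cctest version:
    int lookup(int key) {
      Handle<Object> key_obj(Smi::FromInt(key), CcTest::i_isolate());
      return Smi::ToInt(Lookup(key_obj));
    }

    // unittests version:
    int lookup(int key, Isolate* isolate) {
      Handle<Object> key_obj(Smi::FromInt(key), isolate);
      return Smi::ToInt(Lookup(key_obj));
    }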

File: test/unittests/BUILD.gn

@@ -426,6 +426,7 @@ v8_source_set("unittests_sources") {
"objects/concurrent-script-context-table-unittest.cc",
"objects/concurrent-string-unittest.cc",
"objects/concurrent-transition-array-unittest.cc",
"objects/dictionary-unittest.cc",
"objects/elements-kind-unittest.cc",
"objects/global-object-unittest.cc",
"objects/hashcode-unittest.cc",

File: test/unittests/heap/heap-utils.cc

@@ -38,5 +38,120 @@ void HeapInternalsBase::SimulateIncrementalMarking(Heap* heap,
CHECK(marking->IsComplete());
}
void HeapInternalsBase::SimulateFullSpace(
v8::internal::NewSpace* space,
std::vector<Handle<FixedArray>>* out_handles) {
// If you see this check failing, disable the flag at the start of your test:
// FLAG_stress_concurrent_allocation = false;
// Background thread allocating concurrently interferes with this function.
CHECK(!FLAG_stress_concurrent_allocation);
while (FillCurrentPage(space, out_handles) || space->AddFreshPage()) {
}
}
void HeapInternalsBase::SimulateFullSpace(v8::internal::PagedSpace* space) {
// If you see this check failing, disable the flag at the start of your test:
// FLAG_stress_concurrent_allocation = false;
// Background thread allocating concurrently interferes with this function.
CHECK(!FLAG_stress_concurrent_allocation);
CodePageCollectionMemoryModificationScopeForTesting code_scope(space->heap());
i::MarkCompactCollector* collector = space->heap()->mark_compact_collector();
if (collector->sweeping_in_progress()) {
collector->EnsureSweepingCompleted(
MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
}
space->FreeLinearAllocationArea();
space->ResetFreeList();
}
bool HeapInternalsBase::FillCurrentPage(
v8::internal::NewSpace* space,
std::vector<Handle<FixedArray>>* out_handles) {
return FillCurrentPageButNBytes(space, 0, out_handles);
}
namespace {
int GetSpaceRemainingOnCurrentPage(v8::internal::NewSpace* space) {
Address top = space->top();
if ((top & kPageAlignmentMask) == 0) {
// `top` pointing to the start of a page signifies that there is no room
// left on the current page.
return 0;
}
return static_cast<int>(Page::FromAddress(space->top())->area_end() - top);
}
} // namespace
bool HeapInternalsBase::FillCurrentPageButNBytes(
v8::internal::NewSpace* space, int extra_bytes,
std::vector<Handle<FixedArray>>* out_handles) {
PauseAllocationObserversScope pause_observers(space->heap());
// We cannot rely on `space->limit()` to point to the end of the current page
// when inline allocations are disabled; in that case it points to the
// current allocation pointer instead.
DCHECK_IMPLIES(!space->IsInlineAllocationEnabled(),
space->limit() == space->top());
int space_remaining = GetSpaceRemainingOnCurrentPage(space);
CHECK(space_remaining >= extra_bytes);
int new_linear_size = space_remaining - extra_bytes;
if (new_linear_size == 0) return false;
std::vector<Handle<FixedArray>> handles =
CreatePadding(space->heap(), space_remaining, i::AllocationType::kYoung);
if (out_handles != nullptr) {
out_handles->insert(out_handles->end(), handles.begin(), handles.end());
}
return true;
}
int HeapInternalsBase::FixedArrayLenFromSize(int size) {
return std::min({(size - FixedArray::kHeaderSize) / kTaggedSize,
FixedArray::kMaxRegularLength});
}
std::vector<Handle<FixedArray>> HeapInternalsBase::CreatePadding(
Heap* heap, int padding_size, AllocationType allocation, int object_size) {
std::vector<Handle<FixedArray>> handles;
Isolate* isolate = heap->isolate();
int allocate_memory;
int length;
int free_memory = padding_size;
if (allocation == i::AllocationType::kOld) {
heap->old_space()->FreeLinearAllocationArea();
int overall_free_memory = static_cast<int>(heap->old_space()->Available());
CHECK(padding_size <= overall_free_memory || overall_free_memory == 0);
} else {
int overall_free_memory = static_cast<int>(heap->new_space()->Available());
CHECK(padding_size <= overall_free_memory || overall_free_memory == 0);
}
while (free_memory > 0) {
if (free_memory > object_size) {
allocate_memory = object_size;
length = FixedArrayLenFromSize(allocate_memory);
} else {
allocate_memory = free_memory;
length = FixedArrayLenFromSize(allocate_memory);
if (length <= 0) {
// Not enough room to create another FixedArray, so create a filler.
if (allocation == i::AllocationType::kOld) {
heap->CreateFillerObjectAt(
*heap->old_space()->allocation_top_address(), free_memory);
} else {
heap->CreateFillerObjectAt(
*heap->new_space()->allocation_top_address(), free_memory);
}
break;
}
}
handles.push_back(isolate->factory()->NewFixedArray(length, allocation));
CHECK((allocation == AllocationType::kYoung &&
heap->new_space()->Contains(*handles.back())) ||
(allocation == AllocationType::kOld &&
heap->InOldSpace(*handles.back())) ||
FLAG_single_generation);
free_memory -= handles.back()->Size();
}
return handles;
}
} // namespace internal
} // namespace v8
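
These ported helpers are what the dictionary test's GC scenarios rely on. A minimal usage sketch, assuming a fixture derived from TestWithHeapInternalsAndContext (the fixture and test names here are illustrative, not part of this commit):

    // Illustrative only: fills both generations so that the next allocation
    // in either space would have to request a GC.
    TEST_F(MyHeapTest, FillBothSpacesSketch) {
      FLAG_stress_concurrent_allocation = false;  // demanded by the CHECKs above
      std::vector<Handle<FixedArray>> keep_alive;
      SimulateFullSpace(heap()->new_space(), &keep_alive);  // pads every young page
      SimulateFullSpace(heap()->old_space());  // drops the LAB and free lists
    }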

File: test/unittests/heap/heap-utils.h

@@ -17,6 +17,19 @@ namespace internal {
class HeapInternalsBase {
protected:
void SimulateIncrementalMarking(Heap* heap, bool force_completion);
void SimulateFullSpace(
v8::internal::NewSpace* space,
std::vector<Handle<FixedArray>>* out_handles = nullptr);
void SimulateFullSpace(v8::internal::PagedSpace* space);
bool FillCurrentPageButNBytes(
v8::internal::NewSpace* space, int extra_bytes,
std::vector<Handle<FixedArray>>* out_handles = nullptr);
bool FillCurrentPage(v8::internal::NewSpace* space,
std::vector<Handle<FixedArray>>* out_handles = nullptr);
std::vector<Handle<FixedArray>> CreatePadding(
Heap* heap, int padding_size, AllocationType allocation,
int object_size = kMaxRegularHeapObjectSize);
int FixedArrayLenFromSize(int size);
};
template <typename TMixin>
@@ -44,6 +57,15 @@ class WithHeapInternals : public TMixin, HeapInternalsBase {
return HeapInternalsBase::SimulateIncrementalMarking(heap(),
force_completion);
}
void SimulateFullSpace(
v8::internal::NewSpace* space,
std::vector<Handle<FixedArray>>* out_handles = nullptr) {
return HeapInternalsBase::SimulateFullSpace(space, out_handles);
}
void SimulateFullSpace(v8::internal::PagedSpace* space) {
return HeapInternalsBase::SimulateFullSpace(space);
}
};
using TestWithHeapInternals = //

File: test/unittests/objects/dictionary-unittest.cc

@@ -0,0 +1,299 @@
// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include "src/builtins/builtins-constructor.h"
#include "src/debug/debug.h"
#include "src/execution/execution.h"
#include "src/handles/global-handles.h"
#include "src/heap/factory.h"
#include "src/heap/spaces.h"
#include "src/init/v8.h"
#include "src/objects/hash-table-inl.h"
#include "src/objects/objects-inl.h"
#include "src/roots/roots.h"
#include "test/unittests/heap/heap-utils.h"
namespace v8 {
namespace internal {
class DictionaryTest : public TestWithHeapInternalsAndContext {
public:
template <typename HashMap>
void TestHashMap(Handle<HashMap> table) {
Factory* factory = isolate()->factory();
Handle<JSObject> a = factory->NewJSArray(7);
Handle<JSObject> b = factory->NewJSArray(11);
table = HashMap::Put(table, a, b);
CHECK_EQ(1, table->NumberOfElements());
CHECK_EQ(table->Lookup(a), *b);
// When the key does not exist in the map, Lookup returns the hole.
ReadOnlyRoots roots(heap());
CHECK_EQ(table->Lookup(b), roots.the_hole_value());
// Keys still have to be valid after objects were moved.
CollectGarbage(NEW_SPACE);
CHECK_EQ(1, table->NumberOfElements());
CHECK_EQ(table->Lookup(a), *b);
CHECK_EQ(table->Lookup(b), roots.the_hole_value());
// Keys that are overwritten should not change the number of elements.
table = HashMap::Put(table, a, factory->NewJSArray(13));
CHECK_EQ(1, table->NumberOfElements());
CHECK_NE(table->Lookup(a), *b);
// Keys that have been removed are mapped to the hole.
bool was_present = false;
table = HashMap::Remove(isolate(), table, a, &was_present);
CHECK(was_present);
CHECK_EQ(0, table->NumberOfElements());
CHECK_EQ(table->Lookup(a), roots.the_hole_value());
// Keys should map back to their respective values and also should get
// an identity hash code generated.
for (int i = 0; i < 100; i++) {
Handle<JSReceiver> key = factory->NewJSArray(7);
Handle<JSObject> value = factory->NewJSArray(11);
table = HashMap::Put(table, key, value);
CHECK_EQ(table->NumberOfElements(), i + 1);
CHECK(table->FindEntry(isolate(), key).is_found());
CHECK_EQ(table->Lookup(key), *value);
CHECK(key->GetIdentityHash().IsSmi());
}
// Keys that were never added to the map but already have an identity
// hash code should not be found.
for (int i = 0; i < 100; i++) {
Handle<JSReceiver> key = factory->NewJSArray(7);
CHECK(key->GetOrCreateIdentityHash(isolate()).IsSmi());
CHECK(table->FindEntry(isolate(), key).is_not_found());
CHECK_EQ(table->Lookup(key), roots.the_hole_value());
CHECK(key->GetIdentityHash().IsSmi());
}
// Keys that don't have an identity hash should not be found and also
// should not get an identity hash code generated.
for (int i = 0; i < 100; i++) {
Handle<JSReceiver> key = factory->NewJSArray(7);
CHECK_EQ(table->Lookup(key), roots.the_hole_value());
Object identity_hash = key->GetIdentityHash();
CHECK_EQ(roots.undefined_value(), identity_hash);
}
}
template <typename HashSet>
void TestHashSet(Handle<HashSet> table) {
Factory* factory = isolate()->factory();
Handle<JSObject> a = factory->NewJSArray(7);
Handle<JSObject> b = factory->NewJSArray(11);
table = HashSet::Add(isolate(), table, a);
CHECK_EQ(1, table->NumberOfElements());
CHECK(table->Has(isolate(), a));
CHECK(!table->Has(isolate(), b));
// Keys still have to be valid after objects were moved.
CollectGarbage(NEW_SPACE);
CHECK_EQ(1, table->NumberOfElements());
CHECK(table->Has(isolate(), a));
CHECK(!table->Has(isolate(), b));
// Keys that are overwritten should not change the number of elements.
table = HashSet::Add(isolate(), table, a);
CHECK_EQ(1, table->NumberOfElements());
CHECK(table->Has(isolate(), a));
CHECK(!table->Has(isolate(), b));
// Keys that have been removed are mapped to the hole.
// TODO(cbruni): not implemented yet.
// bool was_present = false;
// table = HashSet::Remove(table, a, &was_present);
// CHECK(was_present);
// CHECK_EQ(0, table->NumberOfElements());
// CHECK(!table->Has(a));
// CHECK(!table->Has(b));
// Keys should be found in the set and should also get an identity
// hash code generated.
for (int i = 0; i < 100; i++) {
Handle<JSReceiver> key = factory->NewJSArray(7);
table = HashSet::Add(isolate(), table, key);
CHECK_EQ(table->NumberOfElements(), i + 2);
CHECK(table->Has(isolate(), key));
CHECK(key->GetIdentityHash().IsSmi());
}
// Keys that were never added to the set but already have an identity
// hash code should not be found.
for (int i = 0; i < 100; i++) {
Handle<JSReceiver> key = factory->NewJSArray(7);
CHECK(key->GetOrCreateIdentityHash(isolate()).IsSmi());
CHECK(!table->Has(isolate(), key));
CHECK(key->GetIdentityHash().IsSmi());
}
// Keys that don't have an identity hash should not be found and also
// should not get an identity hash code generated.
for (int i = 0; i < 100; i++) {
Handle<JSReceiver> key = factory->NewJSArray(7);
CHECK(!table->Has(isolate(), key));
Object identity_hash = key->GetIdentityHash();
CHECK_EQ(ReadOnlyRoots(heap()).undefined_value(), identity_hash);
}
}
#ifdef DEBUG
template <class HashSet>
void TestHashSetCausesGC(Handle<HashSet> table) {
Factory* factory = isolate()->factory();
Handle<JSObject> key = factory->NewJSArray(0);
// Simulate a full heap so that generating an identity hash code
// in subsequent calls will request GC.
SimulateFullSpace(heap()->new_space());
SimulateFullSpace(heap()->old_space());
// Calling Contains() should not cause GC ever.
int gc_count = heap()->gc_count();
CHECK(!table->Contains(key));
CHECK(gc_count == heap()->gc_count());
// Calling Remove() will not cause GC in this case.
bool was_present = false;
table = HashSet::Remove(table, key, &was_present);
CHECK(!was_present);
CHECK(gc_count == heap()->gc_count());
// Calling Add() should cause GC.
table = HashSet::Add(table, key);
CHECK(gc_count < heap()->gc_count());
}
#endif
#ifdef DEBUG
template <class HashMap>
void TestHashMapDoesNotCauseGC(Handle<HashMap> table) {
Factory* factory = isolate()->factory();
Handle<JSObject> key = factory->NewJSArray(0);
// Even though we simulate a full heap, generating an identity hash
// code in subsequent calls will not request GC.
if (!FLAG_single_generation) {
SimulateFullSpace(heap()->new_space());
}
SimulateFullSpace(heap()->old_space());
// Calling Lookup() should not cause GC ever.
CHECK(table->Lookup(key).IsTheHole(isolate()));
// Calling Put() should not cause GC in this case either.
int gc_count = heap()->gc_count();
HashMap::Put(table, key, key);
CHECK(gc_count == heap()->gc_count());
}
#endif
};
TEST_F(DictionaryTest, HashMap) {
TestHashMap(ObjectHashTable::New(isolate(), 23));
}
TEST_F(DictionaryTest, HashSet) {
TestHashSet(ObjectHashSet::New(isolate(), 23));
}
class ObjectHashTableTest : public ObjectHashTable {
public:
explicit ObjectHashTableTest(ObjectHashTable o) : ObjectHashTable(o) {}
void insert(InternalIndex entry, int key, int value) {
set(EntryToIndex(entry), Smi::FromInt(key));
set(EntryToIndex(entry) + 1, Smi::FromInt(value));
}
int lookup(int key, Isolate* isolate) {
Handle<Object> key_obj(Smi::FromInt(key), isolate);
return Smi::ToInt(Lookup(key_obj));
}
int capacity() { return Capacity(); }
};
TEST_F(DictionaryTest, HashTableRehash) {
// Test almost filled table.
{
Handle<ObjectHashTable> table = ObjectHashTable::New(isolate(), 100);
ObjectHashTableTest t(*table);
int capacity = t.capacity();
for (int i = 0; i < capacity - 1; i++) {
t.insert(InternalIndex(i), i * i, i);
}
t.Rehash(isolate());
for (int i = 0; i < capacity - 1; i++) {
CHECK_EQ(i, t.lookup(i * i, isolate()));
}
}
// Test half-filled table.
{
Handle<ObjectHashTable> table = ObjectHashTable::New(isolate(), 100);
ObjectHashTableTest t(*table);
int capacity = t.capacity();
for (int i = 0; i < capacity / 2; i++) {
t.insert(InternalIndex(i), i * i, i);
}
t.Rehash(isolate());
for (int i = 0; i < capacity / 2; i++) {
CHECK_EQ(i, t.lookup(i * i, isolate()));
}
}
}
#ifdef DEBUG
TEST_F(DictionaryTest, ObjectHashTableCausesGC) {
i::FLAG_stress_compaction = false;
// For SimulateFullSpace in TestHashMapDoesNotCauseGC.
i::FLAG_stress_concurrent_allocation = false;
TestHashMapDoesNotCauseGC(ObjectHashTable::New(isolate(), 1));
}
#endif
TEST_F(DictionaryTest, MaximumClonedShallowObjectProperties) {
// Assert that a NameDictionary with kMaximumClonedShallowObjectProperties is
// not in large-object space.
const int max_capacity = NameDictionary::ComputeCapacity(
ConstructorBuiltins::kMaximumClonedShallowObjectProperties);
const InternalIndex max_literal_entry(max_capacity /
NameDictionary::kEntrySize);
const int max_literal_index = NameDictionary::EntryToIndex(max_literal_entry);
CHECK_LE(NameDictionary::OffsetOfElementAt(max_literal_index),
kMaxRegularHeapObjectSize);
}
} // namespace internal
} // namespace v8
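
With the port complete, the test is built into the unittests binary instead of cctest and runs under the normal gtest machinery. One plausible invocation, assuming a typical x64.release build (paths and target names depend on the local setup):

    autoninja -C out/x64.release unittests
    out/x64.release/unittests --gtest_filter='DictionaryTest.*'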