// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include <stdlib.h>
#include <utility>

#include "src/v8.h"

#include "src/accessors.h"
#include "src/api.h"
#include "src/compilation-cache.h"
#include "src/execution.h"
#include "src/factory.h"
#include "src/field-type.h"
#include "src/global-handles.h"
#include "src/heap/incremental-marking.h"
#include "src/heap/spaces.h"
#include "src/ic/ic.h"
#include "src/layout-descriptor.h"
#include "src/macro-assembler.h"
#include "src/objects-inl.h"
#include "src/property.h"
#include "test/cctest/cctest.h"
#include "test/cctest/heap/heap-utils.h"

using namespace v8::base;
using namespace v8::internal;

#if V8_DOUBLE_FIELDS_UNBOXING


//
// Helper functions.
//


static void InitializeVerifiedMapDescriptors(
    Map* map, DescriptorArray* descriptors,
    LayoutDescriptor* layout_descriptor) {
  map->InitializeDescriptors(descriptors, layout_descriptor);
  CHECK(layout_descriptor->IsConsistentWithMap(map, true));
}


static Handle<String> MakeString(const char* str) {
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  return factory->InternalizeUtf8String(str);
}


static Handle<String> MakeName(const char* str, int suffix) {
  EmbeddedVector<char, 128> buffer;
  SNPrintF(buffer, "%s%d", str, suffix);
  return MakeString(buffer.start());
}


Handle<JSObject> GetObject(const char* name) {
  return Handle<JSObject>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(
          CcTest::global()
              ->Get(v8::Isolate::GetCurrent()->GetCurrentContext(),
                    v8_str(name))
              .ToLocalChecked())));
}


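// Reads the double value of |field_index| in |obj|, whether the field is
// stored as an unboxed double or as a boxed MutableHeapNumber.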
static double GetDoubleFieldValue(JSObject* obj, FieldIndex field_index) {
  if (obj->IsUnboxedDoubleField(field_index)) {
    return obj->RawFastDoublePropertyAt(field_index);
  } else {
    Object* value = obj->RawFastPropertyAt(field_index);
    CHECK(value->IsMutableHeapNumber());
    return HeapNumber::cast(value)->value();
  }
}

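// Writes |value| into the field described by |descriptor| using the property
// details recorded in the object's map.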
void WriteToField(JSObject* object, int descriptor, Object* value) {
  DescriptorArray* descriptors = object->map()->instance_descriptors();
  PropertyDetails details = descriptors->GetDetails(descriptor);
  object->WriteToField(descriptor, details, value);
}

const int kNumberOfBits = 32;

enum TestPropertyKind {
  PROP_ACCESSOR_INFO,
  PROP_SMI,
  PROP_DOUBLE,
  PROP_TAGGED,
  PROP_KIND_NUMBER
};

static Representation representations[PROP_KIND_NUMBER] = {
    Representation::None(), Representation::Smi(), Representation::Double(),
    Representation::Tagged()};


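// Creates a DescriptorArray with one descriptor per entry of |props|:
// PROP_ACCESSOR_INFO entries become accessor-constant descriptors, all other
// kinds become data fields with the corresponding representation.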
static Handle<DescriptorArray> CreateDescriptorArray(Isolate* isolate,
                                                      TestPropertyKind* props,
                                                      int kPropsCount) {
  Factory* factory = isolate->factory();

  Handle<DescriptorArray> descriptors =
      DescriptorArray::Allocate(isolate, 0, kPropsCount);

  int next_field_offset = 0;
  for (int i = 0; i < kPropsCount; i++) {
    EmbeddedVector<char, 64> buffer;
    SNPrintF(buffer, "prop%d", i);
    Handle<String> name = factory->InternalizeUtf8String(buffer.start());

    TestPropertyKind kind = props[i];

    Descriptor d;
    if (kind == PROP_ACCESSOR_INFO) {
      Handle<AccessorInfo> info =
          Accessors::MakeAccessor(isolate, name, nullptr, nullptr, NONE);
      d = Descriptor::AccessorConstant(name, info, NONE);

    } else {
      d = Descriptor::DataField(name, next_field_offset, NONE,
                                representations[kind]);
    }
    descriptors->Append(&d);
    PropertyDetails details = d.GetDetails();
    if (details.location() == kField) {
      next_field_offset += details.field_width_in_words();
    }
  }
  return descriptors;
}


TEST(LayoutDescriptorBasicFast) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());

  LayoutDescriptor* layout_desc = LayoutDescriptor::FastPointerLayout();

  CHECK(!layout_desc->IsSlowLayout());
  CHECK(layout_desc->IsFastPointerLayout());
  CHECK_EQ(kSmiValueSize, layout_desc->capacity());

  for (int i = 0; i < kSmiValueSize + 13; i++) {
    CHECK(layout_desc->IsTagged(i));
  }
  CHECK(layout_desc->IsTagged(-1));
  CHECK(layout_desc->IsTagged(-12347));
  CHECK(layout_desc->IsTagged(15635));
  CHECK(layout_desc->IsFastPointerLayout());

  for (int i = 0; i < kSmiValueSize; i++) {
    layout_desc = layout_desc->SetTaggedForTesting(i, false);
    CHECK(!layout_desc->IsTagged(i));
    layout_desc = layout_desc->SetTaggedForTesting(i, true);
    CHECK(layout_desc->IsTagged(i));
  }
  CHECK(layout_desc->IsFastPointerLayout());

  int sequence_length;
  CHECK_EQ(true, layout_desc->IsTagged(0, std::numeric_limits<int>::max(),
                                       &sequence_length));
  CHECK_EQ(std::numeric_limits<int>::max(), sequence_length);

  CHECK(layout_desc->IsTagged(0, 7, &sequence_length));
  CHECK_EQ(7, sequence_length);
}


TEST(LayoutDescriptorBasicSlow) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::HandleScope scope(CcTest::isolate());

  Handle<LayoutDescriptor> layout_descriptor;
  const int kPropsCount = kSmiValueSize * 3;
  TestPropertyKind props[kPropsCount];
  for (int i = 0; i < kPropsCount; i++) {
    // All properties tagged.
    props[i] = PROP_TAGGED;
  }

  {
    Handle<DescriptorArray> descriptors =
        CreateDescriptorArray(isolate, props, kPropsCount);

    Handle<Map> map = Map::Create(isolate, kPropsCount);

    layout_descriptor = LayoutDescriptor::New(map, descriptors, kPropsCount);
    CHECK_EQ(LayoutDescriptor::FastPointerLayout(), *layout_descriptor);
    CHECK_EQ(kSmiValueSize, layout_descriptor->capacity());
    InitializeVerifiedMapDescriptors(*map, *descriptors, *layout_descriptor);
  }

  props[0] = PROP_DOUBLE;
  props[kPropsCount - 1] = PROP_DOUBLE;

  Handle<DescriptorArray> descriptors =
      CreateDescriptorArray(isolate, props, kPropsCount);

  {
    int inobject_properties = kPropsCount - 1;
    Handle<Map> map = Map::Create(isolate, inobject_properties);

    // Should be fast as the only double property is the first one.
    layout_descriptor = LayoutDescriptor::New(map, descriptors, kPropsCount);
    CHECK_NE(LayoutDescriptor::FastPointerLayout(), *layout_descriptor);
    CHECK(!layout_descriptor->IsSlowLayout());
    CHECK(!layout_descriptor->IsFastPointerLayout());

    CHECK(!layout_descriptor->IsTagged(0));
    for (int i = 1; i < kPropsCount; i++) {
      CHECK(layout_descriptor->IsTagged(i));
    }
    InitializeVerifiedMapDescriptors(*map, *descriptors, *layout_descriptor);
  }

  {
    int inobject_properties = kPropsCount;
    Handle<Map> map = Map::Create(isolate, inobject_properties);

    layout_descriptor = LayoutDescriptor::New(map, descriptors, kPropsCount);
    CHECK_NE(LayoutDescriptor::FastPointerLayout(), *layout_descriptor);
    CHECK(layout_descriptor->IsSlowLayout());
    CHECK(!layout_descriptor->IsFastPointerLayout());
    CHECK(layout_descriptor->capacity() > kSmiValueSize);

    CHECK(!layout_descriptor->IsTagged(0));
    CHECK(!layout_descriptor->IsTagged(kPropsCount - 1));
    for (int i = 1; i < kPropsCount - 1; i++) {
      CHECK(layout_descriptor->IsTagged(i));
    }

    InitializeVerifiedMapDescriptors(*map, *descriptors, *layout_descriptor);

    // Here we have a truly slow layout descriptor, so play with the bits.
    CHECK(layout_descriptor->IsTagged(-1));
    CHECK(layout_descriptor->IsTagged(-12347));
    CHECK(layout_descriptor->IsTagged(15635));

    LayoutDescriptor* layout_desc = *layout_descriptor;
    // Play with the bits but leave the descriptor consistent with the map at
    // the end.
    for (int i = 1; i < kPropsCount - 1; i++) {
      layout_desc = layout_desc->SetTaggedForTesting(i, false);
      CHECK(!layout_desc->IsTagged(i));
      layout_desc = layout_desc->SetTaggedForTesting(i, true);
      CHECK(layout_desc->IsTagged(i));
    }
    CHECK(layout_desc->IsSlowLayout());
    CHECK(!layout_desc->IsFastPointerLayout());
    CHECK(layout_descriptor->IsConsistentWithMap(*map, true));
  }
}


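// Builds a layout descriptor whose tagged/untagged state flips at each entry
// of |bit_flip_positions| and then verifies both the single-slot and the
// sequence forms of IsTagged() against the expected pattern.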
static void TestLayoutDescriptorQueries(int layout_descriptor_length,
                                        int* bit_flip_positions,
                                        int max_sequence_length) {
  Handle<LayoutDescriptor> layout_descriptor = LayoutDescriptor::NewForTesting(
      CcTest::i_isolate(), layout_descriptor_length);
  layout_descriptor_length = layout_descriptor->capacity();
  LayoutDescriptor* layout_desc = *layout_descriptor;

  {
    // Fill in the layout descriptor.
    int cur_bit_flip_index = 0;
    bool tagged = true;
    for (int i = 0; i < layout_descriptor_length; i++) {
      if (i == bit_flip_positions[cur_bit_flip_index]) {
        tagged = !tagged;
        ++cur_bit_flip_index;
        CHECK(i < bit_flip_positions[cur_bit_flip_index]);  // check test data
      }
      layout_desc = layout_desc->SetTaggedForTesting(i, tagged);
    }
  }

  if (layout_desc->IsFastPointerLayout()) {
    return;
  }

  {
    // Check queries.
    int cur_bit_flip_index = 0;
    bool tagged = true;
    for (int i = 0; i < layout_descriptor_length; i++) {
      if (i == bit_flip_positions[cur_bit_flip_index]) {
        tagged = !tagged;
        ++cur_bit_flip_index;
      }
      CHECK_EQ(tagged, layout_desc->IsTagged(i));

      int next_bit_flip_position = bit_flip_positions[cur_bit_flip_index];
      int expected_sequence_length;
      if (next_bit_flip_position < layout_desc->capacity()) {
        expected_sequence_length = next_bit_flip_position - i;
      } else {
        expected_sequence_length = tagged ? std::numeric_limits<int>::max()
                                          : (layout_desc->capacity() - i);
      }
      expected_sequence_length =
          Min(expected_sequence_length, max_sequence_length);
      int sequence_length;
      CHECK_EQ(tagged,
               layout_desc->IsTagged(i, max_sequence_length, &sequence_length));
      CHECK(sequence_length > 0);

      CHECK_EQ(expected_sequence_length, sequence_length);
    }

    int sequence_length;
    CHECK_EQ(true,
             layout_desc->IsTagged(layout_descriptor_length,
                                   max_sequence_length, &sequence_length));
    CHECK_EQ(max_sequence_length, sequence_length);
  }
}


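// Runs the query checks on layout descriptors that fit into a single Smi
// (fast mode), using various bit-flip patterns.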
static void TestLayoutDescriptorQueriesFast(int max_sequence_length) {
  {
    LayoutDescriptor* layout_desc = LayoutDescriptor::FastPointerLayout();
    int sequence_length;
    for (int i = 0; i < kNumberOfBits; i++) {
      CHECK_EQ(true,
               layout_desc->IsTagged(i, max_sequence_length, &sequence_length));
      CHECK(sequence_length > 0);
      CHECK_EQ(max_sequence_length, sequence_length);
    }
  }

  {
    int bit_flip_positions[] = {1000};
    TestLayoutDescriptorQueries(kSmiValueSize, bit_flip_positions,
                                max_sequence_length);
  }

  {
    int bit_flip_positions[] = {0, 1000};
    TestLayoutDescriptorQueries(kSmiValueSize, bit_flip_positions,
                                max_sequence_length);
  }

  {
    int bit_flip_positions[kNumberOfBits + 1];
    for (int i = 0; i <= kNumberOfBits; i++) {
      bit_flip_positions[i] = i;
    }
    TestLayoutDescriptorQueries(kSmiValueSize, bit_flip_positions,
                                max_sequence_length);
  }

  {
    int bit_flip_positions[] = {3, 7, 8, 10, 15, 21, 30, 1000};
    TestLayoutDescriptorQueries(kSmiValueSize, bit_flip_positions,
                                max_sequence_length);
  }

  {
    int bit_flip_positions[] = {0,  1,  2,  3,  5,  7,  9,
                                12, 15, 18, 22, 26, 29, 1000};
    TestLayoutDescriptorQueries(kSmiValueSize, bit_flip_positions,
                                max_sequence_length);
  }
}


TEST(LayoutDescriptorQueriesFastLimited7) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());

  TestLayoutDescriptorQueriesFast(7);
}


TEST(LayoutDescriptorQueriesFastLimited13) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());

  TestLayoutDescriptorQueriesFast(13);
}


TEST(LayoutDescriptorQueriesFastUnlimited) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());

  TestLayoutDescriptorQueriesFast(std::numeric_limits<int>::max());
}


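// Runs the same query checks on layout descriptors that are too long for the
// fast Smi-based representation and therefore use the slow mode.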
static void TestLayoutDescriptorQueriesSlow(int max_sequence_length) {
  {
    int bit_flip_positions[] = {10000};
    TestLayoutDescriptorQueries(kMaxNumberOfDescriptors, bit_flip_positions,
                                max_sequence_length);
  }

  {
    int bit_flip_positions[] = {0, 10000};
    TestLayoutDescriptorQueries(kMaxNumberOfDescriptors, bit_flip_positions,
                                max_sequence_length);
  }

  {
    int bit_flip_positions[kMaxNumberOfDescriptors + 1];
    for (int i = 0; i < kMaxNumberOfDescriptors; i++) {
      bit_flip_positions[i] = i;
    }
    bit_flip_positions[kMaxNumberOfDescriptors] = 10000;
    TestLayoutDescriptorQueries(kMaxNumberOfDescriptors, bit_flip_positions,
                                max_sequence_length);
  }

  {
    int bit_flip_positions[] = {3,  7,  8,  10, 15,  21,   30,
                                37, 54, 80, 99, 383, 10000};
    TestLayoutDescriptorQueries(kMaxNumberOfDescriptors, bit_flip_positions,
                                max_sequence_length);
  }

  {
    int bit_flip_positions[] = {0,   10,  20,  30,  50,  70,  90,
                                120, 150, 180, 220, 260, 290, 10000};
    TestLayoutDescriptorQueries(kMaxNumberOfDescriptors, bit_flip_positions,
                                max_sequence_length);
  }

  {
    int bit_flip_positions[kMaxNumberOfDescriptors + 1];
    int cur = 0;
    for (int i = 0; i < kMaxNumberOfDescriptors; i++) {
      bit_flip_positions[i] = cur;
      cur = (cur + 1) * 2;
    }
    CHECK(cur < 10000);
    bit_flip_positions[kMaxNumberOfDescriptors] = 10000;
    TestLayoutDescriptorQueries(kMaxNumberOfDescriptors, bit_flip_positions,
                                max_sequence_length);
  }

  {
    int bit_flip_positions[kMaxNumberOfDescriptors + 1];
    int cur = 3;
    for (int i = 0; i < kMaxNumberOfDescriptors; i++) {
      bit_flip_positions[i] = cur;
      cur = (cur + 1) * 2;
    }
    CHECK(cur < 10000);
    bit_flip_positions[kMaxNumberOfDescriptors] = 10000;
    TestLayoutDescriptorQueries(kMaxNumberOfDescriptors, bit_flip_positions,
                                max_sequence_length);
  }
}


TEST(LayoutDescriptorQueriesSlowLimited7) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());

  TestLayoutDescriptorQueriesSlow(7);
}


TEST(LayoutDescriptorQueriesSlowLimited13) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());

  TestLayoutDescriptorQueriesSlow(13);
}


TEST(LayoutDescriptorQueriesSlowLimited42) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());

  TestLayoutDescriptorQueriesSlow(42);
}


TEST(LayoutDescriptorQueriesSlowUnlimited) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());

  TestLayoutDescriptorQueriesSlow(std::numeric_limits<int>::max());
}


TEST(LayoutDescriptorCreateNewFast) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::HandleScope scope(CcTest::isolate());

  Handle<LayoutDescriptor> layout_descriptor;
  TestPropertyKind props[] = {
      PROP_ACCESSOR_INFO,
      PROP_TAGGED,  // field #0
      PROP_ACCESSOR_INFO,
      PROP_DOUBLE,  // field #1
      PROP_ACCESSOR_INFO,
      PROP_TAGGED,  // field #2
      PROP_ACCESSOR_INFO,
  };
  const int kPropsCount = arraysize(props);

  Handle<DescriptorArray> descriptors =
      CreateDescriptorArray(isolate, props, kPropsCount);

  {
    Handle<Map> map = Map::Create(isolate, 0);
    layout_descriptor = LayoutDescriptor::New(map, descriptors, kPropsCount);
    CHECK_EQ(LayoutDescriptor::FastPointerLayout(), *layout_descriptor);
    InitializeVerifiedMapDescriptors(*map, *descriptors, *layout_descriptor);
  }

  {
    Handle<Map> map = Map::Create(isolate, 1);
    layout_descriptor = LayoutDescriptor::New(map, descriptors, kPropsCount);
    CHECK_EQ(LayoutDescriptor::FastPointerLayout(), *layout_descriptor);
    InitializeVerifiedMapDescriptors(*map, *descriptors, *layout_descriptor);
  }

  {
    Handle<Map> map = Map::Create(isolate, 2);
    layout_descriptor = LayoutDescriptor::New(map, descriptors, kPropsCount);
    CHECK_NE(LayoutDescriptor::FastPointerLayout(), *layout_descriptor);
    CHECK(!layout_descriptor->IsSlowLayout());
    CHECK(layout_descriptor->IsTagged(0));
    CHECK(!layout_descriptor->IsTagged(1));
    CHECK(layout_descriptor->IsTagged(2));
    CHECK(layout_descriptor->IsTagged(125));
    InitializeVerifiedMapDescriptors(*map, *descriptors, *layout_descriptor);
  }
}


TEST(LayoutDescriptorCreateNewSlow) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::HandleScope scope(CcTest::isolate());

  Handle<LayoutDescriptor> layout_descriptor;
  const int kPropsCount = kSmiValueSize * 3;
  TestPropertyKind props[kPropsCount];
  for (int i = 0; i < kPropsCount; i++) {
    props[i] = static_cast<TestPropertyKind>(i % PROP_KIND_NUMBER);
  }

  Handle<DescriptorArray> descriptors =
      CreateDescriptorArray(isolate, props, kPropsCount);

  {
    Handle<Map> map = Map::Create(isolate, 0);
    layout_descriptor = LayoutDescriptor::New(map, descriptors, kPropsCount);
    CHECK_EQ(LayoutDescriptor::FastPointerLayout(), *layout_descriptor);
    InitializeVerifiedMapDescriptors(*map, *descriptors, *layout_descriptor);
  }

  {
    Handle<Map> map = Map::Create(isolate, 1);
    layout_descriptor = LayoutDescriptor::New(map, descriptors, kPropsCount);
    CHECK_EQ(LayoutDescriptor::FastPointerLayout(), *layout_descriptor);
    InitializeVerifiedMapDescriptors(*map, *descriptors, *layout_descriptor);
  }

  {
    Handle<Map> map = Map::Create(isolate, 2);
    layout_descriptor = LayoutDescriptor::New(map, descriptors, kPropsCount);
    CHECK_NE(LayoutDescriptor::FastPointerLayout(), *layout_descriptor);
    CHECK(!layout_descriptor->IsSlowLayout());
    CHECK(layout_descriptor->IsTagged(0));
    CHECK(!layout_descriptor->IsTagged(1));
    CHECK(layout_descriptor->IsTagged(2));
    CHECK(layout_descriptor->IsTagged(125));
    InitializeVerifiedMapDescriptors(*map, *descriptors, *layout_descriptor);
  }

  {
    int inobject_properties = kPropsCount / 2;
    Handle<Map> map = Map::Create(isolate, inobject_properties);
    layout_descriptor = LayoutDescriptor::New(map, descriptors, kPropsCount);
    CHECK_NE(LayoutDescriptor::FastPointerLayout(), *layout_descriptor);
    CHECK(layout_descriptor->IsSlowLayout());
    for (int i = 0; i < inobject_properties; i++) {
      // PROP_DOUBLE has index 1 among DATA properties.
      const bool tagged = (i % (PROP_KIND_NUMBER - 1)) != 1;
      CHECK_EQ(tagged, layout_descriptor->IsTagged(i));
    }
    // Every property after inobject_properties must be tagged.
    for (int i = inobject_properties; i < kPropsCount; i++) {
      CHECK(layout_descriptor->IsTagged(i));
    }
    InitializeVerifiedMapDescriptors(*map, *descriptors, *layout_descriptor);

    // Now test LayoutDescriptor::cast_gc_safe().
    Handle<LayoutDescriptor> layout_descriptor_copy =
        LayoutDescriptor::New(map, descriptors, kPropsCount);

    LayoutDescriptor* layout_desc = *layout_descriptor;
    CHECK_EQ(layout_desc, LayoutDescriptor::cast(layout_desc));
    CHECK_EQ(layout_desc, LayoutDescriptor::cast_gc_safe(layout_desc));
    CHECK(layout_desc->IsSlowLayout());
    // Now make it look like a forwarding pointer to layout_descriptor_copy.
    MapWord map_word = layout_desc->map_word();
    CHECK(!map_word.IsForwardingAddress());
    layout_desc->set_map_word(
        MapWord::FromForwardingAddress(*layout_descriptor_copy));
    CHECK(layout_desc->map_word().IsForwardingAddress());
    CHECK_EQ(layout_desc, LayoutDescriptor::cast_gc_safe(layout_desc));

    // Restore it back.
    layout_desc->set_map_word(map_word);
    CHECK_EQ(layout_desc, LayoutDescriptor::cast(layout_desc));
  }
}


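// Appends the given properties to a fresh map one by one via
// LayoutDescriptor::ShareAppend() and checks after every step that exactly
// the in-object double fields are marked as untagged.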
static Handle<LayoutDescriptor> TestLayoutDescriptorAppend(
    Isolate* isolate, int inobject_properties, TestPropertyKind* props,
    int kPropsCount) {
  Factory* factory = isolate->factory();

  Handle<DescriptorArray> descriptors =
      DescriptorArray::Allocate(isolate, 0, kPropsCount);

  Handle<Map> map = Map::Create(isolate, inobject_properties);
  map->InitializeDescriptors(*descriptors,
                             LayoutDescriptor::FastPointerLayout());

  int next_field_offset = 0;
  for (int i = 0; i < kPropsCount; i++) {
    EmbeddedVector<char, 64> buffer;
    SNPrintF(buffer, "prop%d", i);
    Handle<String> name = factory->InternalizeUtf8String(buffer.start());

    Handle<LayoutDescriptor> layout_descriptor;
    TestPropertyKind kind = props[i];
    Descriptor d;
    if (kind == PROP_ACCESSOR_INFO) {
      Handle<AccessorInfo> info =
          Accessors::MakeAccessor(isolate, name, nullptr, nullptr, NONE);
      d = Descriptor::AccessorConstant(name, info, NONE);

    } else {
      d = Descriptor::DataField(name, next_field_offset, NONE,
                                representations[kind]);
    }
    PropertyDetails details = d.GetDetails();
    layout_descriptor = LayoutDescriptor::ShareAppend(map, details);
    descriptors->Append(&d);
    if (details.location() == kField) {
      int field_width_in_words = details.field_width_in_words();
      next_field_offset += field_width_in_words;

      int field_index = details.field_index();
      bool is_inobject = field_index < map->GetInObjectProperties();
      for (int bit = 0; bit < field_width_in_words; bit++) {
        CHECK_EQ(is_inobject && (kind == PROP_DOUBLE),
                 !layout_descriptor->IsTagged(field_index + bit));
      }
      CHECK(layout_descriptor->IsTagged(next_field_offset));
    }
    map->InitializeDescriptors(*descriptors, *layout_descriptor);
  }
  Handle<LayoutDescriptor> layout_descriptor(map->layout_descriptor(), isolate);
  CHECK(layout_descriptor->IsConsistentWithMap(*map, true));
  return layout_descriptor;
}


TEST(LayoutDescriptorAppend) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::HandleScope scope(CcTest::isolate());

  Handle<LayoutDescriptor> layout_descriptor;
  const int kPropsCount = kSmiValueSize * 3;
  TestPropertyKind props[kPropsCount];
  for (int i = 0; i < kPropsCount; i++) {
    props[i] = static_cast<TestPropertyKind>(i % PROP_KIND_NUMBER);
  }

  layout_descriptor =
      TestLayoutDescriptorAppend(isolate, 0, props, kPropsCount);
  CHECK(!layout_descriptor->IsSlowLayout());

  layout_descriptor =
      TestLayoutDescriptorAppend(isolate, 13, props, kPropsCount);
  CHECK(!layout_descriptor->IsSlowLayout());

  layout_descriptor =
      TestLayoutDescriptorAppend(isolate, kSmiValueSize, props, kPropsCount);
  CHECK(!layout_descriptor->IsSlowLayout());

  layout_descriptor = TestLayoutDescriptorAppend(isolate, kSmiValueSize * 2,
                                                 props, kPropsCount);
  CHECK(layout_descriptor->IsSlowLayout());

  layout_descriptor =
      TestLayoutDescriptorAppend(isolate, kPropsCount, props, kPropsCount);
  CHECK(layout_descriptor->IsSlowLayout());
}


TEST(LayoutDescriptorAppendAllDoubles) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::HandleScope scope(CcTest::isolate());

  Handle<LayoutDescriptor> layout_descriptor;
  const int kPropsCount = kSmiValueSize * 3;
  TestPropertyKind props[kPropsCount];
  for (int i = 0; i < kPropsCount; i++) {
    props[i] = PROP_DOUBLE;
  }

  layout_descriptor =
      TestLayoutDescriptorAppend(isolate, 0, props, kPropsCount);
  CHECK(!layout_descriptor->IsSlowLayout());

  layout_descriptor =
      TestLayoutDescriptorAppend(isolate, 13, props, kPropsCount);
  CHECK(!layout_descriptor->IsSlowLayout());

  layout_descriptor =
      TestLayoutDescriptorAppend(isolate, kSmiValueSize, props, kPropsCount);
  CHECK(!layout_descriptor->IsSlowLayout());

  layout_descriptor = TestLayoutDescriptorAppend(isolate, kSmiValueSize + 1,
                                                 props, kPropsCount);
  CHECK(layout_descriptor->IsSlowLayout());

  layout_descriptor = TestLayoutDescriptorAppend(isolate, kSmiValueSize * 2,
                                                 props, kPropsCount);
  CHECK(layout_descriptor->IsSlowLayout());

  layout_descriptor =
      TestLayoutDescriptorAppend(isolate, kPropsCount, props, kPropsCount);
  CHECK(layout_descriptor->IsSlowLayout());

  {
    // Ensure layout descriptor switches into slow mode at the right moment.
    layout_descriptor =
        TestLayoutDescriptorAppend(isolate, kPropsCount, props, kSmiValueSize);
    CHECK(!layout_descriptor->IsSlowLayout());

    layout_descriptor = TestLayoutDescriptorAppend(isolate, kPropsCount, props,
                                                   kSmiValueSize + 1);
    CHECK(layout_descriptor->IsSlowLayout());
  }
}


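// Builds the full transition chain for |descriptors| and checks the layout
// descriptor of every intermediate map, ensuring that once a map switches to
// the slow layout it reuses the shared full layout descriptor.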
static Handle<LayoutDescriptor> TestLayoutDescriptorAppendIfFastOrUseFull(
    Isolate* isolate, int inobject_properties,
    Handle<DescriptorArray> descriptors, int number_of_descriptors) {
  Handle<Map> initial_map = Map::Create(isolate, inobject_properties);

  Handle<LayoutDescriptor> full_layout_descriptor = LayoutDescriptor::New(
      initial_map, descriptors, descriptors->number_of_descriptors());

  int nof = 0;
  bool switched_to_slow_mode = false;

  // This method calls LayoutDescriptor::AppendIfFastOrUseFull() internally
  // and does all the required map/descriptor-related bookkeeping.
  Handle<Map> last_map = Map::AddMissingTransitionsForTesting(
      initial_map, descriptors, full_layout_descriptor);

  // Follow back pointers to construct a sequence of maps from |map|
  // to |last_map|.
  int descriptors_length = descriptors->number_of_descriptors();
  std::vector<Handle<Map>> maps(descriptors_length);
  {
    CHECK(last_map->is_stable());
    Map* map = *last_map;
    for (int i = 0; i < descriptors_length; i++) {
      maps[descriptors_length - 1 - i] = handle(map, isolate);
      Object* maybe_map = map->GetBackPointer();
      CHECK(maybe_map->IsMap());
      map = Map::cast(maybe_map);
      CHECK(!map->is_stable());
    }
    CHECK_EQ(1, maps[0]->NumberOfOwnDescriptors());
  }

  Handle<Map> map;
  // Now check layout descriptors of all intermediate maps.
  for (int i = 0; i < number_of_descriptors; i++) {
    PropertyDetails details = descriptors->GetDetails(i);
    map = maps[i];
    LayoutDescriptor* layout_desc = map->layout_descriptor();

    if (layout_desc->IsSlowLayout()) {
      switched_to_slow_mode = true;
      CHECK_EQ(*full_layout_descriptor, layout_desc);
    } else {
      CHECK(!switched_to_slow_mode);
      if (details.location() == kField) {
        nof++;
        int field_index = details.field_index();
        int field_width_in_words = details.field_width_in_words();

        bool is_inobject = field_index < map->GetInObjectProperties();
        for (int bit = 0; bit < field_width_in_words; bit++) {
          CHECK_EQ(is_inobject && details.representation().IsDouble(),
                   !layout_desc->IsTagged(field_index + bit));
        }
        CHECK(layout_desc->IsTagged(field_index + field_width_in_words));
      }
    }
    CHECK(map->layout_descriptor()->IsConsistentWithMap(*map));
  }

  Handle<LayoutDescriptor> layout_descriptor(map->GetLayoutDescriptor(),
                                             isolate);
  CHECK(layout_descriptor->IsConsistentWithMap(*map));
  return layout_descriptor;
}


TEST(LayoutDescriptorAppendIfFastOrUseFull) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::HandleScope scope(CcTest::isolate());

  Handle<LayoutDescriptor> layout_descriptor;
  const int kPropsCount = kSmiValueSize * 3;
  TestPropertyKind props[kPropsCount];
  for (int i = 0; i < kPropsCount; i++) {
    props[i] = static_cast<TestPropertyKind>(i % PROP_KIND_NUMBER);
  }
  Handle<DescriptorArray> descriptors =
      CreateDescriptorArray(isolate, props, kPropsCount);

  layout_descriptor = TestLayoutDescriptorAppendIfFastOrUseFull(
      isolate, 0, descriptors, kPropsCount);
  CHECK(!layout_descriptor->IsSlowLayout());

  layout_descriptor = TestLayoutDescriptorAppendIfFastOrUseFull(
      isolate, 13, descriptors, kPropsCount);
  CHECK(!layout_descriptor->IsSlowLayout());

  layout_descriptor = TestLayoutDescriptorAppendIfFastOrUseFull(
      isolate, kSmiValueSize, descriptors, kPropsCount);
  CHECK(!layout_descriptor->IsSlowLayout());

  layout_descriptor = TestLayoutDescriptorAppendIfFastOrUseFull(
      isolate, kSmiValueSize * 2, descriptors, kPropsCount);
  CHECK(layout_descriptor->IsSlowLayout());

  layout_descriptor = TestLayoutDescriptorAppendIfFastOrUseFull(
      isolate, kPropsCount, descriptors, kPropsCount);
  CHECK(layout_descriptor->IsSlowLayout());
}


TEST(LayoutDescriptorAppendIfFastOrUseFullAllDoubles) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::HandleScope scope(CcTest::isolate());

  Handle<LayoutDescriptor> layout_descriptor;
  const int kPropsCount = kSmiValueSize * 3;
  TestPropertyKind props[kPropsCount];
  for (int i = 0; i < kPropsCount; i++) {
    props[i] = PROP_DOUBLE;
  }
  Handle<DescriptorArray> descriptors =
      CreateDescriptorArray(isolate, props, kPropsCount);

  layout_descriptor = TestLayoutDescriptorAppendIfFastOrUseFull(
      isolate, 0, descriptors, kPropsCount);
  CHECK(!layout_descriptor->IsSlowLayout());

  layout_descriptor = TestLayoutDescriptorAppendIfFastOrUseFull(
      isolate, 13, descriptors, kPropsCount);
  CHECK(!layout_descriptor->IsSlowLayout());

  layout_descriptor = TestLayoutDescriptorAppendIfFastOrUseFull(
      isolate, kSmiValueSize, descriptors, kPropsCount);
  CHECK(!layout_descriptor->IsSlowLayout());

  layout_descriptor = TestLayoutDescriptorAppendIfFastOrUseFull(
      isolate, kSmiValueSize + 1, descriptors, kPropsCount);
  CHECK(layout_descriptor->IsSlowLayout());

  layout_descriptor = TestLayoutDescriptorAppendIfFastOrUseFull(
      isolate, kSmiValueSize * 2, descriptors, kPropsCount);
  CHECK(layout_descriptor->IsSlowLayout());

  layout_descriptor = TestLayoutDescriptorAppendIfFastOrUseFull(
      isolate, kPropsCount, descriptors, kPropsCount);
  CHECK(layout_descriptor->IsSlowLayout());

  {
    // Ensure layout descriptor switches into slow mode at the right moment.
    layout_descriptor = TestLayoutDescriptorAppendIfFastOrUseFull(
        isolate, kPropsCount, descriptors, kSmiValueSize);
    CHECK(!layout_descriptor->IsSlowLayout());

    layout_descriptor = TestLayoutDescriptorAppendIfFastOrUseFull(
        isolate, kPropsCount, descriptors, kSmiValueSize + 1);
    CHECK(layout_descriptor->IsSlowLayout());
  }
}


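// Regression test: unboxed double fields filled with a bit pattern that looks
// like a heap pointer must not confuse GC and heap verification once the
// object is migrated to a map with a fast-pointer layout.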
TEST(Regress436816) {
  ManualGCScope manual_gc_scope;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  v8::HandleScope scope(CcTest::isolate());

  // Force a GC to free up space before we allocate objects whose
  // mid-test states would fail heap verification.
  CcTest::CollectAllGarbage();

  const int kPropsCount = kSmiValueSize * 3;
  TestPropertyKind props[kPropsCount];
  for (int i = 0; i < kPropsCount; i++) {
    props[i] = PROP_DOUBLE;
  }
  Handle<DescriptorArray> descriptors =
      CreateDescriptorArray(isolate, props, kPropsCount);

  Handle<Map> map = Map::Create(isolate, kPropsCount);
  Handle<LayoutDescriptor> layout_descriptor =
      LayoutDescriptor::New(map, descriptors, kPropsCount);
  map->InitializeDescriptors(*descriptors, *layout_descriptor);

  Handle<JSObject> object = factory->NewJSObjectFromMap(map, TENURED);

  Address fake_address = reinterpret_cast<Address>(~kHeapObjectTagMask);
  HeapObject* fake_object = HeapObject::FromAddress(fake_address);
  CHECK(fake_object->IsHeapObject());

  uint64_t boom_value = bit_cast<uint64_t>(fake_object);
  for (int i = 0; i < kPropsCount; i++) {
    FieldIndex index = FieldIndex::ForDescriptor(*map, i);
    CHECK(map->IsUnboxedDoubleField(index));
    object->RawFastDoublePropertyAsBitsAtPut(index, boom_value);
  }
  CHECK(object->HasFastProperties());
  CHECK(!object->map()->HasFastPointerLayout());

  Handle<Map> normalized_map =
      Map::Normalize(map, KEEP_INOBJECT_PROPERTIES, "testing");
  JSObject::MigrateToMap(object, normalized_map);
  CHECK(!object->HasFastProperties());
  CHECK(object->map()->HasFastPointerLayout());

  // Trigger GCs and heap verification.
  CcTest::CollectAllGarbage();
}


TEST(DescriptorArrayTrimming) {
  ManualGCScope manual_gc_scope;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();

  const int kFieldCount = 128;
  const int kSplitFieldIndex = 32;
  const int kTrimmedLayoutDescriptorLength = 64;

  Handle<FieldType> any_type = FieldType::Any(isolate);
  Handle<Map> map = Map::Create(isolate, kFieldCount);
  for (int i = 0; i < kSplitFieldIndex; i++) {
    map = Map::CopyWithField(map, MakeName("prop", i), any_type, NONE, kMutable,
                             Representation::Smi(), INSERT_TRANSITION)
              .ToHandleChecked();
  }
  map =
      Map::CopyWithField(map, MakeName("dbl", kSplitFieldIndex), any_type, NONE,
                         kMutable, Representation::Double(), INSERT_TRANSITION)
          .ToHandleChecked();
  CHECK(map->layout_descriptor()->IsConsistentWithMap(*map, true));
  CHECK(map->layout_descriptor()->IsSlowLayout());
  CHECK(map->owns_descriptors());
  CHECK_EQ(8, map->layout_descriptor()->length());

  {
    // Add transitions to double fields.
    v8::HandleScope scope(CcTest::isolate());

    Handle<Map> tmp_map = map;
    for (int i = kSplitFieldIndex + 1; i < kFieldCount; i++) {
      tmp_map = Map::CopyWithField(tmp_map, MakeName("dbl", i), any_type, NONE,
                                   kMutable, Representation::Double(),
                                   INSERT_TRANSITION)
                    .ToHandleChecked();
      CHECK(tmp_map->layout_descriptor()->IsConsistentWithMap(*tmp_map, true));
    }
    // Check that descriptors are shared.
    CHECK(tmp_map->owns_descriptors());
    CHECK_EQ(map->instance_descriptors(), tmp_map->instance_descriptors());
    CHECK_EQ(map->layout_descriptor(), tmp_map->layout_descriptor());
  }
  CHECK(map->layout_descriptor()->IsSlowLayout());
  CHECK_EQ(16, map->layout_descriptor()->length());

  // The unused tail of the layout descriptor is now "dirty" because of sharing.
  CHECK(map->layout_descriptor()->IsConsistentWithMap(*map));
  for (int i = kSplitFieldIndex + 1; i < kTrimmedLayoutDescriptorLength; i++) {
    CHECK(!map->layout_descriptor()->IsTagged(i));
  }
  CHECK_LT(map->NumberOfOwnDescriptors(),
           map->instance_descriptors()->number_of_descriptors());

  // Call GC that should trim both |map|'s descriptor array and layout
  // descriptor.
  CcTest::CollectAllGarbage();

  // The unused tail of the layout descriptor is now "clean" again.
  CHECK(map->layout_descriptor()->IsConsistentWithMap(*map, true));
  CHECK(map->owns_descriptors());
  CHECK_EQ(map->NumberOfOwnDescriptors(),
           map->instance_descriptors()->number_of_descriptors());
  CHECK(map->layout_descriptor()->IsSlowLayout());
  CHECK_EQ(8, map->layout_descriptor()->length());

  {
    // Add transitions to tagged fields.
    v8::HandleScope scope(CcTest::isolate());

    Handle<Map> tmp_map = map;
    for (int i = kSplitFieldIndex + 1; i < kFieldCount - 1; i++) {
      tmp_map = Map::CopyWithField(tmp_map, MakeName("tagged", i), any_type,
                                   NONE, kMutable, Representation::Tagged(),
                                   INSERT_TRANSITION)
                    .ToHandleChecked();
      CHECK(tmp_map->layout_descriptor()->IsConsistentWithMap(*tmp_map, true));
    }
    tmp_map =
        Map::CopyWithField(tmp_map, MakeString("dbl"), any_type, NONE, kMutable,
                           Representation::Double(), INSERT_TRANSITION)
            .ToHandleChecked();
    CHECK(tmp_map->layout_descriptor()->IsConsistentWithMap(*tmp_map, true));
    // Check that descriptors are shared.
    CHECK(tmp_map->owns_descriptors());
    CHECK_EQ(map->instance_descriptors(), tmp_map->instance_descriptors());
  }
  CHECK(map->layout_descriptor()->IsSlowLayout());
}


TEST(DoScavenge) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  // The plan: create |obj| with a double field in new space, do a scavenge so
  // that |obj| is moved to old space, construct a double value that looks like
  // a pointer into "from space". Do one more scavenge and ensure that it
  // didn't crash or corrupt the double value stored in the object.

  Handle<FieldType> any_type = FieldType::Any(isolate);
  Handle<Map> map = Map::Create(isolate, 10);
  map = Map::CopyWithField(map, MakeName("prop", 0), any_type, NONE, kMutable,
                           Representation::Double(), INSERT_TRANSITION)
            .ToHandleChecked();

  // Create object in new space.
  Handle<JSObject> obj = factory->NewJSObjectFromMap(map, NOT_TENURED);

  Handle<HeapNumber> heap_number = factory->NewHeapNumber(42.5);
  WriteToField(*obj, 0, *heap_number);

  {
    // Ensure the object is properly set up.
    FieldIndex field_index = FieldIndex::ForDescriptor(*map, 0);
    CHECK(field_index.is_inobject() && field_index.is_double());
    CHECK_EQ(FLAG_unbox_double_fields, map->IsUnboxedDoubleField(field_index));
    CHECK_EQ(42.5, GetDoubleFieldValue(*obj, field_index));
  }
  CHECK(isolate->heap()->new_space()->Contains(*obj));

  // Do a scavenge so that |obj| is moved to the survivor space.
  CcTest::CollectGarbage(i::NEW_SPACE);

  // Create a temp object in the new space.
  Handle<JSArray> temp = factory->NewJSArray(0, PACKED_ELEMENTS);
  CHECK(isolate->heap()->new_space()->Contains(*temp));

  // Construct a double value that looks like a pointer to the new space object
  // and store it into the obj.
  Address fake_object = reinterpret_cast<Address>(*temp) + kPointerSize;
  double boom_value = bit_cast<double>(fake_object);

  FieldIndex field_index = FieldIndex::ForDescriptor(obj->map(), 0);
  Handle<HeapNumber> boom_number = factory->NewHeapNumber(boom_value, MUTABLE);
  obj->FastPropertyAtPut(field_index, *boom_number);

  // Now |obj| moves to old gen and it has a double field that looks like
  // a pointer into the from semi-space.
  CcTest::CollectGarbage(i::NEW_SPACE);

  CHECK(isolate->heap()->old_space()->Contains(*obj));

  CHECK_EQ(boom_value, GetDoubleFieldValue(*obj, field_index));
}


TEST(DoScavengeWithIncrementalWriteBarrier) {
  if (FLAG_never_compact || !FLAG_incremental_marking) return;
  FLAG_stress_incremental_marking = false;
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = CcTest::heap();
  PagedSpace* old_space = heap->old_space();

  // The plan: create |obj_value| in old space and ensure that it is allocated
  // on evacuation candidate page, create |obj| with double and tagged fields
  // in new space and write |obj_value| to tagged field of |obj|, do two
  // scavenges to promote |obj| to old space, a GC in old space and ensure that
  // the tagged value was properly updated after candidates evacuation.

  Handle<FieldType> any_type = FieldType::Any(isolate);
  Handle<Map> map = Map::Create(isolate, 10);
  map = Map::CopyWithField(map, MakeName("prop", 0), any_type, NONE, kMutable,
                           Representation::Double(), INSERT_TRANSITION)
            .ToHandleChecked();
  map = Map::CopyWithField(map, MakeName("prop", 1), any_type, NONE, kMutable,
                           Representation::Tagged(), INSERT_TRANSITION)
            .ToHandleChecked();

  // Create |obj_value| in old space.
  Handle<HeapObject> obj_value;
  Page* ec_page;
  {
    AlwaysAllocateScope always_allocate(isolate);
    // Make sure |obj_value| is placed on an old-space evacuation candidate.
    heap::SimulateFullSpace(old_space);
    obj_value = factory->NewJSArray(32 * KB, HOLEY_ELEMENTS, TENURED);
    ec_page = Page::FromAddress(obj_value->address());
  }

  // Create object in new space.
  Handle<JSObject> obj = factory->NewJSObjectFromMap(map, NOT_TENURED);

  Handle<HeapNumber> heap_number = factory->NewHeapNumber(42.5);
  WriteToField(*obj, 0, *heap_number);
  WriteToField(*obj, 1, *obj_value);

  {
    // Ensure the object is properly set up.
    FieldIndex field_index = FieldIndex::ForDescriptor(*map, 0);
    CHECK(field_index.is_inobject() && field_index.is_double());
    CHECK_EQ(FLAG_unbox_double_fields, map->IsUnboxedDoubleField(field_index));
    CHECK_EQ(42.5, GetDoubleFieldValue(*obj, field_index));

    field_index = FieldIndex::ForDescriptor(*map, 1);
    CHECK(field_index.is_inobject() && !field_index.is_double());
    CHECK(!map->IsUnboxedDoubleField(field_index));
  }
  CHECK(isolate->heap()->new_space()->Contains(*obj));

  // Heap is ready, force |ec_page| to become an evacuation candidate and
  // simulate incremental marking.
  FLAG_stress_compaction = true;
  FLAG_manual_evacuation_candidates_selection = true;
  heap::ForceEvacuationCandidate(ec_page);
  heap::SimulateIncrementalMarking(heap);
  // Disable stress compaction mode in order to let GC do scavenge.
  FLAG_stress_compaction = false;

  // Check that everything is ready for triggering incremental write barrier
  // during scavenge (i.e. that |obj| is black and incremental marking is
  // in compacting mode and |obj_value|'s page is an evacuation candidate).
  IncrementalMarking* marking = heap->incremental_marking();
  CHECK(marking->IsCompacting());
  IncrementalMarking::MarkingState* marking_state =
      heap->incremental_marking()->marking_state();
  CHECK(marking_state->IsBlack(*obj));
  CHECK(MarkCompactCollector::IsOnEvacuationCandidate(*obj_value));

  // Trigger GCs so that |obj| moves to old gen.
  CcTest::CollectGarbage(i::NEW_SPACE);  // in survivor space now
  CcTest::CollectGarbage(i::NEW_SPACE);  // in old gen now

  CHECK(isolate->heap()->old_space()->Contains(*obj));
  CHECK(isolate->heap()->old_space()->Contains(*obj_value));
  CHECK(MarkCompactCollector::IsOnEvacuationCandidate(*obj_value));

  CcTest::CollectGarbage(i::OLD_SPACE);

  // |obj_value| must be evacuated.
  CHECK(!MarkCompactCollector::IsOnEvacuationCandidate(*obj_value));

  FieldIndex field_index = FieldIndex::ForDescriptor(*map, 1);
  CHECK_EQ(*obj_value, obj->RawFastPropertyAt(field_index));
}


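// Cross-checks LayoutDescriptorHelper::IsTagged() for every in-object field
// region, for the object header and for out-of-bounds offsets against the
// expectations derived from |descriptors|.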
static void TestLayoutDescriptorHelper(Isolate* isolate,
|
|
|
|
int inobject_properties,
|
|
|
|
Handle<DescriptorArray> descriptors,
|
|
|
|
int number_of_descriptors) {
|
|
|
|
Handle<Map> map = Map::Create(isolate, inobject_properties);
|
|
|
|
|
|
|
|
Handle<LayoutDescriptor> layout_descriptor = LayoutDescriptor::New(
|
|
|
|
map, descriptors, descriptors->number_of_descriptors());
|
2015-02-13 10:29:11 +00:00
|
|
|
InitializeVerifiedMapDescriptors(*map, *descriptors, *layout_descriptor);
|
2014-12-15 12:13:08 +00:00
|
|
|
|
|
|
|
LayoutDescriptorHelper helper(*map);
|
|
|
|
bool all_fields_tagged = true;
|
|
|
|
|
|
|
|
int instance_size = map->instance_size();
|
|
|
|
|
|
|
|
int end_offset = instance_size * 2;
|
|
|
|
int first_non_tagged_field_offset = end_offset;
|
|
|
|
for (int i = 0; i < number_of_descriptors; i++) {
|
|
|
|
PropertyDetails details = descriptors->GetDetails(i);
|
2017-01-12 16:37:12 +00:00
|
|
|
if (details.location() != kField) continue;
|
2014-12-15 12:13:08 +00:00
|
|
|
FieldIndex index = FieldIndex::ForDescriptor(*map, i);
|
|
|
|
if (!index.is_inobject()) continue;
|
|
|
|
all_fields_tagged &= !details.representation().IsDouble();
|
|
|
|
bool expected_tagged = !index.is_double();
|
|
|
|
if (!expected_tagged) {
|
|
|
|
first_non_tagged_field_offset =
|
|
|
|
Min(first_non_tagged_field_offset, index.offset());
|
|
|
|
}
|
|
|
|
|
|
|
|
int end_of_region_offset;
|
|
|
|
CHECK_EQ(expected_tagged, helper.IsTagged(index.offset()));
|
|
|
|
CHECK_EQ(expected_tagged, helper.IsTagged(index.offset(), instance_size,
|
|
|
|
&end_of_region_offset));
|
|
|
|
CHECK(end_of_region_offset > 0);
|
|
|
|
CHECK(end_of_region_offset % kPointerSize == 0);
|
|
|
|
CHECK(end_of_region_offset <= instance_size);
|
|
|
|
|
|
|
|
for (int offset = index.offset(); offset < end_of_region_offset;
|
|
|
|
offset += kPointerSize) {
|
|
|
|
CHECK_EQ(expected_tagged, helper.IsTagged(index.offset()));
|
|
|
|
}
|
|
|
|
if (end_of_region_offset < instance_size) {
|
|
|
|
CHECK_EQ(!expected_tagged, helper.IsTagged(end_of_region_offset));
|
|
|
|
} else {
|
2017-02-07 14:20:02 +00:00
|
|
|
CHECK(helper.IsTagged(end_of_region_offset));
|
2014-12-15 12:13:08 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
for (int offset = 0; offset < JSObject::kHeaderSize; offset += kPointerSize) {
|
|
|
|
// Header queries
|
2017-02-07 14:20:02 +00:00
|
|
|
CHECK(helper.IsTagged(offset));
|
2014-12-15 12:13:08 +00:00
|
|
|
int end_of_region_offset;
|
2017-02-07 14:20:02 +00:00
|
|
|
CHECK(helper.IsTagged(offset, end_offset, &end_of_region_offset));
|
2014-12-15 12:13:08 +00:00
|
|
|
CHECK_EQ(first_non_tagged_field_offset, end_of_region_offset);
|
|
|
|
|
|
|
|
// Out of bounds queries
|
2017-02-07 14:20:02 +00:00
|
|
|
CHECK(helper.IsTagged(offset + instance_size));
|
2014-12-15 12:13:08 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
CHECK_EQ(all_fields_tagged, helper.all_fields_tagged());
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
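
// Runs the helper check on descriptors whose kinds cycle through all
// TestPropertyKind values, with several in-object property budgets: none,
// 13, one Smi payload worth of fields, two, and all of them.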
TEST(LayoutDescriptorHelperMixed) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::HandleScope scope(CcTest::isolate());

  Handle<LayoutDescriptor> layout_descriptor;
  const int kPropsCount = kSmiValueSize * 3;
  TestPropertyKind props[kPropsCount];
  for (int i = 0; i < kPropsCount; i++) {
    props[i] = static_cast<TestPropertyKind>(i % PROP_KIND_NUMBER);
  }
  Handle<DescriptorArray> descriptors =
      CreateDescriptorArray(isolate, props, kPropsCount);

  TestLayoutDescriptorHelper(isolate, 0, descriptors, kPropsCount);

  TestLayoutDescriptorHelper(isolate, 13, descriptors, kPropsCount);

  TestLayoutDescriptorHelper(isolate, kSmiValueSize, descriptors, kPropsCount);

  TestLayoutDescriptorHelper(isolate, kSmiValueSize * 2, descriptors,
                             kPropsCount);

  TestLayoutDescriptorHelper(isolate, kPropsCount, descriptors, kPropsCount);
}
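
// Same as above, but with every property tagged, so the helper is expected to
// report all_fields_tagged() for each configuration.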
TEST(LayoutDescriptorHelperAllTagged) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::HandleScope scope(CcTest::isolate());

  Handle<LayoutDescriptor> layout_descriptor;
  const int kPropsCount = kSmiValueSize * 3;
  TestPropertyKind props[kPropsCount];
  for (int i = 0; i < kPropsCount; i++) {
    props[i] = PROP_TAGGED;
  }
  Handle<DescriptorArray> descriptors =
      CreateDescriptorArray(isolate, props, kPropsCount);

  TestLayoutDescriptorHelper(isolate, 0, descriptors, kPropsCount);

  TestLayoutDescriptorHelper(isolate, 13, descriptors, kPropsCount);

  TestLayoutDescriptorHelper(isolate, kSmiValueSize, descriptors, kPropsCount);

  TestLayoutDescriptorHelper(isolate, kSmiValueSize * 2, descriptors,
                             kPropsCount);

  TestLayoutDescriptorHelper(isolate, kPropsCount, descriptors, kPropsCount);
}
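
// Same as above, but with every property using double representation.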
TEST(LayoutDescriptorHelperAllDoubles) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::HandleScope scope(CcTest::isolate());

  Handle<LayoutDescriptor> layout_descriptor;
  const int kPropsCount = kSmiValueSize * 3;
  TestPropertyKind props[kPropsCount];
  for (int i = 0; i < kPropsCount; i++) {
    props[i] = PROP_DOUBLE;
  }
  Handle<DescriptorArray> descriptors =
      CreateDescriptorArray(isolate, props, kPropsCount);

  TestLayoutDescriptorHelper(isolate, 0, descriptors, kPropsCount);

  TestLayoutDescriptorHelper(isolate, 13, descriptors, kPropsCount);

  TestLayoutDescriptorHelper(isolate, kSmiValueSize, descriptors, kPropsCount);

  TestLayoutDescriptorHelper(isolate, kSmiValueSize * 2, descriptors,
                             kPropsCount);

  TestLayoutDescriptorHelper(isolate, kPropsCount, descriptors, kPropsCount);
}
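
// Checks under which conditions a child map shares its parent's layout
// descriptor: the first transition from |split_map| (which still owns its
// descriptors) keeps sharing the descriptor, while a second, sibling
// transition gets a layout descriptor of its own.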
TEST(LayoutDescriptorSharing) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Isolate* isolate = CcTest::i_isolate();
  Handle<FieldType> any_type = FieldType::Any(isolate);

  Handle<Map> split_map;
  {
    Handle<Map> map = Map::Create(isolate, 64);
    for (int i = 0; i < 32; i++) {
      Handle<String> name = MakeName("prop", i);
      map = Map::CopyWithField(map, name, any_type, NONE, kMutable,
                               Representation::Smi(), INSERT_TRANSITION)
                .ToHandleChecked();
    }
    split_map =
        Map::CopyWithField(map, MakeString("dbl"), any_type, NONE, kMutable,
                           Representation::Double(), INSERT_TRANSITION)
            .ToHandleChecked();
  }
  Handle<LayoutDescriptor> split_layout_descriptor(
      split_map->layout_descriptor(), isolate);
  CHECK(split_layout_descriptor->IsConsistentWithMap(*split_map, true));
  CHECK(split_layout_descriptor->IsSlowLayout());
  CHECK(split_map->owns_descriptors());

  Handle<Map> map1 =
      Map::CopyWithField(split_map, MakeString("foo"), any_type, NONE, kMutable,
                         Representation::Double(), INSERT_TRANSITION)
          .ToHandleChecked();
  CHECK(!split_map->owns_descriptors());
  CHECK_EQ(*split_layout_descriptor, split_map->layout_descriptor());

  // Layout descriptors should be shared with |split_map|.
  CHECK(map1->owns_descriptors());
  CHECK_EQ(*split_layout_descriptor, map1->layout_descriptor());
  CHECK(map1->layout_descriptor()->IsConsistentWithMap(*map1, true));

  Handle<Map> map2 =
      Map::CopyWithField(split_map, MakeString("bar"), any_type, NONE, kMutable,
                         Representation::Tagged(), INSERT_TRANSITION)
          .ToHandleChecked();

  // Layout descriptors should not be shared with |split_map|.
  CHECK(map2->owns_descriptors());
  CHECK_NE(*split_layout_descriptor, map2->layout_descriptor());
  CHECK(map2->layout_descriptor()->IsConsistentWithMap(*map2, true));
}
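
// Checks that the generational write barrier and the subsequent store buffer
// processing cope with a slot that is turned into an unboxed double field by
// a map migration; see the "plan" comment inside for the exact steps.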
static void TestWriteBarrier(Handle<Map> map, Handle<Map> new_map,
                             int tagged_descriptor, int double_descriptor,
                             bool check_tagged_value = true) {
  FLAG_stress_compaction = true;
  FLAG_manual_evacuation_candidates_selection = true;
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = CcTest::heap();
  PagedSpace* old_space = heap->old_space();

  // The plan: create |obj| from |map| in old space, create |obj_value| in
  // new space and ensure that the write barrier is triggered when |obj_value|
  // is written to property |tagged_descriptor| of |obj|.
  // Then migrate the object to |new_map| and set a proper value for property
  // |double_descriptor|. Trigger a GC and ensure that it does not crash while
  // updating the store buffer entries.

  Handle<JSObject> obj;
  Handle<HeapObject> obj_value;
  {
    AlwaysAllocateScope always_allocate(isolate);
    obj = factory->NewJSObjectFromMap(map, TENURED);
    CHECK(old_space->Contains(*obj));

    obj_value = factory->NewHeapNumber(0.);
  }

  CHECK(heap->InNewSpace(*obj_value));

  {
    FieldIndex index = FieldIndex::ForDescriptor(*map, tagged_descriptor);
    const int n = 153;
    for (int i = 0; i < n; i++) {
      obj->FastPropertyAtPut(index, *obj_value);
    }
  }

  // Migrate |obj| to |new_map| which should shift fields and put the
  // |boom_value| into the slot that was earlier recorded by the write barrier.
  JSObject::MigrateToMap(obj, new_map);
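
  // |boom_value| is the raw bit pattern of a bogus pointer just past
  // |obj_value|. It is stored into the unboxed double field, so if any GC
  // code wrongly treats that field as tagged while processing the recorded
  // slots, it would try to follow this fake "pointer".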
  Address fake_object = reinterpret_cast<Address>(*obj_value) + kPointerSize;
  uint64_t boom_value = bit_cast<uint64_t>(fake_object);

  FieldIndex double_field_index =
      FieldIndex::ForDescriptor(*new_map, double_descriptor);
  CHECK(obj->IsUnboxedDoubleField(double_field_index));
  obj->RawFastDoublePropertyAsBitsAtPut(double_field_index, boom_value);

  // Trigger GC to evacuate all candidates.
  CcTest::CollectGarbage(NEW_SPACE);

  if (check_tagged_value) {
    FieldIndex tagged_field_index =
        FieldIndex::ForDescriptor(*new_map, tagged_descriptor);
    CHECK_EQ(*obj_value, obj->RawFastPropertyAt(tagged_field_index));
  }
  CHECK_EQ(boom_value, obj->RawFastDoublePropertyAsBitsAt(double_field_index));
}
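
// Same scenario as TestWriteBarrier(), but with the slot recorded by the
// incremental-marking (old-to-old) write barrier rather than the
// generational one; see the "plan" comment inside for the exact steps.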
static void TestIncrementalWriteBarrier(Handle<Map> map, Handle<Map> new_map,
                                        int tagged_descriptor,
                                        int double_descriptor,
                                        bool check_tagged_value = true) {
  if (FLAG_never_compact || !FLAG_incremental_marking) return;
  FLAG_stress_incremental_marking = false;
  FLAG_manual_evacuation_candidates_selection = true;
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = CcTest::heap();
  PagedSpace* old_space = heap->old_space();

  // The plan: create |obj| from |map| in old space, create |obj_value| in
  // old space and ensure it ends up on an evacuation candidate page. Start
  // incremental marking and ensure that the incremental write barrier is
  // triggered when |obj_value| is written to property |tagged_descriptor| of
  // |obj|. Then migrate the object to |new_map| and set a proper value for
  // property |double_descriptor|. Trigger a GC and ensure that it does not
  // crash while updating the slots buffer entries.

  Handle<JSObject> obj;
  Handle<HeapObject> obj_value;
  Page* ec_page;
  {
    AlwaysAllocateScope always_allocate(isolate);
    obj = factory->NewJSObjectFromMap(map, TENURED);
    CHECK(old_space->Contains(*obj));

    // Make sure |obj_value| is placed on an old-space evacuation candidate.
    heap::SimulateFullSpace(old_space);
    obj_value = factory->NewJSArray(32 * KB, HOLEY_ELEMENTS, TENURED);
    ec_page = Page::FromAddress(obj_value->address());
    CHECK_NE(ec_page, Page::FromAddress(obj->address()));
  }

  // Heap is ready, force |ec_page| to become an evacuation candidate and
  // simulate incremental marking.
  heap::ForceEvacuationCandidate(ec_page);
  heap::SimulateIncrementalMarking(heap);

  // Check that everything is ready for triggering incremental write barrier
  // (i.e. that both |obj| and |obj_value| are black and the marking phase is
  // still active and |obj_value|'s page is indeed an evacuation candidate).
  IncrementalMarking* marking = heap->incremental_marking();
  CHECK(marking->IsMarking());
  IncrementalMarking::MarkingState* marking_state = marking->marking_state();
  CHECK(marking_state->IsBlack(*obj));
  CHECK(marking_state->IsBlack(*obj_value));
  CHECK(MarkCompactCollector::IsOnEvacuationCandidate(*obj_value));

  // Trigger the incremental write barrier, which should add a slot to the
  // remembered set.
  {
    FieldIndex index = FieldIndex::ForDescriptor(*map, tagged_descriptor);
    obj->FastPropertyAtPut(index, *obj_value);
  }

  // Migrate |obj| to |new_map| which should shift fields and put the
  // |boom_value| into the slot that was earlier recorded by the incremental
  // write barrier.
  JSObject::MigrateToMap(obj, new_map);
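
  // An arbitrary 64-bit pattern written as raw bits into the unboxed double
  // field; if the slot recorded for this field were wrongly treated as
  // tagged, the GC would see this pattern as a bogus object pointer.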
  uint64_t boom_value = UINT64_C(0xbaad0176a37c28e1);

  FieldIndex double_field_index =
      FieldIndex::ForDescriptor(*new_map, double_descriptor);
  CHECK(obj->IsUnboxedDoubleField(double_field_index));
  obj->RawFastDoublePropertyAsBitsAtPut(double_field_index, boom_value);

  // Trigger GC to evacuate all candidates.
  CcTest::CollectGarbage(OLD_SPACE);

  // Ensure that the values are still there and correct.
  CHECK(!MarkCompactCollector::IsOnEvacuationCandidate(*obj_value));

  if (check_tagged_value) {
    FieldIndex tagged_field_index =
        FieldIndex::ForDescriptor(*new_map, tagged_descriptor);
    CHECK_EQ(*obj_value, obj->RawFastPropertyAt(tagged_field_index));
  }
  CHECK_EQ(boom_value, obj->RawFastDoublePropertyAsBitsAt(double_field_index));
}
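
// The helper below builds a map with a constant property followed by a double
// and a tagged field, reconfigures the constant property into a field so that
// all subsequent fields shift right, and then runs either the generational or
// the incremental write barrier check, as selected by OldToWriteBarrierKind.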
enum OldToWriteBarrierKind {
  OLD_TO_OLD_WRITE_BARRIER,
  OLD_TO_NEW_WRITE_BARRIER
};

static void TestWriteBarrierObjectShiftFieldsRight(
    OldToWriteBarrierKind write_barrier_kind) {
  FLAG_stress_incremental_marking = false;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::HandleScope scope(CcTest::isolate());

  Handle<FieldType> any_type = FieldType::Any(isolate);

  CompileRun("function func() { return 1; }");

  Handle<JSObject> func = GetObject("func");

  Handle<Map> map = Map::Create(isolate, 10);
  map = Map::CopyWithConstant(map, MakeName("prop", 0), func, NONE,
                              INSERT_TRANSITION).ToHandleChecked();
  map = Map::CopyWithField(map, MakeName("prop", 1), any_type, NONE, kMutable,
                           Representation::Double(), INSERT_TRANSITION)
            .ToHandleChecked();
  map = Map::CopyWithField(map, MakeName("prop", 2), any_type, NONE, kMutable,
                           Representation::Tagged(), INSERT_TRANSITION)
            .ToHandleChecked();

  // Shift fields right by turning constant property to a field.
  Handle<Map> new_map = Map::ReconfigureProperty(
      map, 0, kData, NONE, Representation::Tagged(), any_type);

  if (write_barrier_kind == OLD_TO_NEW_WRITE_BARRIER) {
    TestWriteBarrier(map, new_map, 2, 1);
  } else {
    CHECK_EQ(OLD_TO_OLD_WRITE_BARRIER, write_barrier_kind);
    TestIncrementalWriteBarrier(map, new_map, 2, 1);
  }
}
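
// The two variants below exercise the same field-shifting scenario, once with
// the generational (old-to-new) write barrier and once with the incremental
// marking (old-to-old) write barrier.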
TEST(WriteBarrierObjectShiftFieldsRight) {
  TestWriteBarrierObjectShiftFieldsRight(OLD_TO_NEW_WRITE_BARRIER);
}

TEST(IncrementalWriteBarrierObjectShiftFieldsRight) {
  TestWriteBarrierObjectShiftFieldsRight(OLD_TO_OLD_WRITE_BARRIER);
}

// TODO(ishell): add respective tests for property kind reconfiguring from
// accessor field to double, once accessor fields are supported by
// Map::ReconfigureProperty().

// TODO(ishell): add respective tests for fast property removal case once
// Map::ReconfigureProperty() supports that.

#endif