LayoutDescriptorHelper is now able to calculate the length of contiguous regions of tagged/non-tagged fields.
This functionality is now used by both object visitor and store buffer.

TEST=cctest/test-unboxed-doubles

Review URL: https://codereview.chromium.org/726713003

Cr-Commit-Position: refs/heads/master@{#25816}
This commit is contained in:
parent 286748c97b
commit b74cfe4a8f
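The key idea behind the new contiguous-region query is a bit-scanning trick: in a layout word a cleared bit means "tagged field", so counting the trailing zeros of the word (inverted when the queried field is untagged), after masking off the bits below the queried field, gives the length of the run of same-kind fields. Below is a minimal, self-contained sketch of that idea; it is not V8 code, and the local CountTrailingZeros32 merely stands in for v8::base::bits::CountTrailingZeros32.

// Standalone sketch (not V8 code) of the run-length query used by
// LayoutDescriptor::IsTagged(field_index, max_sequence_length, ...).
#include <cstdint>
#include <iostream>

// Counts trailing zero bits; stand-in for v8::base::bits::CountTrailingZeros32.
static int CountTrailingZeros32(uint32_t value) {
  if (value == 0) return 32;
  int count = 0;
  while ((value & 1) == 0) {
    value >>= 1;
    ++count;
  }
  return count;
}

// Returns true if the field at |field_index| is tagged (its bit is cleared)
// and writes the length of the contiguous run of same-kind fields within this
// 32-bit layout word to |out_sequence_length|.
static bool IsTaggedRegion(uint32_t layout_word, int field_index,
                           int* out_sequence_length) {
  uint32_t mask = uint32_t{1} << field_index;
  bool is_tagged = (layout_word & mask) == 0;
  // Count set bits instead of cleared bits for a non-tagged run.
  uint32_t value = is_tagged ? layout_word : ~layout_word;
  value &= ~(mask - 1);  // Ignore bits below |field_index|.
  *out_sequence_length = CountTrailingZeros32(value) - field_index;
  return is_tagged;
}

int main() {
  uint32_t layout_word = 0xC;  // bits 2 and 3 set: fields 2 and 3 are doubles
  int len;
  bool tagged = IsTaggedRegion(layout_word, 0, &len);
  std::cout << tagged << " run of " << len << " fields\n";  // 1 run of 2 fields
  tagged = IsTaggedRegion(layout_word, 2, &len);
  std::cout << tagged << " run of " << len << " fields\n";  // 0 run of 2 fields
  return 0;
}

The real LayoutDescriptor::IsTagged(field_index, max_sequence_length, ...) added in this change additionally continues the count across subsequent 32-bit words for slow (array-backed) layouts and clamps the result to max_sequence_length.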
@@ -1806,29 +1806,28 @@ Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor,
       promotion_queue()->remove(&target, &size);
 
       // Promoted object might be already partially visited
-      // during old space pointer iteration. Thus we search specificly
+      // during old space pointer iteration. Thus we search specifically
       // for pointers to from semispace instead of looking for pointers
       // to new space.
       DCHECK(!target->IsMap());
-      Address start_address = target->address();
-      Address end_address = start_address + size;
+      Address obj_address = target->address();
 #if V8_DOUBLE_FIELDS_UNBOXING
-      InobjectPropertiesHelper helper(target->map());
+      LayoutDescriptorHelper helper(target->map());
       bool has_only_tagged_fields = helper.all_fields_tagged();
 
       if (!has_only_tagged_fields) {
-        for (Address slot = start_address; slot < end_address;
-             slot += kPointerSize) {
-          if (helper.IsTagged(static_cast<int>(slot - start_address))) {
-            // TODO(ishell): call this once for contiguous region
-            // of tagged fields.
-            IterateAndMarkPointersToFromSpace(slot, slot + kPointerSize,
-                                              &ScavengeObject);
+        for (int offset = 0; offset < size;) {
+          int end_of_region_offset;
+          if (helper.IsTagged(offset, size, &end_of_region_offset)) {
+            IterateAndMarkPointersToFromSpace(
+                obj_address + offset, obj_address + end_of_region_offset,
+                &ScavengeObject);
           }
+          offset = end_of_region_offset;
         }
       } else {
 #endif
-        IterateAndMarkPointersToFromSpace(start_address, end_address,
+        IterateAndMarkPointersToFromSpace(obj_address, obj_address + size,
                                           &ScavengeObject);
 #if V8_DOUBLE_FIELDS_UNBOXING
       }
@@ -2783,7 +2783,7 @@ void MarkCompactCollector::MigrateObject(HeapObject* dst, HeapObject* src,
 
   bool may_contain_raw_values = src->MayContainRawValues();
 #if V8_DOUBLE_FIELDS_UNBOXING
-  InobjectPropertiesHelper helper(src->map());
+  LayoutDescriptorHelper helper(src->map());
   bool has_only_tagged_fields = helper.all_fields_tagged();
 #endif
   for (int remaining = size / kPointerSize; remaining > 0; remaining--) {
@@ -226,16 +226,14 @@ class BodyVisitorBase : public AllStatic {
     DCHECK(IsAligned(start_offset, kPointerSize) &&
            IsAligned(end_offset, kPointerSize));
 
-    InobjectPropertiesHelper helper(object->map());
+    LayoutDescriptorHelper helper(object->map());
     DCHECK(!helper.all_fields_tagged());
 
-    for (int offset = start_offset; offset < end_offset;
-         offset += kPointerSize) {
-      // Visit tagged fields only.
-      if (helper.IsTagged(offset)) {
-        // TODO(ishell): call this once for contiguous region of tagged fields.
-        IterateRawPointers(heap, object, offset, offset + kPointerSize);
+    for (int offset = start_offset; offset < end_offset;) {
+      int end_of_region_offset;
+      if (helper.IsTagged(offset, end_offset, &end_of_region_offset)) {
+        IterateRawPointers(heap, object, offset, end_of_region_offset);
       }
+      offset = end_of_region_offset;
     }
   }
 };
@@ -512,24 +512,28 @@ void StoreBuffer::IteratePointersToNewSpace(ObjectSlotCallback slot_callback,
           bool may_contain_raw_values = heap_object->MayContainRawValues();
           if (!may_contain_raw_values) {
             Address obj_address = heap_object->address();
-            Address start_address = obj_address + HeapObject::kHeaderSize;
-            Address end_address = obj_address + heap_object->Size();
+            const int start_offset = HeapObject::kHeaderSize;
+            const int end_offset = heap_object->Size();
 #if V8_DOUBLE_FIELDS_UNBOXING
-            InobjectPropertiesHelper helper(heap_object->map());
+            LayoutDescriptorHelper helper(heap_object->map());
             bool has_only_tagged_fields = helper.all_fields_tagged();
 
             if (!has_only_tagged_fields) {
-              for (Address slot = start_address; slot < end_address;
-                   slot += kPointerSize) {
-                if (helper.IsTagged(static_cast<int>(slot - obj_address))) {
-                  // TODO(ishell): call this once for contiguous region
-                  // of tagged fields.
-                  FindPointersToNewSpaceInRegion(slot, slot + kPointerSize,
-                                                 slot_callback, clear_maps);
+              for (int offset = start_offset; offset < end_offset;) {
+                int end_of_region_offset;
+                if (helper.IsTagged(offset, end_offset,
+                                    &end_of_region_offset)) {
+                  FindPointersToNewSpaceInRegion(
+                      obj_address + offset,
+                      obj_address + end_of_region_offset, slot_callback,
+                      clear_maps);
                 }
+                offset = end_of_region_offset;
               }
             } else {
 #endif
+              Address start_address = obj_address + start_offset;
+              Address end_address = obj_address + end_offset;
               // Object has only tagged fields.
               FindPointersToNewSpaceInRegion(start_address, end_address,
                                              slot_callback, clear_maps);
@@ -53,7 +53,7 @@ LayoutDescriptor* LayoutDescriptor::FastPointerLayout() {
 
 
 bool LayoutDescriptor::GetIndexes(int field_index, int* layout_word_index,
-                                  uint32_t* layout_mask) {
+                                  int* layout_bit_index) {
   if (static_cast<unsigned>(field_index) >= static_cast<unsigned>(capacity())) {
     return false;
   }
@@ -62,20 +62,20 @@ bool LayoutDescriptor::GetIndexes(int field_index, int* layout_word_index,
   CHECK((!IsSmi() && (*layout_word_index < length())) ||
         (IsSmi() && (*layout_word_index < 1)));
 
-  int layout_bit_index = field_index % kNumberOfBits;
-  *layout_mask = static_cast<uint32_t>(1) << layout_bit_index;
+  *layout_bit_index = field_index % kNumberOfBits;
   return true;
 }
 
 
 LayoutDescriptor* LayoutDescriptor::SetTagged(int field_index, bool tagged) {
   int layout_word_index;
-  uint32_t layout_mask;
+  int layout_bit_index;
 
-  if (!GetIndexes(field_index, &layout_word_index, &layout_mask)) {
+  if (!GetIndexes(field_index, &layout_word_index, &layout_bit_index)) {
     CHECK(false);
     return this;
   }
+  uint32_t layout_mask = static_cast<uint32_t>(1) << layout_bit_index;
 
   if (IsSlowLayout()) {
     uint32_t value = get_scalar(layout_word_index);
@@ -102,12 +102,13 @@ bool LayoutDescriptor::IsTagged(int field_index) {
   if (IsFastPointerLayout()) return true;
 
   int layout_word_index;
-  uint32_t layout_mask;
+  int layout_bit_index;
 
-  if (!GetIndexes(field_index, &layout_word_index, &layout_mask)) {
+  if (!GetIndexes(field_index, &layout_word_index, &layout_bit_index)) {
     // Out of bounds queries are considered tagged.
     return true;
   }
+  uint32_t layout_mask = static_cast<uint32_t>(1) << layout_bit_index;
 
   if (IsSlowLayout()) {
     uint32_t value = get_scalar(layout_word_index);
@@ -155,7 +156,7 @@ LayoutDescriptor* LayoutDescriptor::cast_gc_safe(Object* object) {
 
 // InobjectPropertiesHelper is a helper class for querying whether inobject
 // property at offset is Double or not.
-InobjectPropertiesHelper::InobjectPropertiesHelper(Map* map)
+LayoutDescriptorHelper::LayoutDescriptorHelper(Map* map)
     : all_fields_tagged_(true),
       header_size_(0),
       layout_descriptor_(LayoutDescriptor::FastPointerLayout()) {
@@ -175,7 +176,7 @@ InobjectPropertiesHelper::InobjectPropertiesHelper(Map* map)
 }
 
 
-bool InobjectPropertiesHelper::IsTagged(int offset_in_bytes) {
+bool LayoutDescriptorHelper::IsTagged(int offset_in_bytes) {
   DCHECK(IsAligned(offset_in_bytes, kPointerSize));
   if (all_fields_tagged_) return true;
   // Object headers do not contain non-tagged fields.
@@ -6,8 +6,11 @@
 
 #include "src/v8.h"
 
+#include "src/base/bits.h"
 #include "src/layout-descriptor.h"
 
+using v8::base::bits::CountTrailingZeros32;
+
 namespace v8 {
 namespace internal {
 
@@ -143,5 +146,111 @@ Handle<LayoutDescriptor> LayoutDescriptor::EnsureCapacity(
   return new_layout_descriptor;
 }
+
+
+bool LayoutDescriptor::IsTagged(int field_index, int max_sequence_length,
+                                int* out_sequence_length) {
+  DCHECK(max_sequence_length > 0);
+  if (IsFastPointerLayout()) {
+    *out_sequence_length = max_sequence_length;
+    return true;
+  }
+
+  int layout_word_index;
+  int layout_bit_index;
+
+  if (!GetIndexes(field_index, &layout_word_index, &layout_bit_index)) {
+    // Out of bounds queries are considered tagged.
+    *out_sequence_length = max_sequence_length;
+    return true;
+  }
+  uint32_t layout_mask = static_cast<uint32_t>(1) << layout_bit_index;
+
+  uint32_t value = IsSlowLayout()
+                       ? get_scalar(layout_word_index)
+                       : static_cast<uint32_t>(Smi::cast(this)->value());
+
+  bool is_tagged = (value & layout_mask) == 0;
+  if (!is_tagged) value = ~value;  // Count set bits instead of cleared bits.
+  value = value & ~(layout_mask - 1);  // Clear bits we are not interested in.
+  int sequence_length = CountTrailingZeros32(value) - layout_bit_index;
+
+  if (layout_bit_index + sequence_length == kNumberOfBits) {
+    // This is a contiguous sequence till the end of current word, proceed
+    // counting in the subsequent words.
+    if (IsSlowLayout()) {
+      int len = length();
+      ++layout_word_index;
+      for (; layout_word_index < len; layout_word_index++) {
+        value = get_scalar(layout_word_index);
+        bool cur_is_tagged = (value & 1) == 0;
+        if (cur_is_tagged != is_tagged) break;
+        if (!is_tagged) value = ~value;  // Count set bits instead.
+        int cur_sequence_length = CountTrailingZeros32(value);
+        sequence_length += cur_sequence_length;
+        if (sequence_length >= max_sequence_length) break;
+        if (cur_sequence_length != kNumberOfBits) break;
+      }
+    }
+    if (is_tagged && (field_index + sequence_length == capacity())) {
+      // The contiguous sequence of tagged fields lasts till the end of the
+      // layout descriptor which means that all the fields starting from
+      // field_index are tagged.
+      sequence_length = std::numeric_limits<int>::max();
+    }
+  }
+  *out_sequence_length = Min(sequence_length, max_sequence_length);
+  return is_tagged;
+}
+
+
+Handle<LayoutDescriptor> LayoutDescriptor::NewForTesting(Isolate* isolate,
+                                                         int length) {
+  return New(isolate, length);
+}
+
+
+LayoutDescriptor* LayoutDescriptor::SetTaggedForTesting(int field_index,
+                                                        bool tagged) {
+  return SetTagged(field_index, tagged);
+}
+
+
+bool LayoutDescriptorHelper::IsTagged(
+    int offset_in_bytes, int end_offset,
+    int* out_end_of_contiguous_region_offset) {
+  DCHECK(IsAligned(offset_in_bytes, kPointerSize));
+  DCHECK(IsAligned(end_offset, kPointerSize));
+  DCHECK(offset_in_bytes < end_offset);
+  if (all_fields_tagged_) {
+    *out_end_of_contiguous_region_offset = end_offset;
+    DCHECK(offset_in_bytes < *out_end_of_contiguous_region_offset);
+    return true;
+  }
+  int max_sequence_length = (end_offset - offset_in_bytes) / kPointerSize;
+  int field_index = Max(0, (offset_in_bytes - header_size_) / kPointerSize);
+  int sequence_length;
+  bool tagged = layout_descriptor_->IsTagged(field_index, max_sequence_length,
+                                             &sequence_length);
+  DCHECK(sequence_length > 0);
+  if (offset_in_bytes < header_size_) {
+    // Object headers do not contain non-tagged fields. Check if the contiguous
+    // region continues after the header.
+    if (tagged) {
+      // First field is tagged, calculate end offset from there.
+      *out_end_of_contiguous_region_offset =
+          header_size_ + sequence_length * kPointerSize;
+
+    } else {
+      *out_end_of_contiguous_region_offset = header_size_;
+    }
+    DCHECK(offset_in_bytes < *out_end_of_contiguous_region_offset);
+    return true;
+  }
+  *out_end_of_contiguous_region_offset =
+      offset_in_bytes + sequence_length * kPointerSize;
+  DCHECK(offset_in_bytes < *out_end_of_contiguous_region_offset);
+  return tagged;
+}
 }
 }  // namespace v8::internal
@@ -26,6 +26,14 @@ class LayoutDescriptor : public FixedTypedArray<Uint32ArrayTraits> {
  public:
   V8_INLINE bool IsTagged(int field_index);
 
+  // Queries the contiguous region of fields that are either tagged or not.
+  // Returns true if the given field is tagged or false otherwise and writes
+  // the length of the contiguous region to |out_sequence_length|.
+  // If the sequence is longer than |max_sequence_length| then
+  // |out_sequence_length| is set to |max_sequence_length|.
+  bool IsTagged(int field_index, int max_sequence_length,
+                int* out_sequence_length);
+
   // Returns true if this is a layout of the object having only tagged fields.
   V8_INLINE bool IsFastPointerLayout();
   V8_INLINE static bool IsFastPointerLayout(Object* layout_descriptor);
@@ -76,10 +84,8 @@ class LayoutDescriptor : public FixedTypedArray<Uint32ArrayTraits> {
   // Capacity of layout descriptors in bits.
   V8_INLINE int capacity();
 
-  V8_INLINE LayoutDescriptor* SetTaggedForTesting(int field_index,
-                                                  bool tagged) {
-    return SetTagged(field_index, tagged);
-  }
+  static Handle<LayoutDescriptor> NewForTesting(Isolate* isolate, int length);
+  LayoutDescriptor* SetTaggedForTesting(int field_index, bool tagged);
 
  private:
   static const int kNumberOfBits = 32;
@@ -96,7 +102,7 @@ class LayoutDescriptor : public FixedTypedArray<Uint32ArrayTraits> {
 
   // Returns false if requested field_index is out of bounds.
   V8_INLINE bool GetIndexes(int field_index, int* layout_word_index,
-                            uint32_t* layout_mask);
+                            int* layout_bit_index);
 
   V8_INLINE MUST_USE_RESULT LayoutDescriptor* SetRawData(int field_index) {
     return SetTagged(field_index, false);
@@ -107,15 +113,23 @@ class LayoutDescriptor : public FixedTypedArray<Uint32ArrayTraits> {
 };
 
 
-// InobjectPropertiesHelper is a helper class for querying layout descriptor
+// LayoutDescriptorHelper is a helper class for querying layout descriptor
 // about whether the field at given offset is tagged or not.
-class InobjectPropertiesHelper {
+class LayoutDescriptorHelper {
  public:
-  inline explicit InobjectPropertiesHelper(Map* map);
+  inline explicit LayoutDescriptorHelper(Map* map);
 
   bool all_fields_tagged() { return all_fields_tagged_; }
   inline bool IsTagged(int offset_in_bytes);
 
+  // Queries the contiguous region of fields that are either tagged or not.
+  // Returns true if fields starting at |offset_in_bytes| are tagged or false
+  // otherwise and writes the offset of the end of the contiguous region to
+  // |out_end_of_contiguous_region_offset|. The |end_offset| value is the
+  // upper bound for |out_end_of_contiguous_region_offset|.
+  bool IsTagged(int offset_in_bytes, int end_offset,
+                int* out_end_of_contiguous_region_offset);
+
  private:
   bool all_fields_tagged_;
   int header_size_;
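The comment on the new region-based LayoutDescriptorHelper::IsTagged overload above describes the contract; the sketch below is a hedged, self-contained illustration of the intended calling pattern (RegionQuery is a hypothetical stand-in for LayoutDescriptorHelper, which needs a real Map): each query returns the end offset of the current same-kind run, so advancing offset to that value visits every word exactly once and the loop always terminates. This is the same pattern the GC callers above (Heap::DoScavenge and the store buffer) use.

#include <cstdio>
#include <vector>

const int kPointerSize = 8;  // word size assumed for this sketch

// Hypothetical stand-in for LayoutDescriptorHelper: one entry per word of the
// object, true for a tagged pointer field, false for a raw (unboxed) word.
struct RegionQuery {
  std::vector<bool> tagged;

  // Mirrors the contract of IsTagged(offset, end_offset, &end_of_region):
  // returns the kind of the field at |offset| and writes the end offset of
  // the contiguous run of fields that share that kind.
  bool IsTagged(int offset, int end_offset, int* end_of_region_offset) const {
    int i = offset / kPointerSize;
    bool kind = tagged[i];
    while ((i + 1) * kPointerSize < end_offset && tagged[i + 1] == kind) ++i;
    *end_of_region_offset = (i + 1) * kPointerSize;
    return kind;
  }
};

int main() {
  RegionQuery q{{true, true, false, false, true}};  // fields 2 and 3 hold doubles
  const int end_offset = 5 * kPointerSize;
  // The calling pattern used by the GC code in this change:
  for (int offset = 0; offset < end_offset;) {
    int end_of_region_offset;
    if (q.IsTagged(offset, end_offset, &end_of_region_offset)) {
      std::printf("visit tagged slots [%d, %d)\n", offset, end_of_region_offset);
    }
    offset = end_of_region_offset;  // always advances, so the loop terminates
  }
  return 0;
}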
@@ -7421,7 +7421,7 @@ static inline void IterateBodyUsingLayoutDescriptor(HeapObject* object,
   DCHECK(IsAligned(start_offset, kPointerSize) &&
          IsAligned(end_offset, kPointerSize));
 
-  InobjectPropertiesHelper helper(object->map());
+  LayoutDescriptorHelper helper(object->map());
   DCHECK(!helper.all_fields_tagged());
 
   for (int offset = start_offset; offset < end_offset; offset += kPointerSize) {
@@ -31,6 +31,8 @@ static double GetDoubleFieldValue(JSObject* obj, FieldIndex field_index) {
   }
 }
 
+const int kNumberOfBits = 32;
+
 
 enum TestPropertyKind {
   PROP_CONSTANT,
@@ -103,6 +105,14 @@ TEST(LayoutDescriptorBasicFast) {
     CHECK_EQ(true, layout_desc->IsTagged(i));
   }
   CHECK(layout_desc->IsFastPointerLayout());
+
+  int sequence_length;
+  CHECK_EQ(true, layout_desc->IsTagged(0, std::numeric_limits<int>::max(),
+                                       &sequence_length));
+  CHECK_EQ(std::numeric_limits<int>::max(), sequence_length);
+
+  CHECK_EQ(true, layout_desc->IsTagged(0, 7, &sequence_length));
+  CHECK_EQ(7, sequence_length);
 }
 
 
@@ -196,6 +206,239 @@ TEST(LayoutDescriptorBasicSlow) {
 }
 
 
+static void TestLayoutDescriptorQueries(int layout_descriptor_length,
+                                        int* bit_flip_positions,
+                                        int max_sequence_length) {
+  Handle<LayoutDescriptor> layout_descriptor = LayoutDescriptor::NewForTesting(
+      CcTest::i_isolate(), layout_descriptor_length);
+  layout_descriptor_length = layout_descriptor->capacity();
+  LayoutDescriptor* layout_desc = *layout_descriptor;
+
+  {
+    // Fill in the layout descriptor.
+    int cur_bit_flip_index = 0;
+    bool tagged = true;
+    for (int i = 0; i < layout_descriptor_length; i++) {
+      if (i == bit_flip_positions[cur_bit_flip_index]) {
+        tagged = !tagged;
+        ++cur_bit_flip_index;
+        CHECK(i < bit_flip_positions[cur_bit_flip_index]);  // check test data
+      }
+      layout_desc = layout_desc->SetTaggedForTesting(i, tagged);
+    }
+  }
+
+  if (layout_desc->IsFastPointerLayout()) {
+    return;
+  }
+
+  {
+    // Check queries.
+    int cur_bit_flip_index = 0;
+    bool tagged = true;
+    for (int i = 0; i < layout_descriptor_length; i++) {
+      if (i == bit_flip_positions[cur_bit_flip_index]) {
+        tagged = !tagged;
+        ++cur_bit_flip_index;
+      }
+      CHECK_EQ(tagged, layout_desc->IsTagged(i));
+
+      int next_bit_flip_position = bit_flip_positions[cur_bit_flip_index];
+      int expected_sequence_length;
+      if (next_bit_flip_position < layout_desc->capacity()) {
+        expected_sequence_length = next_bit_flip_position - i;
+      } else {
+        expected_sequence_length = tagged ? std::numeric_limits<int>::max()
+                                          : (layout_desc->capacity() - i);
+      }
+      expected_sequence_length =
+          Min(expected_sequence_length, max_sequence_length);
+      int sequence_length;
+      CHECK_EQ(tagged,
+               layout_desc->IsTagged(i, max_sequence_length, &sequence_length));
+      CHECK(sequence_length > 0);
+
+      CHECK_EQ(expected_sequence_length, sequence_length);
+    }
+
+    int sequence_length;
+    CHECK_EQ(true,
+             layout_desc->IsTagged(layout_descriptor_length,
+                                   max_sequence_length, &sequence_length));
+    CHECK_EQ(max_sequence_length, sequence_length);
+  }
+}
+
+
+static void TestLayoutDescriptorQueriesFast(int max_sequence_length) {
+  {
+    LayoutDescriptor* layout_desc = LayoutDescriptor::FastPointerLayout();
+    int sequence_length;
+    for (int i = 0; i < kNumberOfBits; i++) {
+      CHECK_EQ(true,
+               layout_desc->IsTagged(i, max_sequence_length, &sequence_length));
+      CHECK(sequence_length > 0);
+      CHECK_EQ(max_sequence_length, sequence_length);
+    }
+  }
+
+  {
+    int bit_flip_positions[] = {1000};
+    TestLayoutDescriptorQueries(kSmiValueSize, bit_flip_positions,
+                                max_sequence_length);
+  }
+
+  {
+    int bit_flip_positions[] = {0, 1000};
+    TestLayoutDescriptorQueries(kSmiValueSize, bit_flip_positions,
+                                max_sequence_length);
+  }
+
+  {
+    int bit_flip_positions[kNumberOfBits + 1];
+    for (int i = 0; i <= kNumberOfBits; i++) {
+      bit_flip_positions[i] = i;
+    }
+    TestLayoutDescriptorQueries(kSmiValueSize, bit_flip_positions,
+                                max_sequence_length);
+  }
+
+  {
+    int bit_flip_positions[] = {3, 7, 8, 10, 15, 21, 30, 1000};
+    TestLayoutDescriptorQueries(kSmiValueSize, bit_flip_positions,
+                                max_sequence_length);
+  }
+
+  {
+    int bit_flip_positions[] = {0,  1,  2,  3,  5,  7,  9,
+                                12, 15, 18, 22, 26, 29, 1000};
+    TestLayoutDescriptorQueries(kSmiValueSize, bit_flip_positions,
+                                max_sequence_length);
+  }
+}
+
+
+TEST(LayoutDescriptorQueriesFastLimited7) {
+  CcTest::InitializeVM();
+  v8::HandleScope scope(CcTest::isolate());
+
+  TestLayoutDescriptorQueriesFast(7);
+}
+
+
+TEST(LayoutDescriptorQueriesFastLimited13) {
+  CcTest::InitializeVM();
+  v8::HandleScope scope(CcTest::isolate());
+
+  TestLayoutDescriptorQueriesFast(13);
+}
+
+
+TEST(LayoutDescriptorQueriesFastUnlimited) {
+  CcTest::InitializeVM();
+  v8::HandleScope scope(CcTest::isolate());
+
+  TestLayoutDescriptorQueriesFast(std::numeric_limits<int>::max());
+}
+
+
+static void TestLayoutDescriptorQueriesSlow(int max_sequence_length) {
+  {
+    int bit_flip_positions[] = {10000};
+    TestLayoutDescriptorQueries(kMaxNumberOfDescriptors, bit_flip_positions,
+                                max_sequence_length);
+  }
+
+  {
+    int bit_flip_positions[] = {0, 10000};
+    TestLayoutDescriptorQueries(kMaxNumberOfDescriptors, bit_flip_positions,
+                                max_sequence_length);
+  }
+
+  {
+    int bit_flip_positions[kMaxNumberOfDescriptors + 1];
+    for (int i = 0; i < kMaxNumberOfDescriptors; i++) {
+      bit_flip_positions[i] = i;
+    }
+    bit_flip_positions[kMaxNumberOfDescriptors] = 10000;
+    TestLayoutDescriptorQueries(kMaxNumberOfDescriptors, bit_flip_positions,
+                                max_sequence_length);
+  }
+
+  {
+    int bit_flip_positions[] = {3,  7,  8,  10, 15,  21,   30,
+                                37, 54, 80, 99, 383, 10000};
+    TestLayoutDescriptorQueries(kMaxNumberOfDescriptors, bit_flip_positions,
+                                max_sequence_length);
+  }
+
+  {
+    int bit_flip_positions[] = {0,   10,  20,  30,  50,  70,  90,
+                                120, 150, 180, 220, 260, 290, 10000};
+    TestLayoutDescriptorQueries(kMaxNumberOfDescriptors, bit_flip_positions,
+                                max_sequence_length);
+  }
+
+  {
+    int bit_flip_positions[kMaxNumberOfDescriptors + 1];
+    int cur = 0;
+    for (int i = 0; i < kMaxNumberOfDescriptors; i++) {
+      bit_flip_positions[i] = cur;
+      cur = (cur + 1) * 2;
+    }
+    CHECK(cur < 10000);
+    bit_flip_positions[kMaxNumberOfDescriptors] = 10000;
+    TestLayoutDescriptorQueries(kMaxNumberOfDescriptors, bit_flip_positions,
+                                max_sequence_length);
+  }
+
+  {
+    int bit_flip_positions[kMaxNumberOfDescriptors + 1];
+    int cur = 3;
+    for (int i = 0; i < kMaxNumberOfDescriptors; i++) {
+      bit_flip_positions[i] = cur;
+      cur = (cur + 1) * 2;
+    }
+    CHECK(cur < 10000);
+    bit_flip_positions[kMaxNumberOfDescriptors] = 10000;
+    TestLayoutDescriptorQueries(kMaxNumberOfDescriptors, bit_flip_positions,
+                                max_sequence_length);
+  }
+}
+
+
+TEST(LayoutDescriptorQueriesSlowLimited7) {
+  CcTest::InitializeVM();
+  v8::HandleScope scope(CcTest::isolate());
+
+  TestLayoutDescriptorQueriesSlow(7);
+}
+
+
+TEST(LayoutDescriptorQueriesSlowLimited13) {
+  CcTest::InitializeVM();
+  v8::HandleScope scope(CcTest::isolate());
+
+  TestLayoutDescriptorQueriesSlow(13);
+}
+
+
+TEST(LayoutDescriptorQueriesSlowLimited42) {
+  CcTest::InitializeVM();
+  v8::HandleScope scope(CcTest::isolate());
+
+  TestLayoutDescriptorQueriesSlow(42);
+}
+
+
+TEST(LayoutDescriptorQueriesSlowUnlimited) {
+  CcTest::InitializeVM();
+  v8::HandleScope scope(CcTest::isolate());
+
+  TestLayoutDescriptorQueriesSlow(std::numeric_limits<int>::max());
+}
+
+
 TEST(LayoutDescriptorCreateNewFast) {
   CcTest::InitializeVM();
   Isolate* isolate = CcTest::i_isolate();
@@ -708,6 +951,151 @@ TEST(DoScavenge) {
 }
 
 
+static void TestLayoutDescriptorHelper(Isolate* isolate,
+                                       int inobject_properties,
+                                       Handle<DescriptorArray> descriptors,
+                                       int number_of_descriptors) {
+  Handle<Map> map = Map::Create(isolate, inobject_properties);
+
+  Handle<LayoutDescriptor> layout_descriptor = LayoutDescriptor::New(
+      map, descriptors, descriptors->number_of_descriptors());
+  map->InitializeDescriptors(*descriptors, *layout_descriptor);
+  DCHECK(layout_descriptor->IsConsistentWithMap(*map));
+
+  LayoutDescriptorHelper helper(*map);
+  bool all_fields_tagged = true;
+
+  int instance_size = map->instance_size();
+
+  int end_offset = instance_size * 2;
+  int first_non_tagged_field_offset = end_offset;
+  for (int i = 0; i < number_of_descriptors; i++) {
+    PropertyDetails details = descriptors->GetDetails(i);
+    if (details.type() != FIELD) continue;
+    FieldIndex index = FieldIndex::ForDescriptor(*map, i);
+    if (!index.is_inobject()) continue;
+    all_fields_tagged &= !details.representation().IsDouble();
+    bool expected_tagged = !index.is_double();
+    if (!expected_tagged) {
+      first_non_tagged_field_offset =
+          Min(first_non_tagged_field_offset, index.offset());
+    }
+
+    int end_of_region_offset;
+    CHECK_EQ(expected_tagged, helper.IsTagged(index.offset()));
+    CHECK_EQ(expected_tagged, helper.IsTagged(index.offset(), instance_size,
+                                              &end_of_region_offset));
+    CHECK(end_of_region_offset > 0);
+    CHECK(end_of_region_offset % kPointerSize == 0);
+    CHECK(end_of_region_offset <= instance_size);
+
+    for (int offset = index.offset(); offset < end_of_region_offset;
+         offset += kPointerSize) {
+      CHECK_EQ(expected_tagged, helper.IsTagged(index.offset()));
+    }
+    if (end_of_region_offset < instance_size) {
+      CHECK_EQ(!expected_tagged, helper.IsTagged(end_of_region_offset));
+    } else {
+      CHECK_EQ(true, helper.IsTagged(end_of_region_offset));
+    }
+  }
+
+  for (int offset = 0; offset < JSObject::kHeaderSize; offset += kPointerSize) {
+    // Header queries
+    CHECK_EQ(true, helper.IsTagged(offset));
+    int end_of_region_offset;
+    CHECK_EQ(true, helper.IsTagged(offset, end_offset, &end_of_region_offset));
+    CHECK_EQ(first_non_tagged_field_offset, end_of_region_offset);
+
+    // Out of bounds queries
+    CHECK_EQ(true, helper.IsTagged(offset + instance_size));
+  }
+
+  CHECK_EQ(all_fields_tagged, helper.all_fields_tagged());
+}
+
+
+TEST(LayoutDescriptorHelperMixed) {
+  CcTest::InitializeVM();
+  Isolate* isolate = CcTest::i_isolate();
+  v8::HandleScope scope(CcTest::isolate());
+
+  Handle<LayoutDescriptor> layout_descriptor;
+  const int kPropsCount = kSmiValueSize * 3;
+  TestPropertyKind props[kPropsCount];
+  for (int i = 0; i < kPropsCount; i++) {
+    props[i] = static_cast<TestPropertyKind>(i % PROP_KIND_NUMBER);
+  }
+  Handle<DescriptorArray> descriptors =
+      CreateDescriptorArray(isolate, props, kPropsCount);
+
+  TestLayoutDescriptorHelper(isolate, 0, descriptors, kPropsCount);
+
+  TestLayoutDescriptorHelper(isolate, 13, descriptors, kPropsCount);
+
+  TestLayoutDescriptorHelper(isolate, kSmiValueSize, descriptors, kPropsCount);
+
+  TestLayoutDescriptorHelper(isolate, kSmiValueSize * 2, descriptors,
+                             kPropsCount);
+
+  TestLayoutDescriptorHelper(isolate, kPropsCount, descriptors, kPropsCount);
+}
+
+
+TEST(LayoutDescriptorHelperAllTagged) {
+  CcTest::InitializeVM();
+  Isolate* isolate = CcTest::i_isolate();
+  v8::HandleScope scope(CcTest::isolate());
+
+  Handle<LayoutDescriptor> layout_descriptor;
+  const int kPropsCount = kSmiValueSize * 3;
+  TestPropertyKind props[kPropsCount];
+  for (int i = 0; i < kPropsCount; i++) {
+    props[i] = PROP_TAGGED;
+  }
+  Handle<DescriptorArray> descriptors =
+      CreateDescriptorArray(isolate, props, kPropsCount);
+
+  TestLayoutDescriptorHelper(isolate, 0, descriptors, kPropsCount);
+
+  TestLayoutDescriptorHelper(isolate, 13, descriptors, kPropsCount);
+
+  TestLayoutDescriptorHelper(isolate, kSmiValueSize, descriptors, kPropsCount);
+
+  TestLayoutDescriptorHelper(isolate, kSmiValueSize * 2, descriptors,
+                             kPropsCount);
+
+  TestLayoutDescriptorHelper(isolate, kPropsCount, descriptors, kPropsCount);
+}
+
+
+TEST(LayoutDescriptorHelperAllDoubles) {
+  CcTest::InitializeVM();
+  Isolate* isolate = CcTest::i_isolate();
+  v8::HandleScope scope(CcTest::isolate());
+
+  Handle<LayoutDescriptor> layout_descriptor;
+  const int kPropsCount = kSmiValueSize * 3;
+  TestPropertyKind props[kPropsCount];
+  for (int i = 0; i < kPropsCount; i++) {
+    props[i] = PROP_DOUBLE;
+  }
+  Handle<DescriptorArray> descriptors =
+      CreateDescriptorArray(isolate, props, kPropsCount);
+
+  TestLayoutDescriptorHelper(isolate, 0, descriptors, kPropsCount);
+
+  TestLayoutDescriptorHelper(isolate, 13, descriptors, kPropsCount);
+
+  TestLayoutDescriptorHelper(isolate, kSmiValueSize, descriptors, kPropsCount);
+
+  TestLayoutDescriptorHelper(isolate, kSmiValueSize * 2, descriptors,
+                             kPropsCount);
+
+  TestLayoutDescriptorHelper(isolate, kPropsCount, descriptors, kPropsCount);
+}
+
+
 TEST(StoreBufferScanOnScavenge) {
   CcTest::InitializeVM();
   Isolate* isolate = CcTest::i_isolate();