[wasm-gc] Optimize struct field offsets

Aligning struct fields to multiples of their own size can leave gaps
between them, e.g. when i8 and i32 fields alternate. This patch
introduces a simple optimization: it keeps track of the most recent
such gap, and attempts to use it for later fields that are small enough.

Bonus changes:
- Cap field alignment to 4 bytes (because we only have 4-byte object
  alignment anyway).
- Don't re-compute field offsets when canonicalizing types. Instead,
  re-use the original type's offsets.

Bug: v8:7748
Change-Id: Iabfc8e7cda94f16d196ed4429f3aa92d249b3b72
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/4092494
Commit-Queue: Manos Koukoutos <manoskouk@chromium.org>
Reviewed-by: Manos Koukoutos <manoskouk@chromium.org>
Auto-Submit: Jakob Kummerow <jkummerow@chromium.org>
Cr-Commit-Position: refs/heads/main@{#84933}
This commit is contained in:
Jakob Kummerow 2022-12-16 17:25:56 +01:00 committed by V8 LUCI CQ
parent 0a7e7383ec
commit 9235ec6302
5 changed files with 167 additions and 16 deletions

View File

@ -159,9 +159,13 @@ TypeCanonicalizer::CanonicalType TypeCanonicalizer::CanonicalizeTypeDef(
for (uint32_t i = 0; i < original_type->field_count(); i++) {
builder.AddField(CanonicalizeValueType(module, original_type->field(i),
recursive_group_start),
original_type->mutability(i));
original_type->mutability(i),
original_type->field_offset(i));
}
result = TypeDefinition(builder.Build(), canonical_supertype);
builder.set_total_fields_size(original_type->total_fields_size());
result = TypeDefinition(
builder.Build(StructType::Builder::kUseProvidedOffsets),
canonical_supertype);
break;
}
case TypeDefinition::kArray: {

View File

@ -2133,7 +2133,10 @@ class ModuleDecoderTemplate : public Decoder {
}
if (failed()) return nullptr;
uint32_t* offsets = zone->NewArray<uint32_t>(field_count);
return zone->New<StructType>(field_count, offsets, fields, mutabilities);
StructType* result =
zone->New<StructType>(field_count, offsets, fields, mutabilities);
result->InitializeOffsets();
return result;
}
const ArrayType* consume_array(Zone* zone) {

View File

@ -26,9 +26,7 @@ class StructType : public ZoneObject {
: field_count_(field_count),
field_offsets_(field_offsets),
reps_(reps),
mutabilities_(mutabilities) {
InitializeOffsets();
}
mutabilities_(mutabilities) {}
uint32_t field_count() const { return field_count_; }
@ -66,53 +64,125 @@ class StructType : public ZoneObject {
uint32_t field_offset(uint32_t index) const {
DCHECK_LT(index, field_count());
if (index == 0) return 0;
DCHECK(offsets_initialized_);
return field_offsets_[index - 1];
}
uint32_t total_fields_size() const {
return field_count() == 0 ? 0 : field_offsets_[field_count() - 1];
}
uint32_t Align(uint32_t offset, uint32_t alignment) {
return RoundUp(offset, std::min(alignment, uint32_t{kTaggedSize}));
}
void InitializeOffsets() {
if (field_count() == 0) return;
DCHECK(!offsets_initialized_);
uint32_t offset = field(0).value_kind_size();
// Optimization: we track the last gap that was introduced by alignment,
// and place any sufficiently-small fields in it.
// It's important that the algorithm that assigns offsets to fields is
// subtyping-safe, i.e. two lists of fields with a common prefix must
// always compute the same offsets for the fields in this common prefix.
uint32_t gap_position = 0;
uint32_t gap_size = 0;
for (uint32_t i = 1; i < field_count(); i++) {
uint32_t field_size = field(i).value_kind_size();
// TODO(jkummerow): Don't round up to more than kTaggedSize-alignment.
offset = RoundUp(offset, field_size);
if (field_size <= gap_size) {
uint32_t aligned_gap = Align(gap_position, field_size);
uint32_t gap_before = aligned_gap - gap_position;
uint32_t aligned_gap_size = gap_size - gap_before;
if (field_size <= aligned_gap_size) {
field_offsets_[i - 1] = aligned_gap;
uint32_t gap_after = aligned_gap_size - field_size;
if (gap_before > gap_after) {
// Keep old {gap_position}.
gap_size = gap_before;
} else {
gap_position = aligned_gap + field_size;
gap_size = gap_after;
}
continue; // Successfully placed the field in the gap.
}
}
uint32_t old_offset = offset;
offset = Align(offset, field_size);
uint32_t gap = offset - old_offset;
if (gap > gap_size) {
gap_size = gap;
gap_position = old_offset;
}
field_offsets_[i - 1] = offset;
offset += field_size;
}
offset = RoundUp(offset, kTaggedSize);
field_offsets_[field_count() - 1] = offset;
#if DEBUG
offsets_initialized_ = true;
#endif
}
// For incrementally building StructTypes.
class Builder {
public:
enum ComputeOffsets : bool {
kComputeOffsets = true,
kUseProvidedOffsets = false
};
Builder(Zone* zone, uint32_t field_count)
: field_count_(field_count),
zone_(zone),
: zone_(zone),
field_count_(field_count),
cursor_(0),
field_offsets_(zone_->NewArray<uint32_t>(field_count_)),
buffer_(zone->NewArray<ValueType>(static_cast<int>(field_count))),
mutabilities_(zone->NewArray<bool>(static_cast<int>(field_count))) {}
void AddField(ValueType type, bool mutability) {
void AddField(ValueType type, bool mutability, uint32_t offset = 0) {
DCHECK_LT(cursor_, field_count_);
if (cursor_ > 0) {
field_offsets_[cursor_ - 1] = offset;
} else {
DCHECK_EQ(0, offset); // First field always has offset 0.
}
mutabilities_[cursor_] = mutability;
buffer_[cursor_++] = type;
}
StructType* Build() {
void set_total_fields_size(uint32_t size) {
if (field_count_ == 0) {
DCHECK_EQ(0, size);
return;
}
field_offsets_[field_count_ - 1] = size;
}
StructType* Build(ComputeOffsets compute_offsets = kComputeOffsets) {
DCHECK_EQ(cursor_, field_count_);
uint32_t* offsets = zone_->NewArray<uint32_t>(field_count_);
return zone_->New<StructType>(field_count_, offsets, buffer_,
mutabilities_);
StructType* result = zone_->New<StructType>(field_count_, field_offsets_,
buffer_, mutabilities_);
if (compute_offsets == kComputeOffsets) {
result->InitializeOffsets();
} else {
#if DEBUG
bool offsets_specified = true;
for (uint32_t i = 0; i < field_count_; i++) {
if (field_offsets_[i] == 0) {
offsets_specified = false;
break;
}
}
result->offsets_initialized_ = offsets_specified;
#endif
}
return result;
}
private:
const uint32_t field_count_;
Zone* const zone_;
const uint32_t field_count_;
uint32_t cursor_;
uint32_t* field_offsets_;
ValueType* const buffer_;
bool* const mutabilities_;
};
@ -122,6 +192,9 @@ class StructType : public ZoneObject {
private:
const uint32_t field_count_;
#if DEBUG
bool offsets_initialized_ = false;
#endif
uint32_t* const field_offsets_;
const ValueType* const reps_;
const bool* const mutabilities_;

View File

@ -577,6 +577,7 @@ v8_source_set("unittests_sources") {
"wasm/simd-shuffle-unittest.cc",
"wasm/streaming-decoder-unittest.cc",
"wasm/string-builder-unittest.cc",
"wasm/struct-types-unittest.cc",
"wasm/subtyping-unittest.cc",
"wasm/wasm-code-manager-unittest.cc",
"wasm/wasm-compiler-unittest.cc",

View File

@ -0,0 +1,70 @@
// Copyright 2022 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/wasm/struct-types.h"
#include "test/unittests/test-utils.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace v8::internal::wasm {
namespace struct_types_unittest {
// Test fixture: TestWithZone supplies the Zone allocator that
// StructType::Builder needs for its backing arrays.
class StructTypesTest : public TestWithZone {};
// A struct with zero fields must report a total payload size of zero.
TEST_F(StructTypesTest, Empty) {
  StructType::Builder empty_builder(this->zone(), 0);
  const StructType* empty_type = empty_builder.Build();
  EXPECT_EQ(0u, empty_type->total_fields_size());
}
// A single i32 field sits at offset 0, and the total size is padded up
// to at least the size of one tagged slot.
TEST_F(StructTypesTest, OneField) {
  StructType::Builder single_builder(this->zone(), 1);
  single_builder.AddField(kWasmI32, true);
  const StructType* single_type = single_builder.Build();
  uint32_t padded_size =
      kUInt32Size > kTaggedSize ? kUInt32Size : kTaggedSize;
  EXPECT_EQ(padded_size, single_type->total_fields_size());
  EXPECT_EQ(0u, single_type->field_offset(0));
}
// Small fields must be packed into alignment gaps left behind by earlier,
// larger fields instead of always being appended at the end:
// i64 @ 0, then i8 @ 8, i32 @ 12 (aligned), i16 @ 10 and i8 @ 9 reuse
// the gap after the first i8 — total 16 bytes.
TEST_F(StructTypesTest, Packing) {
  StructType::Builder packed_builder(this->zone(), 5);
  for (ValueType field_type :
       {kWasmI64, kWasmI8, kWasmI32, kWasmI16, kWasmI8}) {
    packed_builder.AddField(field_type, true);
  }
  const StructType* packed_type = packed_builder.Build();
  EXPECT_EQ(16u, packed_type->total_fields_size());
  const uint32_t kExpectedOffsets[] = {0, 8, 12, 10, 9};
  for (uint32_t i = 0; i < 5; i++) {
    EXPECT_EQ(kExpectedOffsets[i], packed_type->field_offset(i));
  }
}
// Offsets handed back into AddField/set_total_fields_size must round-trip:
// a struct rebuilt from another struct's fields and offsets ends up with
// identical layout.
TEST_F(StructTypesTest, CopyingOffsets) {
  StructType::Builder source_builder(this->zone(), 5);
  source_builder.AddField(kWasmI64, true);
  source_builder.AddField(kWasmI8, true);
  source_builder.AddField(kWasmI32, true);
  source_builder.AddField(kWasmI16, true);
  source_builder.AddField(kWasmI8, true);
  StructType* source = source_builder.Build();

  StructType::Builder clone_builder(this->zone(), source->field_count());
  for (uint32_t i = 0; i < source->field_count(); i++) {
    clone_builder.AddField(source->field(i), source->mutability(i),
                           source->field_offset(i));
  }
  clone_builder.set_total_fields_size(source->total_fields_size());
  StructType* clone = clone_builder.Build();

  for (uint32_t i = 0; i < source->field_count(); i++) {
    EXPECT_EQ(source->field_offset(i), clone->field_offset(i));
  }
  EXPECT_EQ(source->total_fields_size(), clone->total_fields_size());
}
} // namespace struct_types_unittest
} // namespace v8::internal::wasm