[torque] Move Map layout definition to Torque

This commit attempts to change as little behavior as possible, but it
does require reordering the fields within Map to abide by Torque rules
specifying that strong and weak fields must be in separate sections.

Also includes some Torque compiler updates:
- Allow enums (types extending from integral types) as class fields
- Rename @ifdef to @if and add @ifnot for inverse checks
- Allow void fields in class declarations, which take up no space and
  emit no accessors

Bug: v8:8952
Change-Id: I1de6f34c1b15ed87d718666a05176980a218e97c
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1480919
Commit-Queue: Seth Brenith <seth.brenith@microsoft.com>
Reviewed-by: Tobias Tebbi <tebbi@chromium.org>
Cr-Commit-Position: refs/heads/master@{#61588}
Authored by Seth Brenith on 2019-05-16 11:56:56 -07:00, committed by Commit Bot
parent a9eaf66316
commit 15a7e04eec
20 changed files with 172 additions and 128 deletions
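
To make the reordering and the new @if/@ifnot and void-field machinery concrete, here is a rough, standalone C++ analogue of the field order the new Torque-declared Map uses. This is a sketch under assumptions: the struct name, the Tagged alias and the EIGHT_BYTE_TAGGED macro are illustrative stand-ins rather than V8 definitions, the HeapObject header and the V8_DOUBLE_FIELDS_UNBOXING-only layout_descriptor slot are omitted, and real heap objects get no compiler-inserted padding, which is exactly why the explicit @if(TAGGED_SIZE_8_BYTES) padding field exists. Compile with -DEIGHT_BYTE_TAGGED to model the 8-byte-tagged configuration.

#include <cstddef>
#include <cstdint>

#if defined(EIGHT_BYTE_TAGGED)  // illustrative stand-in for TAGGED_SIZE_8_BYTES
using Tagged = uint64_t;        // 64-bit target, no pointer compression
#else
using Tagged = uint32_t;        // 32-bit target or compressed pointers
#endif

struct MapLayoutSketch {
  // Raw-data section (scalar fields come first).
  uint8_t instance_size_in_words;
  uint8_t in_object_properties_start_or_constructor_function_index;
  uint8_t used_or_unused_instance_size_in_words;
  uint8_t visitor_id;
  int16_t instance_type;  // InstanceType stays signed, see the base.tq comment below
  uint8_t bit_field;
  uint8_t bit_field2;
  uint32_t bit_field3;
#if defined(EIGHT_BYTE_TAGGED)
  uint32_t optional_padding;  // @if(TAGGED_SIZE_8_BYTES): realigns the tagged fields
#endif                        // @ifnot(...): the field is void, so it takes no space
  // Strong tagged section.
  Tagged prototype;
  Tagged constructor_or_back_pointer;
  Tagged instance_descriptors;
  Tagged dependent_code;
  Tagged prototype_validity_cell;
  // Weak section: Torque requires weak fields after all strong fields, which is
  // why transitions_or_prototype_info moved to the end of Map.
  Tagged transitions_or_prototype_info;
};

// The conditional padding keeps every tagged field aligned to the tagged size.
static_assert(offsetof(MapLayoutSketch, prototype) % sizeof(Tagged) == 0,
              "tagged fields must start at a tagged-size-aligned offset");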


@@ -166,8 +166,6 @@ type DirectString extends String;
type RootIndex generates 'TNode<Int32T>' constexpr 'RootIndex';
type Map extends HeapObject generates 'TNode<Map>';
@abstract
@noVerifier
extern class FixedArrayBase extends HeapObject {
@@ -184,6 +182,39 @@ extern class WeakFixedArray extends HeapObject { length: Smi; }
extern class ByteArray extends FixedArrayBase {}
type LayoutDescriptor extends ByteArray
generates 'TNode<LayoutDescriptor>';
type TransitionArray extends WeakFixedArray
generates 'TNode<TransitionArray>';
// InstanceType actually extends uint16, but a bunch of methods in
// CodeStubAssembler expect a TNode<Int32T>, so keeping it signed for now.
type InstanceType extends int16 constexpr 'InstanceType';
extern class Map extends HeapObject {
instance_size_in_words: uint8;
in_object_properties_start_or_constructor_function_index: uint8;
used_or_unused_instance_size_in_words: uint8;
visitor_id: uint8;
instance_type: InstanceType;
bit_field: uint8;
bit_field2: uint8;
bit_field3: uint32;
@if(TAGGED_SIZE_8_BYTES) optional_padding: uint32;
@ifnot(TAGGED_SIZE_8_BYTES) optional_padding: void;
prototype: HeapObject;
constructor_or_back_pointer: Object;
instance_descriptors: DescriptorArray;
@if(V8_DOUBLE_FIELDS_UNBOXING) layout_descriptor: LayoutDescriptor;
@ifnot(V8_DOUBLE_FIELDS_UNBOXING) layout_descriptor: void;
dependent_code: DependentCode;
prototype_validity_cell: Smi | Cell;
weak transitions_or_prototype_info: Map | TransitionArray |
PrototypeInfo | Smi;
}
type BytecodeArray extends FixedArrayBase;
@generatePrint
@@ -483,7 +514,7 @@ extern class SharedFunctionInfo extends HeapObject {
expected_nof_properties: uint16;
function_token_offset: int16;
flags: int32;
@ifdef(V8_SFI_HAS_UNIQUE_ID) unique_id: int32;
@if(V8_SFI_HAS_UNIQUE_ID) unique_id: int32;
}
extern class JSBoundFunction extends JSObject {
@@ -703,7 +734,6 @@ extern class PropertyCell extends HeapObject {
extern class JSDataView extends JSArrayBufferView {}
type InstanceType generates 'TNode<Int32T>' constexpr 'InstanceType';
type ElementsKind generates 'TNode<Int32T>' constexpr 'ElementsKind';
type LanguageMode extends Smi constexpr 'LanguageMode';
type ExtractFixedArrayFlags
@@ -2300,8 +2330,6 @@ operator '[]=' macro StoreFixedArrayDirect(a: FixedArray, i: Smi, v: Object) {
a.objects[i] = v;
}
extern operator '.instance_type' macro LoadMapInstanceType(Map): int32;
extern macro GetNumberDictionaryNumberOfElements(NumberDictionary): Smi;
extern macro GetIteratorMethod(implicit context: Context)(HeapObject): Object
labels IfIteratorUndefined;


@@ -1607,7 +1607,7 @@ TNode<Int32T> CodeStubAssembler::LoadElementsKind(
TNode<DescriptorArray> CodeStubAssembler::LoadMapDescriptors(
SloppyTNode<Map> map) {
CSA_SLOW_ASSERT(this, IsMap(map));
return CAST(LoadObjectField(map, Map::kDescriptorsOffset));
return CAST(LoadObjectField(map, Map::kInstanceDescriptorsOffset));
}
TNode<HeapObject> CodeStubAssembler::LoadMapPrototype(SloppyTNode<Map> map) {


@@ -607,7 +607,7 @@ FieldAccess AccessBuilder::ForMapBitField3() {
// static
FieldAccess AccessBuilder::ForMapDescriptors() {
FieldAccess access = {
kTaggedBase, Map::kDescriptorsOffset,
kTaggedBase, Map::kInstanceDescriptorsOffset,
Handle<Name>(), MaybeHandle<Map>(),
Type::OtherInternal(), MachineType::TypeCompressedTaggedPointer(),
kPointerWriteBarrier};


@@ -82,6 +82,14 @@ constexpr int kStackSpaceRequiredForCompilation = 40;
#define V8_DOUBLE_FIELDS_UNBOXING false
#endif
// Determine whether tagged pointers are 8 bytes (used in Torque layouts for
// choosing where to insert padding).
#if V8_TARGET_ARCH_64_BIT && !defined(V8_COMPRESS_POINTERS)
#define TAGGED_SIZE_8_BYTES true
#else
#define TAGGED_SIZE_8_BYTES false
#endif
// Some types of tracing require the SFI to store a unique ID.
#if defined(V8_TRACE_MAPS) || defined(V8_TRACE_IGNITION)
#define V8_SFI_HAS_UNIQUE_ID true
@@ -234,6 +242,7 @@ using AtomicTagged_t = base::AtomicWord;
constexpr bool kUseBranchlessPtrDecompression = true;
STATIC_ASSERT(kTaggedSize == (1 << kTaggedSizeLog2));
STATIC_ASSERT((kTaggedSize == 8) == TAGGED_SIZE_8_BYTES);
using AsAtomicTagged = base::AsAtomicPointerImpl<AtomicTagged_t>;
STATIC_ASSERT(sizeof(Tagged_t) == kTaggedSize);


@@ -714,18 +714,16 @@ class WasmInstanceObject::BodyDescriptor final : public BodyDescriptorBase {
class Map::BodyDescriptor final : public BodyDescriptorBase {
public:
static bool IsValidSlot(Map map, HeapObject obj, int offset) {
return offset >= Map::kPointerFieldsBeginOffset &&
offset < Map::kPointerFieldsEndOffset;
return offset >= Map::kStartOfPointerFieldsOffset &&
offset < Map::kEndOfTaggedFieldsOffset;
}
template <typename ObjectVisitor>
static inline void IterateBody(Map map, HeapObject obj, int object_size,
ObjectVisitor* v) {
IteratePointers(obj, Map::kPointerFieldsBeginOffset,
Map::kTransitionsOrPrototypeInfoOffset, v);
IteratePointers(obj, Map::kStartOfStrongFieldsOffset,
Map::kEndOfStrongFieldsOffset, v);
IterateMaybeWeakPointer(obj, kTransitionsOrPrototypeInfoOffset, v);
IteratePointers(obj, Map::kTransitionsOrPrototypeInfoOffset + kTaggedSize,
Map::kPointerFieldsEndOffset, v);
}
static inline int SizeOf(Map map, HeapObject obj) { return Map::kSize; }
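
A standalone model of how the new section constants are meant to be consumed by a body descriptor: the strong tagged fields form one contiguous range visited as ordinary pointers, and the lone weak field sits at the end of the tagged area and gets the maybe-weak treatment. The offset values and the SketchVisitor type below are made up for illustration; only the constant names and the iteration shape mirror the code above.

#include <cstdio>

// Illustrative values only; the real constants come from the Torque-generated
// field-offset macros (8-byte tagged values assumed here).
constexpr int kTaggedSize = 8;
constexpr int kStartOfStrongFieldsOffset = 16;
constexpr int kEndOfStrongFieldsOffset = 56;           // one past prototype_validity_cell
constexpr int kTransitionsOrPrototypeInfoOffset = 56;  // the lone weak field

struct SketchVisitor {
  void VisitStrong(int offset) { std::printf("strong slot at offset %d\n", offset); }
  void VisitMaybeWeak(int offset) { std::printf("maybe-weak slot at offset %d\n", offset); }
};

// Mirrors the shape of Map::BodyDescriptor::IterateBody above.
void IterateMapBodySketch(SketchVisitor* v) {
  for (int offset = kStartOfStrongFieldsOffset; offset < kEndOfStrongFieldsOffset;
       offset += kTaggedSize) {
    v->VisitStrong(offset);
  }
  v->VisitMaybeWeak(kTransitionsOrPrototypeInfoOffset);
}

int main() {
  SketchVisitor v;
  IterateMapBodySketch(&v);
  return 0;
}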


@@ -659,6 +659,7 @@ void JSObject::JSObjectVerify(Isolate* isolate) {
}
void Map::MapVerify(Isolate* isolate) {
TorqueGeneratedClassVerifiers::MapVerify(*this, isolate);
Heap* heap = isolate->heap();
CHECK(!ObjectInYoungGeneration(*this));
CHECK(FIRST_TYPE <= instance_type() && instance_type() <= LAST_TYPE);
@@ -667,8 +668,6 @@ void Map::MapVerify(Isolate* isolate) {
static_cast<size_t>(instance_size()) < heap->Capacity()));
CHECK(GetBackPointer()->IsUndefined(isolate) ||
!Map::cast(GetBackPointer())->is_stable());
HeapObject::VerifyHeapPointer(isolate, prototype());
HeapObject::VerifyHeapPointer(isolate, instance_descriptors());
SLOW_DCHECK(instance_descriptors()->IsSortedNoDuplicates());
DisallowHeapAllocation no_gc;
SLOW_DCHECK(
@@ -698,8 +697,6 @@ void Map::MapVerify(Isolate* isolate) {
DCHECK(prototype_info() == Smi::kZero ||
prototype_info()->IsPrototypeInfo());
}
CHECK(prototype_validity_cell()->IsSmi() ||
prototype_validity_cell()->IsCell());
}
void Map::DictionaryMapVerify(Isolate* isolate) {


@@ -387,6 +387,10 @@ STATIC_ASSERT(FIRST_NONSTRING_TYPE == Internals::kFirstNonstringType);
STATIC_ASSERT(ODDBALL_TYPE == Internals::kOddballType);
STATIC_ASSERT(FOREIGN_TYPE == Internals::kForeignType);
// Make sure it doesn't matter whether we sign-extend or zero-extend these
// values, because Torque treats InstanceType as signed.
STATIC_ASSERT(LAST_TYPE < 1 << 15);
V8_EXPORT_PRIVATE std::ostream& operator<<(std::ostream& os,
InstanceType instance_type);
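
The new STATIC_ASSERT is what makes the sign/zero-extension claim in the comment safe; a minimal standalone illustration of that claim (plain C++, not V8 code):

#include <cassert>
#include <cstdint>

int main() {
  // Any instance type value stays below 1 << 15, so its top bit is clear...
  uint16_t type = 0x7ABC;
  int32_t sign_extended = static_cast<int16_t>(type);  // what a signed (Torque) load does
  int32_t zero_extended = type;                        // what an unsigned load does
  assert(sign_extended == zero_extended);              // ...and both widenings agree
  return 0;
}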


@@ -31,17 +31,18 @@ OBJECT_CONSTRUCTORS_IMPL(Map, HeapObject)
CAST_ACCESSOR(Map)
DescriptorArray Map::instance_descriptors() const {
return DescriptorArray::cast(READ_FIELD(*this, kDescriptorsOffset));
return DescriptorArray::cast(READ_FIELD(*this, kInstanceDescriptorsOffset));
}
DescriptorArray Map::synchronized_instance_descriptors() const {
return DescriptorArray::cast(ACQUIRE_READ_FIELD(*this, kDescriptorsOffset));
return DescriptorArray::cast(
ACQUIRE_READ_FIELD(*this, kInstanceDescriptorsOffset));
}
void Map::set_synchronized_instance_descriptors(DescriptorArray value,
WriteBarrierMode mode) {
RELEASE_WRITE_FIELD(*this, kDescriptorsOffset, value);
CONDITIONAL_WRITE_BARRIER(*this, kDescriptorsOffset, value, mode);
RELEASE_WRITE_FIELD(*this, kInstanceDescriptorsOffset, value);
CONDITIONAL_WRITE_BARRIER(*this, kInstanceDescriptorsOffset, value, mode);
}
// A freshly allocated layout descriptor can be set on an existing map.


@@ -1497,26 +1497,20 @@ Handle<Map> Map::Normalize(Isolate* isolate, Handle<Map> fast_map,
// applied to the shared map, dependent code and weak cell cache.
Handle<Map> fresh = Map::CopyNormalized(isolate, fast_map, mode);
if (new_map->is_prototype_map()) {
// For prototype maps, the PrototypeInfo is not copied.
DCHECK_EQ(0, memcmp(reinterpret_cast<void*>(fresh->address()),
reinterpret_cast<void*>(new_map->address()),
kTransitionsOrPrototypeInfoOffset));
DCHECK_EQ(fresh->raw_transitions(),
MaybeObject::FromObject(Smi::kZero));
STATIC_ASSERT(kDescriptorsOffset ==
kTransitionsOrPrototypeInfoOffset + kTaggedSize);
DCHECK_EQ(0, memcmp(fresh->RawField(kDescriptorsOffset).ToVoidPtr(),
new_map->RawField(kDescriptorsOffset).ToVoidPtr(),
kDependentCodeOffset - kDescriptorsOffset));
} else {
DCHECK_EQ(0, memcmp(reinterpret_cast<void*>(fresh->address()),
reinterpret_cast<void*>(new_map->address()),
Map::kDependentCodeOffset));
}
STATIC_ASSERT(Map::kPrototypeValidityCellOffset ==
Map::kDependentCodeOffset + kTaggedSize);
DCHECK_EQ(0, memcmp(reinterpret_cast<void*>(fresh->address()),
reinterpret_cast<void*>(new_map->address()),
Map::kDependentCodeOffset));
int offset = Map::kPrototypeValidityCellOffset + kTaggedSize;
if (new_map->is_prototype_map()) {
// For prototype maps, the PrototypeInfo is not copied.
STATIC_ASSERT(Map::kTransitionsOrPrototypeInfoOffset ==
Map::kPrototypeValidityCellOffset + kTaggedSize);
offset = kTransitionsOrPrototypeInfoOffset + kTaggedSize;
DCHECK_EQ(fresh->raw_transitions(),
MaybeObject::FromObject(Smi::kZero));
}
DCHECK_EQ(0, memcmp(reinterpret_cast<void*>(fresh->address() + offset),
reinterpret_cast<void*>(new_map->address() + offset),
Map::kSize - offset));


@@ -9,6 +9,7 @@
#include "src/objects.h"
#include "src/objects/code.h"
#include "src/objects/heap-object.h"
#include "torque-generated/field-offsets-tq.h"
// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"
@@ -166,11 +167,6 @@ using MapHandles = std::vector<Handle<Map>>;
// +---------------+---------------------------------------------+
// | TaggedPointer | [constructor_or_backpointer] |
// +---------------+---------------------------------------------+
// | TaggedPointer | If Map is a prototype map: |
// | | [prototype_info] |
// | | Else: |
// | | [raw_transitions] |
// +---------------+---------------------------------------------+
// | TaggedPointer | [instance_descriptors] |
// +*************************************************************+
// ! TaggedPointer ! [layout_descriptors] !
@@ -180,6 +176,13 @@ using MapHandles = std::vector<Handle<Map>>;
// +*************************************************************+
// | TaggedPointer | [dependent_code] |
// +---------------+---------------------------------------------+
// | TaggedPointer | [prototype_validity_cell] |
// +---------------+---------------------------------------------+
// | TaggedPointer | If Map is a prototype map: |
// | | [prototype_info] |
// | | Else: |
// | | [raw_transitions] |
// +---------------+---------------------------------------------+
class Map : public HeapObject {
public:
@@ -828,34 +831,8 @@ class Map : public HeapObject {
static const int kMaxPreAllocatedPropertyFields = 255;
// Layout description.
#define MAP_FIELDS(V) \
/* Raw data fields. */ \
V(kInstanceSizeInWordsOffset, kUInt8Size) \
V(kInObjectPropertiesStartOrConstructorFunctionIndexOffset, kUInt8Size) \
V(kUsedOrUnusedInstanceSizeInWordsOffset, kUInt8Size) \
V(kVisitorIdOffset, kUInt8Size) \
V(kInstanceTypeOffset, kUInt16Size) \
V(kBitFieldOffset, kUInt8Size) \
V(kBitField2Offset, kUInt8Size) \
V(kBitField3Offset, kUInt32Size) \
/* Adds padding to make tagged fields kTaggedSize-aligned. */ \
V(kOptionalPaddingOffset, OBJECT_POINTER_PADDING(kOptionalPaddingOffset)) \
/* Pointer fields. */ \
V(kPointerFieldsBeginOffset, 0) \
V(kPrototypeOffset, kTaggedSize) \
V(kConstructorOrBackPointerOffset, kTaggedSize) \
V(kTransitionsOrPrototypeInfoOffset, kTaggedSize) \
V(kDescriptorsOffset, kTaggedSize) \
V(kLayoutDescriptorOffset, FLAG_unbox_double_fields ? kTaggedSize : 0) \
V(kDependentCodeOffset, kTaggedSize) \
V(kPrototypeValidityCellOffset, kTaggedSize) \
V(kPointerFieldsEndOffset, 0) \
/* Total size. */ \
V(kSize, 0)
DEFINE_FIELD_OFFSET_CONSTANTS(HeapObject::kHeaderSize, MAP_FIELDS)
#undef MAP_FIELDS
DEFINE_FIELD_OFFSET_CONSTANTS(HeapObject::kHeaderSize,
TORQUE_GENERATED_MAP_FIELDS)
STATIC_ASSERT(kInstanceTypeOffset == Internals::kMapInstanceTypeOffset);
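
For readers unfamiliar with the pattern: DEFINE_FIELD_OFFSET_CONSTANTS turns a (name, size) list, such as the one Torque now generates, into cumulative byte offsets, with zero-sized entries acting as pure section markers. Below is a self-contained imitation of the idea; it is simplified, uses abridged sketch-prefixed names, and is not V8's actual macro or the real TORQUE_GENERATED_MAP_FIELDS contents.

#include <cstdio>

// Abridged field list in the same shape as the generated macro; zero-sized
// entries such as the strong-section markers take no space.
#define SKETCH_MAP_FIELDS(V)             \
  V(kSketchBitField3Offset, 4)           \
  V(kSketchOptionalPaddingOffset, 4)     \
  V(kSketchStartOfStrongFieldsOffset, 0) \
  V(kSketchPrototypeOffset, 8)           \
  V(kSketchEndOfStrongFieldsOffset, 0)   \
  V(kSketchSize, 0)

// Each constant picks up where the previous field ended; fields are assumed to
// start after an 8-byte header.
#define SKETCH_DEFINE_OFFSET(Name, Size) Name, Name##End = Name + (Size)-1,
enum { kSketchHeaderEnd = 8 - 1, SKETCH_MAP_FIELDS(SKETCH_DEFINE_OFFSET) };
#undef SKETCH_DEFINE_OFFSET

static_assert(kSketchStartOfStrongFieldsOffset == kSketchPrototypeOffset,
              "zero-sized markers occupy no space");

int main() {
  std::printf("prototype at offset %d, total size %d\n",
              static_cast<int>(kSketchPrototypeOffset), static_cast<int>(kSketchSize));
  return 0;
}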


@@ -1025,7 +1025,7 @@ void V8HeapExplorer::ExtractMapReferences(HeapEntry* entry, Map map) {
DescriptorArray descriptors = map->instance_descriptors();
TagObject(descriptors, "(map descriptors)");
SetInternalReference(entry, "descriptors", descriptors,
Map::kDescriptorsOffset);
Map::kInstanceDescriptorsOffset);
SetInternalReference(entry, "prototype", map->prototype(),
Map::kPrototypeOffset);
if (FLAG_unbox_double_fields) {


@@ -715,10 +715,20 @@ struct StructFieldExpression {
bool const_qualified;
};
enum class ConditionalAnnotationType {
kPositive,
kNegative,
};
struct ConditionalAnnotation {
std::string condition;
ConditionalAnnotationType type;
};
struct ClassFieldExpression {
NameAndTypeExpression name_and_type;
base::Optional<std::string> index;
base::Optional<std::string> conditional;
base::Optional<ConditionalAnnotation> conditional;
bool weak;
bool const_qualified;
bool generate_verify;


@@ -54,6 +54,8 @@ enum class ParseResultHolderBase::TypeId {
kOptionalLabelBlockPtr,
kNameAndTypeExpression,
kNameAndExpression,
kConditionalAnnotation,
kOptionalConditionalAnnotation,
kClassFieldExpression,
kStructFieldExpression,
kStdVectorOfNameAndTypeExpression,


@@ -3097,7 +3097,7 @@ void ImplementationVisitor::GenerateClassFieldOffsets(
FieldSectionType::kStrongSection,
&new_contents_stream);
}
} else {
} else if (f.name_and_type.type != TypeOracle::GetVoidType()) {
ProcessFieldInSection(&section, &completed_sections,
FieldSectionType::kScalarSection,
&new_contents_stream);
@@ -3105,8 +3105,7 @@ void ImplementationVisitor::GenerateClassFieldOffsets(
size_t field_size;
std::string size_string;
std::string machine_type;
std::tie(field_size, size_string, machine_type) =
f.GetFieldSizeInformation();
std::tie(field_size, size_string) = f.GetFieldSizeInformation();
new_contents_stream << "V(k" << CamelifyString(f.name_and_type.name)
<< "Offset, " << size_string << ") \\\n";
}


@@ -38,6 +38,8 @@ class BuildFlags : public ContextualClass<BuildFlags> {
public:
BuildFlags() {
build_flags_["V8_SFI_HAS_UNIQUE_ID"] = V8_SFI_HAS_UNIQUE_ID;
build_flags_["TAGGED_SIZE_8_BYTES"] = TAGGED_SIZE_8_BYTES;
build_flags_["V8_DOUBLE_FIELDS_UNBOXING"] = V8_DOUBLE_FIELDS_UNBOXING;
build_flags_["TRUE_FOR_TESTING"] = true;
build_flags_["FALSE_FOR_TESTING"] = false;
}
@@ -105,6 +107,14 @@ V8_EXPORT_PRIVATE const ParseResultTypeId
ParseResultHolder<NameAndExpression>::id =
ParseResultTypeId::kNameAndExpression;
template <>
V8_EXPORT_PRIVATE const ParseResultTypeId
ParseResultHolder<ConditionalAnnotation>::id =
ParseResultTypeId::kConditionalAnnotation;
template <>
V8_EXPORT_PRIVATE const ParseResultTypeId
ParseResultHolder<base::Optional<ConditionalAnnotation>>::id =
ParseResultTypeId::kOptionalConditionalAnnotation;
template <>
V8_EXPORT_PRIVATE const ParseResultTypeId
ParseResultHolder<ClassFieldExpression>::id =
ParseResultTypeId::kClassFieldExpression;
@@ -704,8 +714,12 @@ base::Optional<ParseResult> MakeClassDeclaration(
std::vector<ClassFieldExpression> fields;
std::copy_if(fields_raw.begin(), fields_raw.end(), std::back_inserter(fields),
[](const ClassFieldExpression& exp) {
return !exp.conditional.has_value() ||
BuildFlags::GetFlag(*exp.conditional, "@ifdef");
if (!exp.conditional.has_value()) return true;
const ConditionalAnnotation& conditional = *exp.conditional;
return conditional.type == ConditionalAnnotationType::kPositive
? BuildFlags::GetFlag(conditional.condition, "@if")
: !BuildFlags::GetFlag(conditional.condition,
"@ifnot");
});
Declaration* result = MakeNode<ClassDeclaration>(
@@ -1284,8 +1298,20 @@ base::Optional<ParseResult> MakeNameAndExpressionFromExpression(
ReportError("Constructor parameters need to be named.");
}
base::Optional<ParseResult> MakeConditionalAnnotation(
ParseResultIterator* child_results) {
auto type_str = child_results->NextAs<Identifier*>()->value;
DCHECK(type_str == "@if" || type_str == "@ifnot");
ConditionalAnnotationType type = type_str == "@if"
? ConditionalAnnotationType::kPositive
: ConditionalAnnotationType::kNegative;
auto condition = child_results->NextAs<std::string>();
return ParseResult{ConditionalAnnotation{condition, type}};
}
base::Optional<ParseResult> MakeClassField(ParseResultIterator* child_results) {
auto conditional = child_results->NextAs<base::Optional<std::string>>();
auto conditional =
child_results->NextAs<base::Optional<ConditionalAnnotation>>();
AnnotationSet annotations(child_results, {"@noVerifier"});
bool generate_verify = !annotations.Contains("@noVerifier");
auto weak = child_results->NextAs<bool>();
@@ -1516,12 +1542,16 @@ struct TorqueGrammar : Grammar {
Symbol* optionalArraySpecifier =
Optional<std::string>(Sequence({Token("["), &identifier, Token("]")}));
Symbol classField = {Rule(
{Optional<std::string>(
Sequence({Token("@ifdef"), Token("("), &identifier, Token(")")})),
annotations, CheckIf(Token("weak")), CheckIf(Token("const")), &name,
optionalArraySpecifier, Token(":"), &type, Token(";")},
MakeClassField)};
// Result: ConditionalAnnotation
Symbol conditionalAnnotation = {
Rule({OneOf({"@if", "@ifnot"}), Token("("), &identifier, Token(")")},
MakeConditionalAnnotation)};
Symbol classField = {
Rule({Optional<ConditionalAnnotation>(&conditionalAnnotation),
annotations, CheckIf(Token("weak")), CheckIf(Token("const")), &name,
optionalArraySpecifier, Token(":"), &type, Token(";")},
MakeClassField)};
Symbol structField = {
Rule({CheckIf(Token("const")), &name, Token(":"), &type, Token(";")},
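
Taken together, the new grammar rule and the copy_if filter in MakeClassDeclaration mean a field annotated @if(FLAG) survives only when FLAG is set, and an @ifnot(FLAG) field only when it is not. The following standalone C++ imitation of that filtering step uses hypothetical types and a hard-coded flag table; only the two annotation kinds and the TRUE_FOR_TESTING/FALSE_FOR_TESTING flag names correspond to the code in this commit.

#include <algorithm>
#include <cstdio>
#include <iterator>
#include <map>
#include <string>
#include <vector>

enum class AnnotationKind { kNone, kIf, kIfNot };

struct SketchField {
  std::string name;
  AnnotationKind kind;
  std::string condition;
};

// Stand-in for the BuildFlags table that torque-parser.cc fills from build macros.
const std::map<std::string, bool> kFlags = {{"TRUE_FOR_TESTING", true},
                                            {"FALSE_FOR_TESTING", false}};

bool KeepField(const SketchField& f) {
  if (f.kind == AnnotationKind::kNone) return true;
  bool flag = kFlags.at(f.condition);
  return f.kind == AnnotationKind::kIf ? flag : !flag;
}

int main() {
  std::vector<SketchField> declared = {
      {"a_int8", AnnotationKind::kIf, "FALSE_FOR_TESTING"},  // dropped
      {"a_int16", AnnotationKind::kIf, "TRUE_FOR_TESTING"},  // kept
      {"e", AnnotationKind::kIfNot, "TRUE_FOR_TESTING"},     // dropped
      {"f", AnnotationKind::kIfNot, "FALSE_FOR_TESTING"},    // kept
      {"b", AnnotationKind::kNone, ""},                      // kept, unconditional
  };
  std::vector<SketchField> kept;
  std::copy_if(declared.begin(), declared.end(), std::back_inserter(kept), KeepField);
  for (const auto& f : kept) std::printf("%s\n", f.name.c_str());
  return 0;
}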


@@ -269,11 +269,10 @@ void TypeVisitor::VisitClassFieldsAndMethods(
size_t field_size;
std::string size_string;
std::string machine_type;
std::tie(field_size, size_string, machine_type) =
field.GetFieldSizeInformation();
std::tie(field_size, size_string) = field.GetFieldSizeInformation();
// Our allocations don't support alignments beyond kTaggedSize.
size_t alignment = std::min(size_t{kTaggedSize}, field_size);
if (class_offset % alignment != 0) {
if (alignment > 0 && class_offset % alignment != 0) {
ReportError("field ", field_expression.name_and_type.name,
" at offset ", class_offset, " is not ", alignment,
"-byte aligned.");


@@ -375,7 +375,9 @@ void ClassType::GenerateAccessors() {
// function and define a corresponding '.field' operator. The
// implementation iterator will turn the snippets into code.
for (auto& field : fields_) {
if (field.index) continue;
if (field.index || field.name_and_type.type == TypeOracle::GetVoidType()) {
continue;
}
CurrentSourcePosition::Scope position_activator(field.pos);
IdentifierExpression* parameter =
MakeNode<IdentifierExpression>(MakeNode<Identifier>(std::string{"o"}));
@@ -587,62 +589,50 @@ VisitResult VisitResult::NeverResult() {
return result;
}
std::tuple<size_t, std::string, std::string> Field::GetFieldSizeInformation()
const {
std::tuple<size_t, std::string> Field::GetFieldSizeInformation() const {
std::string size_string = "#no size";
std::string machine_type = "#no machine type";
const Type* field_type = this->name_and_type.type;
size_t field_size = 0;
if (field_type->IsSubtypeOf(TypeOracle::GetTaggedType())) {
field_size = kTaggedSize;
size_string = "kTaggedSize";
machine_type = field_type->IsSubtypeOf(TypeOracle::GetSmiType())
? "MachineType::TaggedSigned()"
: "MachineType::AnyTagged()";
} else if (field_type->IsSubtypeOf(TypeOracle::GetRawPtrType())) {
field_size = kSystemPointerSize;
size_string = "kSystemPointerSize";
machine_type = "MachineType::Pointer()";
} else if (field_type == TypeOracle::GetInt32Type()) {
field_size = kInt32Size;
size_string = "kInt32Size";
machine_type = "MachineType::Int32()";
} else if (field_type == TypeOracle::GetUint32Type()) {
field_size = kInt32Size;
size_string = "kInt32Size";
machine_type = "MachineType::Uint32()";
} else if (field_type == TypeOracle::GetInt16Type()) {
field_size = kUInt16Size;
size_string = "kUInt16Size";
machine_type = "MachineType::Int16()";
} else if (field_type == TypeOracle::GetUint16Type()) {
field_size = kUInt16Size;
size_string = "kUInt16Size";
machine_type = "MachineType::Uint16()";
} else if (field_type == TypeOracle::GetInt8Type()) {
} else if (field_type->IsSubtypeOf(TypeOracle::GetVoidType())) {
field_size = 0;
size_string = "0";
} else if (field_type->IsSubtypeOf(TypeOracle::GetInt8Type())) {
field_size = kUInt8Size;
size_string = "kUInt8Size";
machine_type = "MachineType::Int8()";
} else if (field_type == TypeOracle::GetUint8Type()) {
} else if (field_type->IsSubtypeOf(TypeOracle::GetUint8Type())) {
field_size = kUInt8Size;
size_string = "kUInt8Size";
machine_type = "MachineType::Uint8()";
} else if (field_type == TypeOracle::GetFloat64Type()) {
} else if (field_type->IsSubtypeOf(TypeOracle::GetInt16Type())) {
field_size = kUInt16Size;
size_string = "kUInt16Size";
} else if (field_type->IsSubtypeOf(TypeOracle::GetUint16Type())) {
field_size = kUInt16Size;
size_string = "kUInt16Size";
} else if (field_type->IsSubtypeOf(TypeOracle::GetInt32Type())) {
field_size = kInt32Size;
size_string = "kInt32Size";
} else if (field_type->IsSubtypeOf(TypeOracle::GetUint32Type())) {
field_size = kInt32Size;
size_string = "kInt32Size";
} else if (field_type->IsSubtypeOf(TypeOracle::GetFloat64Type())) {
field_size = kDoubleSize;
size_string = "kDoubleSize";
machine_type = "MachineType::Float64()";
} else if (field_type == TypeOracle::GetIntPtrType()) {
} else if (field_type->IsSubtypeOf(TypeOracle::GetIntPtrType())) {
field_size = kIntptrSize;
size_string = "kIntptrSize";
machine_type = "MachineType::IntPtr()";
} else if (field_type == TypeOracle::GetUIntPtrType()) {
} else if (field_type->IsSubtypeOf(TypeOracle::GetUIntPtrType())) {
field_size = kIntptrSize;
size_string = "kIntptrSize";
machine_type = "MachineType::IntPtr()";
} else {
ReportError("fields of type ", *field_type, " are not (yet) supported");
}
return std::make_tuple(field_size, size_string, machine_type);
return std::make_tuple(field_size, size_string);
}
} // namespace torque


@@ -146,7 +146,7 @@ struct Field {
// TODO(danno): This likely should be refactored, the handling of the types
// using the universal grab-bag utility with std::tie, as well as the
// reliance of string types is quite clunky.
std::tuple<size_t, std::string, std::string> GetFieldSizeInformation() const;
std::tuple<size_t, std::string> GetFieldSizeInformation() const;
SourcePosition pos;
const AggregateType* aggregate;


@@ -785,14 +785,18 @@ namespace test {
i: uintptr;
}
// This class should throw alignment errors if @ifdef decorators aren't
// This class should throw alignment errors if @if decorators aren't
// working.
@noVerifier
extern class PreprocessingTest extends JSObject {
@ifdef(FALSE_FOR_TESTING) a: int8;
@ifdef(TRUE_FOR_TESTING) a: int16;
@if(FALSE_FOR_TESTING) a: int8;
@if(TRUE_FOR_TESTING) a: int16;
b: int16;
d: int32;
@ifnot(TRUE_FOR_TESTING) e: int8;
@ifnot(FALSE_FOR_TESTING) f: int16;
g: int16;
h: int32;
}
macro TestClassWithAllTypesLoadsAndStores(


@@ -49,6 +49,7 @@ def preprocess(input):
r'\n otherwise', input)
input = re.sub(r'(\n\s*\S[^\n]*\s)otherwise',
r'\1_OtheSaLi', input)
input = re.sub(r'@if\(', r'@iF(', input)
# Special handing of '%' for intrinsics, turn the percent
# into a unicode character so that it gets treated as part of the
@@ -83,6 +84,7 @@ def postprocess(output):
r"\n\1otherwise", output)
output = re.sub(r'_OtheSaLi',
r"otherwise", output)
output = re.sub(r'@iF\(', r'@if(', output)
while True:
old = output