[torque] move more bitfield definitions to Torque

This change moves the definitions of the bitfield flags used by Symbol
and Map to Torque. Symbol could directly follow the pattern established
by SharedFunctionInfo, but Map required some other changes:
- Until now, Torque bitfield definitions have required unsigned types. I
  thought that this would be the least-surprising behavior, since we
  never sign-extend when decoding bitfield values (see the first sketch
  after this list). However, I believe that the churn involved in making
  ElementsKind unsigned outweighs the benefit we were getting from this
  restriction (and similar difficulties are likely to arise in
  converting other bitfield structs to Torque), so this CL updates
  Torque to allow signed bitfield values.
- If we try to make Map inherit from all of the generated classes that
  define its flags, we end up with class sizing problems, because some
  compilers only apply the empty base class optimization to the first in
  a row of empty base classes (see the second sketch after this list).
  We could work around this issue by generating macros instead of
  classes, but I took this as an opportunity for a minor clean-up
  instead: rather than having the bitfield definitions for several
  different bitfield structs all jumbled together in Map, they can be
  split up. I think this makes the code a little easier to follow, but
  if others disagree I'm happy to implement macro generation instead.
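
To make the first point concrete, here is a minimal C++ sketch of how a
packed bitfield value is decoded (the constants and names below are
hypothetical stand-ins for what a generated ElementsKindBits-style
helper provides). Decoding is a mask plus a logical shift, so the
result is never sign-extended, whatever the field's declared type:

  #include <cstdint>
  #include <cstdio>

  // Hypothetical 5-bit field at bit offset 3 of an 8-bit word,
  // mirroring the shape of ElementsKindBits within bit_field2.
  constexpr uint32_t kShift = 3;
  constexpr uint32_t kSize = 5;
  constexpr uint32_t kMask = ((1u << kSize) - 1) << kShift;

  // Masks and shifts; the result lands in [0, 31] even when the
  // field's declared type (e.g. ElementsKind) is a signed enum, which
  // is the "never sign-extend" behavior referred to above.
  int32_t DecodeElementsKind(uint32_t word) {
    return static_cast<int32_t>((word & kMask) >> kShift);
  }

  int main() {
    std::printf("%d\n", DecodeElementsKind(0xFFu));  // prints 31
    return 0;
  }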
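And a self-contained sketch of the class sizing problem from the second
point, assuming (as described above) a compiler that folds away only
the first in a row of empty bases; Bits1/Bits2/Bits3 are illustrative
stand-ins for the generated classes:

  #include <cstdint>
  #include <cstdio>

  struct Bits1 {};  // stand-ins for generated classes that hold only
  struct Bits2 {};  // static bitfield definitions, no data members
  struct Bits3 {};

  // Itanium-ABI compilers place all three empty bases at offset 0,
  // but e.g. MSVC without __declspec(empty_bases) may fold away only
  // the first one, padding the others and growing the object.
  struct MultiBase : Bits1, Bits2, Bits3 { uint32_t payload; };
  struct SingleBase : Bits1 { uint32_t payload; };

  int main() {
    // Prints "4 4" under the Itanium ABI; MSVC may print "4 12".
    std::printf("%zu %zu\n", sizeof(SingleBase), sizeof(MultiBase));
    return 0;
  }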

Change-Id: Ibf339b0be97f72d740bf1daa8300b471912faeba
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1988934
Reviewed-by: Tobias Tebbi <tebbi@chromium.org>
Reviewed-by: Ross McIlroy <rmcilroy@chromium.org>
Commit-Queue: Seth Brenith <seth.brenith@microsoft.com>
Cr-Commit-Position: refs/heads/master@{#65701}
Authored by Seth Brenith on 2020-01-08 08:08:55 -08:00; committed by Commit Bot
parent 2a7c3d2ba7
commit 87c16da505
39 changed files with 311 additions and 268 deletions

View File

@@ -1789,7 +1789,7 @@ void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm,
__ JumpIfSmi(r3, &new_target_not_constructor);
__ ldr(scratch, FieldMemOperand(r3, HeapObject::kMapOffset));
__ ldrb(scratch, FieldMemOperand(scratch, Map::kBitFieldOffset));
__ tst(scratch, Operand(Map::IsConstructorBit::kMask));
__ tst(scratch, Operand(Map::Bits1::IsConstructorBit::kMask));
__ b(ne, &new_target_constructor);
__ bind(&new_target_not_constructor);
{
@@ -2103,7 +2103,7 @@ void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode) {
// Check if target has a [[Call]] internal method.
__ ldrb(r4, FieldMemOperand(r4, Map::kBitFieldOffset));
__ tst(r4, Operand(Map::IsCallableBit::kMask));
__ tst(r4, Operand(Map::Bits1::IsCallableBit::kMask));
__ b(eq, &non_callable);
// Check if target is a proxy and call CallProxy external builtin
@@ -2198,7 +2198,7 @@ void Builtins::Generate_Construct(MacroAssembler* masm) {
// Check if target has a [[Construct]] internal method.
__ ldr(r4, FieldMemOperand(r1, HeapObject::kMapOffset));
__ ldrb(r2, FieldMemOperand(r4, Map::kBitFieldOffset));
__ tst(r2, Operand(Map::IsConstructorBit::kMask));
__ tst(r2, Operand(Map::Bits1::IsConstructorBit::kMask));
__ b(eq, &non_constructor);
// Dispatch based on instance type.

View File

@@ -2170,7 +2170,7 @@ void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm,
__ JumpIfSmi(x3, &new_target_not_constructor);
__ LoadTaggedPointerField(x5, FieldMemOperand(x3, HeapObject::kMapOffset));
__ Ldrb(x5, FieldMemOperand(x5, Map::kBitFieldOffset));
__ TestAndBranchIfAnySet(x5, Map::IsConstructorBit::kMask,
__ TestAndBranchIfAnySet(x5, Map::Bits1::IsConstructorBit::kMask,
&new_target_constructor);
__ Bind(&new_target_not_constructor);
{
@@ -2527,7 +2527,8 @@ void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode) {
// Check if target has a [[Call]] internal method.
__ Ldrb(x4, FieldMemOperand(x4, Map::kBitFieldOffset));
__ TestAndBranchIfAllClear(x4, Map::IsCallableBit::kMask, &non_callable);
__ TestAndBranchIfAllClear(x4, Map::Bits1::IsCallableBit::kMask,
&non_callable);
// Check if target is a proxy and call CallProxy external builtin
__ Cmp(x5, JS_PROXY_TYPE);
@@ -2628,7 +2629,7 @@ void Builtins::Generate_Construct(MacroAssembler* masm) {
// Check if target has a [[Construct]] internal method.
__ LoadTaggedPointerField(x4, FieldMemOperand(x1, HeapObject::kMapOffset));
__ Ldrb(x2, FieldMemOperand(x4, Map::kBitFieldOffset));
__ TestAndBranchIfAllClear(x2, Map::IsConstructorBit::kMask,
__ TestAndBranchIfAllClear(x2, Map::Bits1::IsConstructorBit::kMask,
&non_constructor);
// Dispatch based on instance type.

View File

@@ -511,9 +511,9 @@ void CallOrConstructBuiltinsAssembler::CallFunctionTemplate(
TNode<Map> receiver_map = LoadMap(receiver);
Label receiver_needs_access_check(this, Label::kDeferred),
receiver_done(this);
GotoIfNot(
IsSetWord32<Map::IsAccessCheckNeededBit>(LoadMapBitField(receiver_map)),
&receiver_done);
GotoIfNot(IsSetWord32<Map::Bits1::IsAccessCheckNeededBit>(
LoadMapBitField(receiver_map)),
&receiver_done);
TNode<IntPtrT> function_template_info_flags = LoadAndUntagObjectField(
function_template_info, FunctionTemplateInfo::kFlagOffset);
Branch(IsSetWord(function_template_info_flags,

View File

@@ -443,11 +443,11 @@ TNode<HeapObject> ConstructorBuiltinsAssembler::EmitCreateShallowObjectLiteral(
TVARIABLE(FixedArray, var_properties);
{
TNode<Uint32T> bit_field_3 = LoadMapBitField3(boilerplate_map);
GotoIf(IsSetWord32<Map::IsDeprecatedBit>(bit_field_3), call_runtime);
GotoIf(IsSetWord32<Map::Bits3::IsDeprecatedBit>(bit_field_3), call_runtime);
// Directly copy over the property store for dict-mode boilerplates.
Label if_dictionary(this), if_fast(this), done(this);
Branch(IsSetWord32<Map::IsDictionaryMapBit>(bit_field_3), &if_dictionary,
&if_fast);
Branch(IsSetWord32<Map::Bits3::IsDictionaryMapBit>(bit_field_3),
&if_dictionary, &if_fast);
BIND(&if_dictionary);
{
Comment("Copy dictionary properties");
@@ -633,8 +633,8 @@ TNode<JSObject> ConstructorBuiltinsAssembler::EmitCreateEmptyObjectLiteral(
object_function, JSFunction::kPrototypeOrInitialMapOffset);
// Ensure that slack tracking is disabled for the map.
STATIC_ASSERT(Map::kNoSlackTracking == 0);
CSA_ASSERT(
this, IsClearWord32<Map::ConstructionCounterBits>(LoadMapBitField3(map)));
CSA_ASSERT(this, IsClearWord32<Map::Bits3::ConstructionCounterBits>(
LoadMapBitField3(map)));
TNode<FixedArray> empty_fixed_array = EmptyFixedArrayConstant();
TNode<JSObject> result =
AllocateJSObjectFromMap(map, empty_fixed_array, empty_fixed_array);

View File

@@ -230,7 +230,7 @@ TNode<JSArray> ObjectEntriesValuesBuiltinsAssembler::FastGetOwnValuesOrEntries(
Label if_has_enum_cache(this), if_not_has_enum_cache(this),
collect_entries(this);
TNode<IntPtrT> object_enum_length =
Signed(DecodeWordFromWord32<Map::EnumLengthBits>(bit_field3));
Signed(DecodeWordFromWord32<Map::Bits3::EnumLengthBits>(bit_field3));
TNode<BoolT> has_enum_cache = WordNotEqual(
object_enum_length, IntPtrConstant(kInvalidEnumCacheSentinel));
@@ -480,7 +480,7 @@ TF_BUILTIN(ObjectKeys, ObjectBuiltinsAssembler) {
TNode<Map> object_map = LoadMap(CAST(object));
TNode<Uint32T> object_bit_field3 = LoadMapBitField3(object_map);
TNode<UintPtrT> object_enum_length =
DecodeWordFromWord32<Map::EnumLengthBits>(object_bit_field3);
DecodeWordFromWord32<Map::Bits3::EnumLengthBits>(object_bit_field3);
GotoIf(
WordEqual(object_enum_length, IntPtrConstant(kInvalidEnumCacheSentinel)),
&if_slow);
@@ -577,14 +577,15 @@ TF_BUILTIN(ObjectGetOwnPropertyNames, ObjectBuiltinsAssembler) {
BIND(&if_empty_elements);
TNode<Uint32T> object_bit_field3 = LoadMapBitField3(object_map);
TNode<UintPtrT> object_enum_length =
DecodeWordFromWord32<Map::EnumLengthBits>(object_bit_field3);
DecodeWordFromWord32<Map::Bits3::EnumLengthBits>(object_bit_field3);
GotoIf(
WordEqual(object_enum_length, IntPtrConstant(kInvalidEnumCacheSentinel)),
&try_fast);
// Check whether all own properties are enumerable.
TNode<UintPtrT> number_descriptors =
DecodeWordFromWord32<Map::NumberOfOwnDescriptorsBits>(object_bit_field3);
DecodeWordFromWord32<Map::Bits3::NumberOfOwnDescriptorsBits>(
object_bit_field3);
GotoIfNot(WordEqual(object_enum_length, number_descriptors), &if_slow);
// Check whether there are enumerable properties.
@@ -1029,7 +1030,8 @@ TF_BUILTIN(ObjectToString, ObjectBuiltinsAssembler) {
GotoIf(IsNull(holder), &return_default);
TNode<Map> holder_map = LoadMap(holder);
TNode<Uint32T> holder_bit_field3 = LoadMapBitField3(holder_map);
GotoIf(IsSetWord32<Map::MayHaveInterestingSymbolsBit>(holder_bit_field3),
GotoIf(IsSetWord32<Map::Bits3::MayHaveInterestingSymbolsBit>(
holder_bit_field3),
&return_generic);
var_holder = LoadMapPrototype(holder_map);
Goto(&loop);
@@ -1093,8 +1095,9 @@ TF_BUILTIN(ObjectCreate, ObjectBuiltinsAssembler) {
&call_runtime);
// Handle dictionary objects or fast objects with properties in runtime.
TNode<Uint32T> bit_field3 = LoadMapBitField3(properties_map);
GotoIf(IsSetWord32<Map::IsDictionaryMapBit>(bit_field3), &call_runtime);
Branch(IsSetWord32<Map::NumberOfOwnDescriptorsBits>(bit_field3),
GotoIf(IsSetWord32<Map::Bits3::IsDictionaryMapBit>(bit_field3),
&call_runtime);
Branch(IsSetWord32<Map::Bits3::NumberOfOwnDescriptorsBits>(bit_field3),
&call_runtime, &no_properties);
}

View File

@@ -1910,7 +1910,7 @@ void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm,
__ JumpIfSmi(edx, &new_target_not_constructor, Label::kNear);
__ mov(scratch, FieldOperand(edx, HeapObject::kMapOffset));
__ test_b(FieldOperand(scratch, Map::kBitFieldOffset),
Immediate(Map::IsConstructorBit::kMask));
Immediate(Map::Bits1::IsConstructorBit::kMask));
__ j(not_zero, &new_target_constructor, Label::kNear);
__ bind(&new_target_not_constructor);
{
@@ -2239,7 +2239,7 @@ void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode) {
// Check if target is a proxy and call CallProxy external builtin
__ bind(&non_jsboundfunction);
__ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
Immediate(Map::IsCallableBit::kMask));
Immediate(Map::Bits1::IsCallableBit::kMask));
__ j(zero, &non_callable);
// Call CallProxy external builtin
@@ -2342,7 +2342,7 @@ void Builtins::Generate_Construct(MacroAssembler* masm) {
// Check if target has a [[Construct]] internal method.
__ mov(ecx, FieldOperand(edi, HeapObject::kMapOffset));
__ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
Immediate(Map::IsConstructorBit::kMask));
Immediate(Map::Bits1::IsConstructorBit::kMask));
__ j(zero, &non_constructor);
// Dispatch based on instance type.

View File

@@ -1780,7 +1780,7 @@ void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm,
__ JumpIfSmi(a3, &new_target_not_constructor);
__ lw(t1, FieldMemOperand(a3, HeapObject::kMapOffset));
__ lbu(t1, FieldMemOperand(t1, Map::kBitFieldOffset));
__ And(t1, t1, Operand(Map::IsConstructorBit::kMask));
__ And(t1, t1, Operand(Map::Bits1::IsConstructorBit::kMask));
__ Branch(&new_target_constructor, ne, t1, Operand(zero_reg));
__ bind(&new_target_not_constructor);
{
@@ -2052,7 +2052,7 @@ void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode) {
// Check if target has a [[Call]] internal method.
__ lbu(t1, FieldMemOperand(t1, Map::kBitFieldOffset));
__ And(t1, t1, Operand(Map::IsCallableBit::kMask));
__ And(t1, t1, Operand(Map::Bits1::IsCallableBit::kMask));
__ Branch(&non_callable, eq, t1, Operand(zero_reg));
// Check if target is a proxy and call CallProxy external builtin
@@ -2214,7 +2214,7 @@ void Builtins::Generate_Construct(MacroAssembler* masm) {
// Check if target has a [[Construct]] internal method.
__ lw(t1, FieldMemOperand(a1, HeapObject::kMapOffset));
__ lbu(t3, FieldMemOperand(t1, Map::kBitFieldOffset));
__ And(t3, t3, Operand(Map::IsConstructorBit::kMask));
__ And(t3, t3, Operand(Map::Bits1::IsConstructorBit::kMask));
__ Branch(&non_constructor, eq, t3, Operand(zero_reg));
// Dispatch based on instance type.

View File

@@ -1822,7 +1822,7 @@ void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm,
__ JumpIfSmi(a3, &new_target_not_constructor);
__ ld(t1, FieldMemOperand(a3, HeapObject::kMapOffset));
__ lbu(t1, FieldMemOperand(t1, Map::kBitFieldOffset));
__ And(t1, t1, Operand(Map::IsConstructorBit::kMask));
__ And(t1, t1, Operand(Map::Bits1::IsConstructorBit::kMask));
__ Branch(&new_target_constructor, ne, t1, Operand(zero_reg));
__ bind(&new_target_not_constructor);
{
@@ -2092,7 +2092,7 @@ void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode) {
// Check if target has a [[Call]] internal method.
__ Lbu(t1, FieldMemOperand(t1, Map::kBitFieldOffset));
__ And(t1, t1, Operand(Map::IsCallableBit::kMask));
__ And(t1, t1, Operand(Map::Bits1::IsCallableBit::kMask));
__ Branch(&non_callable, eq, t1, Operand(zero_reg));
__ Jump(BUILTIN_CODE(masm->isolate(), CallProxy),
@@ -2250,7 +2250,7 @@ void Builtins::Generate_Construct(MacroAssembler* masm) {
// Check if target has a [[Construct]] internal method.
__ ld(t1, FieldMemOperand(a1, HeapObject::kMapOffset));
__ Lbu(t3, FieldMemOperand(t1, Map::kBitFieldOffset));
__ And(t3, t3, Operand(Map::IsConstructorBit::kMask));
__ And(t3, t3, Operand(Map::Bits1::IsConstructorBit::kMask));
__ Branch(&non_constructor, eq, t3, Operand(zero_reg));
// Dispatch based on instance type.

View File

@@ -1874,7 +1874,7 @@ void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm,
__ JumpIfSmi(r6, &new_target_not_constructor);
__ LoadP(scratch, FieldMemOperand(r6, HeapObject::kMapOffset));
__ lbz(scratch, FieldMemOperand(scratch, Map::kBitFieldOffset));
__ TestBit(scratch, Map::IsConstructorBit::kShift, r0);
__ TestBit(scratch, Map::Bits1::IsConstructorBit::kShift, r0);
__ bne(&new_target_constructor, cr0);
__ bind(&new_target_not_constructor);
{
@@ -2193,7 +2193,7 @@ void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode) {
// Check if target has a [[Call]] internal method.
__ lbz(r7, FieldMemOperand(r7, Map::kBitFieldOffset));
__ TestBit(r7, Map::IsCallableBit::kShift, r0);
__ TestBit(r7, Map::Bits1::IsCallableBit::kShift, r0);
__ beq(&non_callable, cr0);
// Check if target is a proxy and call CallProxy external builtin
@@ -2294,7 +2294,7 @@ void Builtins::Generate_Construct(MacroAssembler* masm) {
// Check if target has a [[Construct]] internal method.
__ LoadP(r7, FieldMemOperand(r4, HeapObject::kMapOffset));
__ lbz(r5, FieldMemOperand(r7, Map::kBitFieldOffset));
__ TestBit(r5, Map::IsConstructorBit::kShift, r0);
__ TestBit(r5, Map::Bits1::IsConstructorBit::kShift, r0);
__ beq(&non_constructor, cr0);
// Dispatch based on instance type.

View File

@@ -1932,7 +1932,7 @@ void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm,
__ JumpIfSmi(r5, &new_target_not_constructor);
__ LoadP(scratch, FieldMemOperand(r5, HeapObject::kMapOffset));
__ LoadlB(scratch, FieldMemOperand(scratch, Map::kBitFieldOffset));
__ tmll(scratch, Operand(Map::IsConstructorBit::kShift));
__ tmll(scratch, Operand(Map::Bits1::IsConstructorBit::kShift));
__ bne(&new_target_constructor);
__ bind(&new_target_not_constructor);
{
@@ -2248,7 +2248,7 @@ void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode) {
// Check if target has a [[Call]] internal method.
__ LoadlB(r6, FieldMemOperand(r6, Map::kBitFieldOffset));
__ TestBit(r6, Map::IsCallableBit::kShift);
__ TestBit(r6, Map::Bits1::IsCallableBit::kShift);
__ beq(&non_callable);
// Check if target is a proxy and call CallProxy external builtin
@@ -2348,7 +2348,7 @@ void Builtins::Generate_Construct(MacroAssembler* masm) {
// Check if target has a [[Construct]] internal method.
__ LoadP(r6, FieldMemOperand(r3, HeapObject::kMapOffset));
__ LoadlB(r4, FieldMemOperand(r6, Map::kBitFieldOffset));
__ TestBit(r4, Map::IsConstructorBit::kShift);
__ TestBit(r4, Map::Bits1::IsConstructorBit::kShift);
__ beq(&non_constructor);
// Dispatch based on instance type.

View File

@@ -2028,7 +2028,7 @@ void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm,
__ JumpIfSmi(rdx, &new_target_not_constructor, Label::kNear);
__ LoadTaggedPointerField(rbx, FieldOperand(rdx, HeapObject::kMapOffset));
__ testb(FieldOperand(rbx, Map::kBitFieldOffset),
Immediate(Map::IsConstructorBit::kMask));
Immediate(Map::Bits1::IsConstructorBit::kMask));
__ j(not_zero, &new_target_constructor, Label::kNear);
__ bind(&new_target_not_constructor);
{
@@ -2366,7 +2366,7 @@ void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode) {
// Check if target has a [[Call]] internal method.
__ testb(FieldOperand(rcx, Map::kBitFieldOffset),
Immediate(Map::IsCallableBit::kMask));
Immediate(Map::Bits1::IsCallableBit::kMask));
__ j(zero, &non_callable, Label::kNear);
// Check if target is a proxy and call CallProxy external builtin
@@ -2466,7 +2466,7 @@ void Builtins::Generate_Construct(MacroAssembler* masm) {
// Check if target has a [[Construct]] internal method.
__ LoadTaggedPointerField(rcx, FieldOperand(rdi, HeapObject::kMapOffset));
__ testb(FieldOperand(rcx, Map::kBitFieldOffset),
Immediate(Map::IsConstructorBit::kMask));
Immediate(Map::Bits1::IsConstructorBit::kMask));
__ j(zero, &non_constructor);
// Dispatch based on instance type.

View File

@@ -2022,7 +2022,7 @@ void MacroAssembler::AssertConstructor(Register object) {
push(object);
LoadMap(object, object);
ldrb(object, FieldMemOperand(object, Map::kBitFieldOffset));
tst(object, Operand(Map::IsConstructorBit::kMask));
tst(object, Operand(Map::Bits1::IsConstructorBit::kMask));
pop(object);
Check(ne, AbortReason::kOperandIsNotAConstructor);
}

View File

@@ -1538,7 +1538,7 @@ void MacroAssembler::AssertConstructor(Register object) {
LoadMap(temp, object);
Ldrb(temp, FieldMemOperand(temp, Map::kBitFieldOffset));
Tst(temp, Operand(Map::IsConstructorBit::kMask));
Tst(temp, Operand(Map::Bits1::IsConstructorBit::kMask));
Check(ne, AbortReason::kOperandIsNotAConstructor);
}
@@ -2570,7 +2570,7 @@ void MacroAssembler::LoadElementsKindFromMap(Register result, Register map) {
// Load the map's "bit field 2".
Ldrb(result, FieldMemOperand(map, Map::kBitField2Offset));
// Retrieve elements_kind from bit field 2.
DecodeField<Map::ElementsKindBits>(result);
DecodeField<Map::Bits2::ElementsKindBits>(result);
}
void MacroAssembler::CompareRoot(const Register& obj, RootIndex index) {

View File

@@ -1595,8 +1595,8 @@ TNode<BoolT> CodeStubAssembler::IsSpecialReceiverMap(SloppyTNode<Map> map) {
CSA_SLOW_ASSERT(this, IsMap(map));
TNode<BoolT> is_special =
IsSpecialReceiverInstanceType(LoadMapInstanceType(map));
uint32_t mask =
Map::HasNamedInterceptorBit::kMask | Map::IsAccessCheckNeededBit::kMask;
uint32_t mask = Map::Bits1::HasNamedInterceptorBit::kMask |
Map::Bits1::IsAccessCheckNeededBit::kMask;
USE(mask);
// Interceptors or access checks imply special receiver.
CSA_ASSERT(this,
@@ -1696,7 +1696,7 @@ TNode<Int32T> CodeStubAssembler::LoadNumberOfDescriptors(
TNode<Int32T> CodeStubAssembler::LoadNumberOfOwnDescriptors(TNode<Map> map) {
TNode<Uint32T> bit_field3 = LoadMapBitField3(map);
return UncheckedCast<Int32T>(
DecodeWord32<Map::NumberOfOwnDescriptorsBits>(bit_field3));
DecodeWord32<Map::Bits3::NumberOfOwnDescriptorsBits>(bit_field3));
}
TNode<Int32T> CodeStubAssembler::LoadMapBitField(SloppyTNode<Map> map) {
@@ -1724,7 +1724,7 @@ TNode<Uint16T> CodeStubAssembler::LoadMapInstanceType(SloppyTNode<Map> map) {
TNode<Int32T> CodeStubAssembler::LoadMapElementsKind(SloppyTNode<Map> map) {
CSA_SLOW_ASSERT(this, IsMap(map));
TNode<Int32T> bit_field2 = LoadMapBitField2(map);
return Signed(DecodeWord32<Map::ElementsKindBits>(bit_field2));
return Signed(DecodeWord32<Map::Bits2::ElementsKindBits>(bit_field2));
}
TNode<Int32T> CodeStubAssembler::LoadElementsKind(
@@ -1796,7 +1796,7 @@ TNode<Object> CodeStubAssembler::LoadMapConstructor(SloppyTNode<Map> map) {
TNode<WordT> CodeStubAssembler::LoadMapEnumLength(SloppyTNode<Map> map) {
CSA_SLOW_ASSERT(this, IsMap(map));
TNode<Uint32T> bit_field3 = LoadMapBitField3(map);
return DecodeWordFromWord32<Map::EnumLengthBits>(bit_field3);
return DecodeWordFromWord32<Map::Bits3::EnumLengthBits>(bit_field3);
}
TNode<Object> CodeStubAssembler::LoadMapBackPointer(SloppyTNode<Map> map) {
@@ -1814,7 +1814,8 @@ TNode<Uint32T> CodeStubAssembler::EnsureOnlyHasSimpleProperties(
GotoIf(IsCustomElementsReceiverInstanceType(instance_type), bailout);
TNode<Uint32T> bit_field3 = LoadMapBitField3(map);
GotoIf(IsSetWord32(bit_field3, Map::IsDictionaryMapBit::kMask), bailout);
GotoIf(IsSetWord32(bit_field3, Map::Bits3::IsDictionaryMapBit::kMask),
bailout);
return bit_field3;
}
@@ -2726,7 +2727,8 @@ TNode<BoolT> CodeStubAssembler::IsJSFunctionWithPrototypeSlot(
TNode<HeapObject> object) {
// Only JSFunction maps may have HasPrototypeSlotBit set.
return TNode<BoolT>::UncheckedCast(
IsSetWord32<Map::HasPrototypeSlotBit>(LoadMapBitField(LoadMap(object))));
IsSetWord32<Map::Bits1::HasPrototypeSlotBit>(
LoadMapBitField(LoadMap(object))));
}
void CodeStubAssembler::BranchIfHasPrototypeProperty(
@@ -2734,8 +2736,8 @@ void CodeStubAssembler::BranchIfHasPrototypeProperty(
Label* if_true, Label* if_false) {
// (has_prototype_slot() && IsConstructor()) ||
// IsGeneratorFunction(shared()->kind())
uint32_t mask =
Map::HasPrototypeSlotBit::kMask | Map::IsConstructorBit::kMask;
uint32_t mask = Map::Bits1::HasPrototypeSlotBit::kMask |
Map::Bits1::IsConstructorBit::kMask;
GotoIf(IsAllSetWord32(function_map_bit_field, mask), if_true);
Branch(IsGeneratorFunction(function), if_true, if_false);
@@ -2748,13 +2750,14 @@ void CodeStubAssembler::GotoIfPrototypeRequiresRuntimeLookup(
Label next_check(this);
BranchIfHasPrototypeProperty(function, map_bit_field, &next_check, runtime);
BIND(&next_check);
GotoIf(IsSetWord32<Map::HasNonInstancePrototypeBit>(map_bit_field), runtime);
GotoIf(IsSetWord32<Map::Bits1::HasNonInstancePrototypeBit>(map_bit_field),
runtime);
}
TNode<HeapObject> CodeStubAssembler::LoadJSFunctionPrototype(
TNode<JSFunction> function, Label* if_bailout) {
CSA_ASSERT(this, IsFunctionWithPrototypeSlotMap(LoadMap(function)));
CSA_ASSERT(this, IsClearWord32<Map::HasNonInstancePrototypeBit>(
CSA_ASSERT(this, IsClearWord32<Map::Bits1::HasNonInstancePrototypeBit>(
LoadMapBitField(LoadMap(function))));
TNode<HeapObject> proto_or_map = LoadObjectField<HeapObject>(
function, JSFunction::kPrototypeOrInitialMapOffset);
@@ -2979,7 +2982,7 @@ TNode<Int32T> CodeStubAssembler::EnsureArrayPushable(TNode<Context> context,
EnsureArrayLengthWritable(context, map, bailout);
TNode<Uint32T> kind =
DecodeWord32<Map::ElementsKindBits>(LoadMapBitField2(map));
DecodeWord32<Map::Bits2::ElementsKindBits>(LoadMapBitField2(map));
return Signed(kind);
}
@@ -3552,8 +3555,8 @@ void CodeStubAssembler::InitializeJSObjectBodyNoSlackTracking(
SloppyTNode<HeapObject> object, SloppyTNode<Map> map,
SloppyTNode<IntPtrT> instance_size, int start_offset) {
STATIC_ASSERT(Map::kNoSlackTracking == 0);
CSA_ASSERT(
this, IsClearWord32<Map::ConstructionCounterBits>(LoadMapBitField3(map)));
CSA_ASSERT(this, IsClearWord32<Map::Bits3::ConstructionCounterBits>(
LoadMapBitField3(map)));
InitializeFieldsWithRoot(object, IntPtrConstant(start_offset), instance_size,
RootIndex::kUndefinedValue);
}
@@ -3568,7 +3571,7 @@ void CodeStubAssembler::InitializeJSObjectBodyWithSlackTracking(
TNode<Uint32T> bit_field3 = LoadMapBitField3(map);
Label end(this), slack_tracking(this), complete(this, Label::kDeferred);
STATIC_ASSERT(Map::kNoSlackTracking == 0);
GotoIf(IsSetWord32<Map::ConstructionCounterBits>(bit_field3),
GotoIf(IsSetWord32<Map::Bits3::ConstructionCounterBits>(bit_field3),
&slack_tracking);
Comment("No slack tracking");
InitializeJSObjectBodyNoSlackTracking(object, map, instance_size);
@@ -3579,9 +3582,10 @@ void CodeStubAssembler::InitializeJSObjectBodyWithSlackTracking(
Comment("Decrease construction counter");
// Slack tracking is only done on initial maps.
CSA_ASSERT(this, IsUndefined(LoadMapBackPointer(map)));
STATIC_ASSERT(Map::ConstructionCounterBits::kLastUsedBit == 31);
STATIC_ASSERT(Map::Bits3::ConstructionCounterBits::kLastUsedBit == 31);
TNode<Word32T> new_bit_field3 = Int32Sub(
bit_field3, Int32Constant(1 << Map::ConstructionCounterBits::kShift));
bit_field3,
Int32Constant(1 << Map::Bits3::ConstructionCounterBits::kShift));
StoreObjectFieldNoWriteBarrier(map, Map::kBitField3Offset, new_bit_field3,
MachineRepresentation::kWord32);
STATIC_ASSERT(Map::kSlackTrackingCounterEnd == 1);
@@ -3601,7 +3605,7 @@ void CodeStubAssembler::InitializeJSObjectBodyWithSlackTracking(
RootIndex::kUndefinedValue);
STATIC_ASSERT(Map::kNoSlackTracking == 0);
GotoIf(IsClearWord32<Map::ConstructionCounterBits>(new_bit_field3),
GotoIf(IsClearWord32<Map::Bits3::ConstructionCounterBits>(new_bit_field3),
&complete);
Goto(&end);
}
@@ -5612,24 +5616,25 @@ TNode<BoolT> CodeStubAssembler::InstanceTypeEqual(
TNode<BoolT> CodeStubAssembler::IsDictionaryMap(SloppyTNode<Map> map) {
CSA_SLOW_ASSERT(this, IsMap(map));
return IsSetWord32<Map::IsDictionaryMapBit>(LoadMapBitField3(map));
return IsSetWord32<Map::Bits3::IsDictionaryMapBit>(LoadMapBitField3(map));
}
TNode<BoolT> CodeStubAssembler::IsExtensibleMap(SloppyTNode<Map> map) {
CSA_ASSERT(this, IsMap(map));
return IsSetWord32<Map::IsExtensibleBit>(LoadMapBitField3(map));
return IsSetWord32<Map::Bits3::IsExtensibleBit>(LoadMapBitField3(map));
}
TNode<BoolT> CodeStubAssembler::IsExtensibleNonPrototypeMap(TNode<Map> map) {
int kMask = Map::IsExtensibleBit::kMask | Map::IsPrototypeMapBit::kMask;
int kExpected = Map::IsExtensibleBit::kMask;
int kMask =
Map::Bits3::IsExtensibleBit::kMask | Map::Bits3::IsPrototypeMapBit::kMask;
int kExpected = Map::Bits3::IsExtensibleBit::kMask;
return Word32Equal(Word32And(LoadMapBitField3(map), Int32Constant(kMask)),
Int32Constant(kExpected));
}
TNode<BoolT> CodeStubAssembler::IsCallableMap(SloppyTNode<Map> map) {
CSA_ASSERT(this, IsMap(map));
return IsSetWord32<Map::IsCallableBit>(LoadMapBitField(map));
return IsSetWord32<Map::Bits1::IsCallableBit>(LoadMapBitField(map));
}
TNode<BoolT> CodeStubAssembler::IsDebugInfo(TNode<HeapObject> object) {
@@ -5638,12 +5643,12 @@ TNode<BoolT> CodeStubAssembler::IsDebugInfo(TNode<HeapObject> object) {
TNode<BoolT> CodeStubAssembler::IsDeprecatedMap(SloppyTNode<Map> map) {
CSA_ASSERT(this, IsMap(map));
return IsSetWord32<Map::IsDeprecatedBit>(LoadMapBitField3(map));
return IsSetWord32<Map::Bits3::IsDeprecatedBit>(LoadMapBitField3(map));
}
TNode<BoolT> CodeStubAssembler::IsUndetectableMap(SloppyTNode<Map> map) {
CSA_ASSERT(this, IsMap(map));
return IsSetWord32<Map::IsUndetectableBit>(LoadMapBitField(map));
return IsSetWord32<Map::Bits1::IsUndetectableBit>(LoadMapBitField(map));
}
TNode<BoolT> CodeStubAssembler::IsNoElementsProtectorCellInvalid() {
@@ -5779,7 +5784,7 @@ TNode<BoolT> CodeStubAssembler::IsCode(SloppyTNode<HeapObject> object) {
TNode<BoolT> CodeStubAssembler::IsConstructorMap(SloppyTNode<Map> map) {
CSA_ASSERT(this, IsMap(map));
return IsSetWord32<Map::IsConstructorBit>(LoadMapBitField(map));
return IsSetWord32<Map::Bits1::IsConstructorBit>(LoadMapBitField(map));
}
TNode<BoolT> CodeStubAssembler::IsConstructor(SloppyTNode<HeapObject> object) {
@@ -5789,7 +5794,7 @@ TNode<BoolT> CodeStubAssembler::IsConstructor(SloppyTNode<HeapObject> object) {
TNode<BoolT> CodeStubAssembler::IsFunctionWithPrototypeSlotMap(
SloppyTNode<Map> map) {
CSA_ASSERT(this, IsMap(map));
return IsSetWord32<Map::HasPrototypeSlotBit>(LoadMapBitField(map));
return IsSetWord32<Map::Bits1::HasPrototypeSlotBit>(LoadMapBitField(map));
}
TNode<BoolT> CodeStubAssembler::IsSpecialReceiverInstanceType(
@@ -8112,7 +8117,7 @@ void CodeStubAssembler::ForEachEnumerableOwnProperty(
TVARIABLE(DescriptorArray, var_descriptors, LoadMapDescriptors(map));
TNode<Uint32T> nof_descriptors =
DecodeWord32<Map::NumberOfOwnDescriptorsBits>(bit_field3);
DecodeWord32<Map::Bits3::NumberOfOwnDescriptorsBits>(bit_field3);
TVARIABLE(BoolT, var_stable, Int32TrueConstant());
@@ -8359,7 +8364,8 @@ void CodeStubAssembler::DescriptorLookup(
SloppyTNode<Uint32T> bitfield3, Label* if_found,
TVariable<IntPtrT>* var_name_index, Label* if_not_found) {
Comment("DescriptorArrayLookup");
TNode<Uint32T> nof = DecodeWord32<Map::NumberOfOwnDescriptorsBits>(bitfield3);
TNode<Uint32T> nof =
DecodeWord32<Map::Bits3::NumberOfOwnDescriptorsBits>(bitfield3);
Lookup<DescriptorArray>(unique_name, descriptors, nof, if_found,
var_name_index, if_not_found);
}
@@ -8403,8 +8409,8 @@ void CodeStubAssembler::Lookup(TNode<Name> unique_name, TNode<Array> array,
}
TNode<BoolT> CodeStubAssembler::IsSimpleObjectMap(TNode<Map> map) {
uint32_t mask =
Map::HasNamedInterceptorBit::kMask | Map::IsAccessCheckNeededBit::kMask;
uint32_t mask = Map::Bits1::HasNamedInterceptorBit::kMask |
Map::Bits1::IsAccessCheckNeededBit::kMask;
// !IsSpecialReceiverType && !IsNamedInterceptor && !IsAccessCheckNeeded
return Select<BoolT>(
IsSpecialReceiverInstanceType(LoadMapInstanceType(map)),
@@ -8422,7 +8428,7 @@ void CodeStubAssembler::TryLookupPropertyInSimpleObject(
TNode<Uint32T> bit_field3 = LoadMapBitField3(map);
Label if_isfastmap(this), if_isslowmap(this);
Branch(IsSetWord32<Map::IsDictionaryMapBit>(bit_field3), &if_isslowmap,
Branch(IsSetWord32<Map::Bits3::IsDictionaryMapBit>(bit_field3), &if_isslowmap,
&if_isfastmap);
BIND(&if_isfastmap);
{
@@ -8463,8 +8469,8 @@ void CodeStubAssembler::TryLookupProperty(
// Handle interceptors and access checks in runtime.
TNode<Int32T> bit_field = LoadMapBitField(map);
int mask =
Map::HasNamedInterceptorBit::kMask | Map::IsAccessCheckNeededBit::kMask;
int mask = Map::Bits1::HasNamedInterceptorBit::kMask |
Map::Bits1::IsAccessCheckNeededBit::kMask;
GotoIf(IsSetWord32(bit_field, mask), if_bailout);
TNode<GlobalDictionary> dictionary = CAST(LoadSlowProperties(object));
@@ -9168,8 +9174,8 @@ TNode<Oddball> CodeStubAssembler::HasInPrototypeChain(TNode<Context> context,
GotoIf(InstanceTypeEqual(object_instance_type, JS_PROXY_TYPE),
&return_runtime);
TNode<Int32T> object_bitfield = LoadMapBitField(object_map);
int mask = Map::HasNamedInterceptorBit::kMask |
Map::IsAccessCheckNeededBit::kMask;
int mask = Map::Bits1::HasNamedInterceptorBit::kMask |
Map::Bits1::IsAccessCheckNeededBit::kMask;
Branch(IsSetWord32(object_bitfield, mask), &return_runtime,
&if_objectisdirect);
}
@@ -11987,12 +11993,13 @@ TNode<String> CodeStubAssembler::Typeof(SloppyTNode<Object> value) {
GotoIf(InstanceTypeEqual(instance_type, ODDBALL_TYPE), &if_oddball);
TNode<Int32T> callable_or_undetectable_mask = Word32And(
LoadMapBitField(map),
Int32Constant(Map::IsCallableBit::kMask | Map::IsUndetectableBit::kMask));
TNode<Int32T> callable_or_undetectable_mask =
Word32And(LoadMapBitField(map),
Int32Constant(Map::Bits1::IsCallableBit::kMask |
Map::Bits1::IsUndetectableBit::kMask));
GotoIf(Word32Equal(callable_or_undetectable_mask,
Int32Constant(Map::IsCallableBit::kMask)),
Int32Constant(Map::Bits1::IsCallableBit::kMask)),
&return_function);
GotoIfNot(Word32Equal(callable_or_undetectable_mask, Int32Constant(0)),

View File

@@ -667,7 +667,7 @@ void MacroAssembler::AssertConstructor(Register object) {
Push(object);
LoadMap(object, object);
test_b(FieldOperand(object, Map::kBitFieldOffset),
Immediate(Map::IsConstructorBit::kMask));
Immediate(Map::Bits1::IsConstructorBit::kMask));
Pop(object);
Check(not_zero, AbortReason::kOperandIsNotAConstructor);
}

View File

@@ -4986,7 +4986,7 @@ void MacroAssembler::AssertConstructor(Register object) {
LoadMap(t8, object);
lbu(t8, FieldMemOperand(t8, Map::kBitFieldOffset));
And(t8, t8, Operand(Map::IsConstructorBit::kMask));
And(t8, t8, Operand(Map::Bits1::IsConstructorBit::kMask));
Check(ne, AbortReason::kOperandIsNotAConstructor, t8, Operand(zero_reg));
}
}

View File

@@ -5315,7 +5315,7 @@ void MacroAssembler::AssertConstructor(Register object) {
LoadMap(t8, object);
Lbu(t8, FieldMemOperand(t8, Map::kBitFieldOffset));
And(t8, t8, Operand(Map::IsConstructorBit::kMask));
And(t8, t8, Operand(Map::Bits1::IsConstructorBit::kMask));
Check(ne, AbortReason::kOperandIsNotAConstructor, t8, Operand(zero_reg));
}
}

View File

@@ -1741,7 +1741,7 @@ void MacroAssembler::AssertConstructor(Register object) {
push(object);
LoadMap(object, object);
lbz(object, FieldMemOperand(object, Map::kBitFieldOffset));
andi(object, object, Operand(Map::IsConstructorBit::kMask));
andi(object, object, Operand(Map::Bits1::IsConstructorBit::kMask));
pop(object);
Check(ne, AbortReason::kOperandIsNotAConstructor, cr0);
}

View File

@@ -1684,7 +1684,7 @@ void MacroAssembler::AssertConstructor(Register object, Register scratch) {
Check(ne, AbortReason::kOperandIsASmiAndNotAConstructor);
LoadMap(scratch, object);
tm(FieldMemOperand(scratch, Map::kBitFieldOffset),
Operand(Map::IsConstructorBit::kMask));
Operand(Map::Bits1::IsConstructorBit::kMask));
Check(ne, AbortReason::kOperandIsNotAConstructor);
}
}

View File

@@ -2089,7 +2089,7 @@ void MacroAssembler::AssertConstructor(Register object) {
Push(object);
LoadMap(object, object);
testb(FieldOperand(object, Map::kBitFieldOffset),
Immediate(Map::IsConstructorBit::kMask));
Immediate(Map::Bits1::IsConstructorBit::kMask));
Pop(object);
Check(not_zero, AbortReason::kOperandIsNotAConstructor);
}

View File

@@ -1521,7 +1521,7 @@ void EffectControlLinearizer::TruncateTaggedPointerToBit(
__ GotoIfNot(
__ Word32Equal(
__ Word32And(value_map_bitfield,
__ Int32Constant(Map::IsUndetectableBit::kMask)),
__ Int32Constant(Map::Bits1::IsUndetectableBit::kMask)),
zero),
done, zero);
@@ -1737,7 +1737,7 @@ void EffectControlLinearizer::LowerCheckMaps(Node* node, Node* frame_state) {
__ LoadField(AccessBuilder::ForMapBitField3(), value_map);
Node* if_not_deprecated = __ Word32Equal(
__ Word32And(bitfield3,
__ Int32Constant(Map::IsDeprecatedBit::kMask)),
__ Int32Constant(Map::Bits3::IsDeprecatedBit::kMask)),
__ Int32Constant(0));
__ DeoptimizeIf(DeoptimizeReason::kWrongMap, p.feedback(),
if_not_deprecated, frame_state,
@@ -2961,10 +2961,10 @@ Node* EffectControlLinearizer::LowerObjectIsCallable(Node* node) {
Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
Node* value_bit_field =
__ LoadField(AccessBuilder::ForMapBitField(), value_map);
Node* vfalse =
__ Word32Equal(__ Int32Constant(Map::IsCallableBit::kMask),
__ Word32And(value_bit_field,
__ Int32Constant(Map::IsCallableBit::kMask)));
Node* vfalse = __ Word32Equal(
__ Int32Constant(Map::Bits1::IsCallableBit::kMask),
__ Word32And(value_bit_field,
__ Int32Constant(Map::Bits1::IsCallableBit::kMask)));
__ Goto(&done, vfalse);
__ Bind(&if_smi);
@@ -2987,9 +2987,9 @@ Node* EffectControlLinearizer::LowerObjectIsConstructor(Node* node) {
Node* value_bit_field =
__ LoadField(AccessBuilder::ForMapBitField(), value_map);
Node* vfalse = __ Word32Equal(
__ Int32Constant(Map::IsConstructorBit::kMask),
__ Int32Constant(Map::Bits1::IsConstructorBit::kMask),
__ Word32And(value_bit_field,
__ Int32Constant(Map::IsConstructorBit::kMask)));
__ Int32Constant(Map::Bits1::IsConstructorBit::kMask)));
__ Goto(&done, vfalse);
__ Bind(&if_smi);
@@ -3012,10 +3012,10 @@ Node* EffectControlLinearizer::LowerObjectIsDetectableCallable(Node* node) {
Node* value_bit_field =
__ LoadField(AccessBuilder::ForMapBitField(), value_map);
Node* vfalse = __ Word32Equal(
__ Int32Constant(Map::IsCallableBit::kMask),
__ Int32Constant(Map::Bits1::IsCallableBit::kMask),
__ Word32And(value_bit_field,
__ Int32Constant((Map::IsCallableBit::kMask) |
(Map::IsUndetectableBit::kMask))));
__ Int32Constant((Map::Bits1::IsCallableBit::kMask) |
(Map::Bits1::IsUndetectableBit::kMask))));
__ Goto(&done, vfalse);
__ Bind(&if_smi);
@@ -3259,10 +3259,10 @@ Node* EffectControlLinearizer::LowerObjectIsNonCallable(Node* node) {
Node* value_bit_field =
__ LoadField(AccessBuilder::ForMapBitField(), value_map);
Node* check2 =
__ Word32Equal(__ Int32Constant(0),
__ Word32And(value_bit_field,
__ Int32Constant(Map::IsCallableBit::kMask)));
Node* check2 = __ Word32Equal(
__ Int32Constant(0),
__ Word32And(value_bit_field,
__ Int32Constant(Map::Bits1::IsCallableBit::kMask)));
__ Goto(&done, check2);
__ Bind(&if_primitive);
@@ -3377,7 +3377,7 @@ Node* EffectControlLinearizer::LowerObjectIsUndetectable(Node* node) {
__ Word32Equal(
__ Int32Constant(0),
__ Word32And(value_bit_field,
__ Int32Constant(Map::IsUndetectableBit::kMask))),
__ Int32Constant(Map::Bits1::IsUndetectableBit::kMask))),
__ Int32Constant(0));
__ Goto(&done, vfalse);
@@ -5124,9 +5124,9 @@ void EffectControlLinearizer::LowerTransitionAndStoreElement(Node* node) {
Node* kind;
{
Node* bit_field2 = __ LoadField(AccessBuilder::ForMapBitField2(), map);
Node* mask = __ Int32Constant(Map::ElementsKindBits::kMask);
Node* mask = __ Int32Constant(Map::Bits2::ElementsKindBits::kMask);
Node* andit = __ Word32And(bit_field2, mask);
Node* shift = __ Int32Constant(Map::ElementsKindBits::kShift);
Node* shift = __ Int32Constant(Map::Bits2::ElementsKindBits::kShift);
kind = __ Word32Shr(andit, shift);
}
@@ -5242,9 +5242,9 @@ void EffectControlLinearizer::LowerTransitionAndStoreNumberElement(Node* node) {
Node* kind;
{
Node* bit_field2 = __ LoadField(AccessBuilder::ForMapBitField2(), map);
Node* mask = __ Int32Constant(Map::ElementsKindBits::kMask);
Node* mask = __ Int32Constant(Map::Bits2::ElementsKindBits::kMask);
Node* andit = __ Word32And(bit_field2, mask);
Node* shift = __ Int32Constant(Map::ElementsKindBits::kShift);
Node* shift = __ Int32Constant(Map::Bits2::ElementsKindBits::kShift);
kind = __ Word32Shr(andit, shift);
}
@@ -5305,9 +5305,9 @@ void EffectControlLinearizer::LowerTransitionAndStoreNonNumberElement(
Node* kind;
{
Node* bit_field2 = __ LoadField(AccessBuilder::ForMapBitField2(), map);
Node* mask = __ Int32Constant(Map::ElementsKindBits::kMask);
Node* mask = __ Int32Constant(Map::Bits2::ElementsKindBits::kMask);
Node* andit = __ Word32And(bit_field2, mask);
Node* shift = __ Int32Constant(Map::ElementsKindBits::kShift);
Node* shift = __ Int32Constant(Map::Bits2::ElementsKindBits::kShift);
kind = __ Word32Shr(andit, shift);
}
@@ -5372,9 +5372,9 @@ void EffectControlLinearizer::LowerStoreSignedSmallElement(Node* node) {
Node* kind;
{
Node* bit_field2 = __ LoadField(AccessBuilder::ForMapBitField2(), map);
Node* mask = __ Int32Constant(Map::ElementsKindBits::kMask);
Node* mask = __ Int32Constant(Map::Bits2::ElementsKindBits::kMask);
Node* andit = __ Word32And(bit_field2, mask);
Node* shift = __ Int32Constant(Map::ElementsKindBits::kShift);
Node* shift = __ Int32Constant(Map::Bits2::ElementsKindBits::kShift);
kind = __ Word32Shr(andit, shift);
}

View File

@@ -4397,9 +4397,10 @@ Node* JSCallReducer::LoadReceiverElementsKind(Node* receiver, Node** effect,
*effect, *control);
Node* receiver_elements_kind = graph()->NewNode(
simplified()->NumberShiftRightLogical(),
graph()->NewNode(simplified()->NumberBitwiseAnd(), receiver_bit_field2,
jsgraph()->Constant(Map::ElementsKindBits::kMask)),
jsgraph()->Constant(Map::ElementsKindBits::kShift));
graph()->NewNode(
simplified()->NumberBitwiseAnd(), receiver_bit_field2,
jsgraph()->Constant(Map::Bits2::ElementsKindBits::kMask)),
jsgraph()->Constant(Map::Bits2::ElementsKindBits::kShift));
return receiver_elements_kind;
}
@@ -6218,9 +6219,10 @@ Reduction JSCallReducer::ReduceTypedArrayPrototypeToStringTag(Node* node) {
effect, control);
Node* receiver_elements_kind = graph()->NewNode(
simplified()->NumberShiftRightLogical(),
graph()->NewNode(simplified()->NumberBitwiseAnd(), receiver_bit_field2,
jsgraph()->Constant(Map::ElementsKindBits::kMask)),
jsgraph()->Constant(Map::ElementsKindBits::kShift));
graph()->NewNode(
simplified()->NumberBitwiseAnd(), receiver_bit_field2,
jsgraph()->Constant(Map::Bits2::ElementsKindBits::kMask)),
jsgraph()->Constant(Map::Bits2::ElementsKindBits::kShift));
// Offset the elements kind by FIRST_FIXED_TYPED_ARRAY_ELEMENTS_KIND,
// so that the branch cascade below is turned into a simple table

View File

@@ -3380,22 +3380,25 @@ BIMODAL_ACCESSOR_C(JSTypedArray, bool, is_on_heap)
BIMODAL_ACCESSOR_C(JSTypedArray, size_t, length)
BIMODAL_ACCESSOR(JSTypedArray, HeapObject, buffer)
BIMODAL_ACCESSOR_B(Map, bit_field2, elements_kind, Map::ElementsKindBits)
BIMODAL_ACCESSOR_B(Map, bit_field3, is_dictionary_map, Map::IsDictionaryMapBit)
BIMODAL_ACCESSOR_B(Map, bit_field3, is_deprecated, Map::IsDeprecatedBit)
BIMODAL_ACCESSOR_B(Map, bit_field2, elements_kind, Map::Bits2::ElementsKindBits)
BIMODAL_ACCESSOR_B(Map, bit_field3, is_dictionary_map,
Map::Bits3::IsDictionaryMapBit)
BIMODAL_ACCESSOR_B(Map, bit_field3, is_deprecated, Map::Bits3::IsDeprecatedBit)
BIMODAL_ACCESSOR_B(Map, bit_field3, NumberOfOwnDescriptors,
Map::NumberOfOwnDescriptorsBits)
Map::Bits3::NumberOfOwnDescriptorsBits)
BIMODAL_ACCESSOR_B(Map, bit_field3, is_migration_target,
Map::IsMigrationTargetBit)
BIMODAL_ACCESSOR_B(Map, bit_field3, is_extensible, Map::IsExtensibleBit)
BIMODAL_ACCESSOR_B(Map, bit_field, has_prototype_slot, Map::HasPrototypeSlotBit)
Map::Bits3::IsMigrationTargetBit)
BIMODAL_ACCESSOR_B(Map, bit_field3, is_extensible, Map::Bits3::IsExtensibleBit)
BIMODAL_ACCESSOR_B(Map, bit_field, has_prototype_slot,
Map::Bits1::HasPrototypeSlotBit)
BIMODAL_ACCESSOR_B(Map, bit_field, is_access_check_needed,
Map::IsAccessCheckNeededBit)
BIMODAL_ACCESSOR_B(Map, bit_field, is_callable, Map::IsCallableBit)
Map::Bits1::IsAccessCheckNeededBit)
BIMODAL_ACCESSOR_B(Map, bit_field, is_callable, Map::Bits1::IsCallableBit)
BIMODAL_ACCESSOR_B(Map, bit_field, has_indexed_interceptor,
Map::HasIndexedInterceptorBit)
BIMODAL_ACCESSOR_B(Map, bit_field, is_constructor, Map::IsConstructorBit)
BIMODAL_ACCESSOR_B(Map, bit_field, is_undetectable, Map::IsUndetectableBit)
Map::Bits1::HasIndexedInterceptorBit)
BIMODAL_ACCESSOR_B(Map, bit_field, is_constructor, Map::Bits1::IsConstructorBit)
BIMODAL_ACCESSOR_B(Map, bit_field, is_undetectable,
Map::Bits1::IsUndetectableBit)
BIMODAL_ACCESSOR_C(Map, int, instance_size)
BIMODAL_ACCESSOR_C(Map, int, NextFreePropertyIndex)
BIMODAL_ACCESSOR_C(Map, int, UnusedPropertyFields)
@@ -3597,8 +3600,8 @@ void* JSTypedArrayRef::data_ptr() const {
bool MapRef::IsInobjectSlackTrackingInProgress() const {
IF_ACCESS_FROM_HEAP_C(Map, IsInobjectSlackTrackingInProgress);
return Map::ConstructionCounterBits::decode(data()->AsMap()->bit_field3()) !=
Map::kNoSlackTracking;
return Map::Bits3::ConstructionCounterBits::decode(
data()->AsMap()->bit_field3()) != Map::kNoSlackTracking;
}
int MapRef::constructor_function_index() const {
@@ -3609,7 +3612,7 @@ int MapRef::constructor_function_index() const {
bool MapRef::is_stable() const {
IF_ACCESS_FROM_HEAP_C(Map, is_stable);
return !Map::IsUnstableBit::decode(data()->AsMap()->bit_field3());
return !Map::Bits3::IsUnstableBit::decode(data()->AsMap()->bit_field3());
}
bool MapRef::CanBeDeprecated() const {

View File

@@ -1957,10 +1957,10 @@ Reduction JSTypedLowering::ReduceJSForInPrepare(Node* node) {
Node* bit_field3 = effect = graph()->NewNode(
simplified()->LoadField(AccessBuilder::ForMapBitField3()), enumerator,
effect, control);
STATIC_ASSERT(Map::EnumLengthBits::kShift == 0);
cache_length =
graph()->NewNode(simplified()->NumberBitwiseAnd(), bit_field3,
jsgraph()->Constant(Map::EnumLengthBits::kMask));
STATIC_ASSERT(Map::Bits3::EnumLengthBits::kShift == 0);
cache_length = graph()->NewNode(
simplified()->NumberBitwiseAnd(), bit_field3,
jsgraph()->Constant(Map::Bits3::EnumLengthBits::kMask));
break;
}
case ForInMode::kGeneric: {
@@ -1992,10 +1992,10 @@ Reduction JSTypedLowering::ReduceJSForInPrepare(Node* node) {
Node* bit_field3 = etrue = graph()->NewNode(
simplified()->LoadField(AccessBuilder::ForMapBitField3()),
enumerator, etrue, if_true);
STATIC_ASSERT(Map::EnumLengthBits::kShift == 0);
cache_length_true =
graph()->NewNode(simplified()->NumberBitwiseAnd(), bit_field3,
jsgraph()->Constant(Map::EnumLengthBits::kMask));
STATIC_ASSERT(Map::Bits3::EnumLengthBits::kShift == 0);
cache_length_true = graph()->NewNode(
simplified()->NumberBitwiseAnd(), bit_field3,
jsgraph()->Constant(Map::Bits3::EnumLengthBits::kMask));
}
Node* if_false = graph()->NewNode(common()->IfFalse(), branch);

View File

@@ -1896,7 +1896,7 @@ Type Typer::Visitor::TypeJSForInNext(Node* node) {
}
Type Typer::Visitor::TypeJSForInPrepare(Node* node) {
STATIC_ASSERT(Map::EnumLengthBits::kMax <= FixedArray::kMaxLength);
STATIC_ASSERT(Map::Bits3::EnumLengthBits::kMax <= FixedArray::kMaxLength);
Type const cache_type =
Type::Union(Type::SignedSmall(), Type::OtherInternal(), zone());
Type const cache_array = Type::OtherInternal();

View File

@@ -1918,11 +1918,12 @@ Map Factory::InitializeMap(Map map, InstanceType type, int instance_size,
// |layout_descriptor| are set.
map.set_visitor_id(Map::GetVisitorId(map));
map.set_bit_field(0);
map.set_bit_field2(Map::NewTargetIsBaseBit::encode(true));
int bit_field3 = Map::EnumLengthBits::encode(kInvalidEnumCacheSentinel) |
Map::OwnsDescriptorsBit::encode(true) |
Map::ConstructionCounterBits::encode(Map::kNoSlackTracking) |
Map::IsExtensibleBit::encode(true);
map.set_bit_field2(Map::Bits2::NewTargetIsBaseBit::encode(true));
int bit_field3 =
Map::Bits3::EnumLengthBits::encode(kInvalidEnumCacheSentinel) |
Map::Bits3::OwnsDescriptorsBit::encode(true) |
Map::Bits3::ConstructionCounterBits::encode(Map::kNoSlackTracking) |
Map::Bits3::IsExtensibleBit::encode(true);
map.set_bit_field3(bit_field3);
DCHECK(!map.is_in_retained_map_list());
map.clear_padding();

View File

@@ -154,9 +154,10 @@ AllocationResult Heap::AllocatePartialMap(InstanceType instance_type,
map.SetInObjectUnusedPropertyFields(0);
map.set_bit_field(0);
map.set_bit_field2(0);
int bit_field3 = Map::EnumLengthBits::encode(kInvalidEnumCacheSentinel) |
Map::OwnsDescriptorsBit::encode(true) |
Map::ConstructionCounterBits::encode(Map::kNoSlackTracking);
int bit_field3 =
Map::Bits3::EnumLengthBits::encode(kInvalidEnumCacheSentinel) |
Map::Bits3::OwnsDescriptorsBit::encode(true) |
Map::Bits3::ConstructionCounterBits::encode(Map::kNoSlackTracking);
map.set_bit_field3(bit_field3);
DCHECK(!map.is_in_retained_map_list());
map.clear_padding();

View File

@@ -1173,12 +1173,12 @@ void AccessorAssembler::HandleStoreICTransitionMapHandlerCase(
}
TNode<Uint32T> bitfield3 = LoadMapBitField3(transition_map);
CSA_ASSERT(this, IsClearWord32<Map::IsDictionaryMapBit>(bitfield3));
GotoIf(IsSetWord32<Map::IsDeprecatedBit>(bitfield3), miss);
CSA_ASSERT(this, IsClearWord32<Map::Bits3::IsDictionaryMapBit>(bitfield3));
GotoIf(IsSetWord32<Map::Bits3::IsDeprecatedBit>(bitfield3), miss);
// Load last descriptor details.
TNode<UintPtrT> nof =
DecodeWordFromWord32<Map::NumberOfOwnDescriptorsBits>(bitfield3);
DecodeWordFromWord32<Map::Bits3::NumberOfOwnDescriptorsBits>(bitfield3);
CSA_ASSERT(this, WordNotEqual(nof, IntPtrConstant(0)));
TNode<DescriptorArray> descriptors = LoadMapDescriptors(transition_map);
@@ -2277,8 +2277,8 @@ void AccessorAssembler::InvalidateValidityCellIfPrototype(
bitfield3 = LoadMapBitField3(map);
}
Branch(IsSetWord32(bitfield3, Map::IsPrototypeMapBit::kMask), &is_prototype,
&cont);
Branch(IsSetWord32(bitfield3, Map::Bits3::IsPrototypeMapBit::kMask),
&is_prototype, &cont);
BIND(&is_prototype);
{
@@ -2384,7 +2384,7 @@ void AccessorAssembler::GenericPropertyLoad(TNode<HeapObject> receiver,
// Check if the receiver has fast or slow properties.
TNode<Uint32T> bitfield3 = LoadMapBitField3(receiver_map);
GotoIf(IsSetWord32<Map::IsDictionaryMapBit>(bitfield3),
GotoIf(IsSetWord32<Map::Bits3::IsDictionaryMapBit>(bitfield3),
&if_property_dictionary);
// Try looking up the property on the receiver; if unsuccessful, look

View File

@@ -769,7 +769,7 @@ void KeyedStoreGenericAssembler::EmitGenericPropertyStore(
readonly(this);
TNode<Uint32T> bitfield3 = LoadMapBitField3(receiver_map);
TNode<Name> name = CAST(p->name());
Branch(IsSetWord32<Map::IsDictionaryMapBit>(bitfield3),
Branch(IsSetWord32<Map::Bits3::IsDictionaryMapBit>(bitfield3),
&dictionary_properties, &fast_properties);
BIND(&fast_properties);
@@ -876,7 +876,8 @@ void KeyedStoreGenericAssembler::EmitGenericPropertyStore(
Label extensible(this), is_private_symbol(this);
TNode<Uint32T> bitfield3 = LoadMapBitField3(receiver_map);
GotoIf(IsPrivateSymbol(name), &is_private_symbol);
Branch(IsSetWord32<Map::IsExtensibleBit>(bitfield3), &extensible, slow);
Branch(IsSetWord32<Map::Bits3::IsExtensibleBit>(bitfield3), &extensible,
slow);
BIND(&is_private_symbol);
{

View File

@@ -2065,11 +2065,11 @@ IGNITION_HANDLER(TestTypeOf, InterpreterAssembler) {
GotoIf(TaggedIsSmi(object), &if_false);
// Check if callable bit is set and not undetectable.
TNode<Int32T> map_bitfield = LoadMapBitField(LoadMap(CAST(object)));
TNode<Int32T> callable_undetectable =
Word32And(map_bitfield, Int32Constant(Map::IsUndetectableBit::kMask |
Map::IsCallableBit::kMask));
TNode<Int32T> callable_undetectable = Word32And(
map_bitfield, Int32Constant(Map::Bits1::IsUndetectableBit::kMask |
Map::Bits1::IsCallableBit::kMask));
Branch(Word32Equal(callable_undetectable,
Int32Constant(Map::IsCallableBit::kMask)),
Int32Constant(Map::Bits1::IsCallableBit::kMask)),
&if_true, &if_false);
}
BIND(&if_object);
@@ -2084,9 +2084,9 @@ IGNITION_HANDLER(TestTypeOf, InterpreterAssembler) {
TNode<Map> map = LoadMap(CAST(object));
GotoIfNot(IsJSReceiverMap(map), &if_false);
TNode<Int32T> map_bitfield = LoadMapBitField(map);
TNode<Int32T> callable_undetectable =
Word32And(map_bitfield, Int32Constant(Map::IsUndetectableBit::kMask |
Map::IsCallableBit::kMask));
TNode<Int32T> callable_undetectable = Word32And(
map_bitfield, Int32Constant(Map::Bits1::IsUndetectableBit::kMask |
Map::Bits1::IsCallableBit::kMask));
Branch(Word32Equal(callable_undetectable, Int32Constant(0)), &if_true,
&if_false);
}

View File

@@ -59,38 +59,42 @@ ACCESSORS_CHECKED(Map, prototype_info, Object,
// is setup but it's being read by concurrent marker when pointer compression
// is enabled. The latter bit can be modified on a live objects.
BIT_FIELD_ACCESSORS(Map, relaxed_bit_field, has_non_instance_prototype,
Map::HasNonInstancePrototypeBit)
BIT_FIELD_ACCESSORS(Map, bit_field, is_callable, Map::IsCallableBit)
Map::Bits1::HasNonInstancePrototypeBit)
BIT_FIELD_ACCESSORS(Map, bit_field, is_callable, Map::Bits1::IsCallableBit)
BIT_FIELD_ACCESSORS(Map, bit_field, has_named_interceptor,
Map::HasNamedInterceptorBit)
Map::Bits1::HasNamedInterceptorBit)
BIT_FIELD_ACCESSORS(Map, bit_field, has_indexed_interceptor,
Map::HasIndexedInterceptorBit)
BIT_FIELD_ACCESSORS(Map, bit_field, is_undetectable, Map::IsUndetectableBit)
Map::Bits1::HasIndexedInterceptorBit)
BIT_FIELD_ACCESSORS(Map, bit_field, is_undetectable,
Map::Bits1::IsUndetectableBit)
BIT_FIELD_ACCESSORS(Map, bit_field, is_access_check_needed,
Map::IsAccessCheckNeededBit)
BIT_FIELD_ACCESSORS(Map, bit_field, is_constructor, Map::IsConstructorBit)
Map::Bits1::IsAccessCheckNeededBit)
BIT_FIELD_ACCESSORS(Map, bit_field, is_constructor,
Map::Bits1::IsConstructorBit)
BIT_FIELD_ACCESSORS(Map, relaxed_bit_field, has_prototype_slot,
Map::HasPrototypeSlotBit)
Map::Bits1::HasPrototypeSlotBit)
// |bit_field2| fields.
BIT_FIELD_ACCESSORS(Map, bit_field2, new_target_is_base,
Map::NewTargetIsBaseBit)
Map::Bits2::NewTargetIsBaseBit)
BIT_FIELD_ACCESSORS(Map, bit_field2, is_immutable_proto,
Map::IsImmutablePrototypeBit)
Map::Bits2::IsImmutablePrototypeBit)
// |bit_field3| fields.
BIT_FIELD_ACCESSORS(Map, bit_field3, owns_descriptors, Map::OwnsDescriptorsBit)
BIT_FIELD_ACCESSORS(Map, bit_field3, is_deprecated, Map::IsDeprecatedBit)
BIT_FIELD_ACCESSORS(Map, bit_field3, owns_descriptors,
Map::Bits3::OwnsDescriptorsBit)
BIT_FIELD_ACCESSORS(Map, bit_field3, is_deprecated, Map::Bits3::IsDeprecatedBit)
BIT_FIELD_ACCESSORS(Map, bit_field3, is_in_retained_map_list,
Map::IsInRetainedMapListBit)
BIT_FIELD_ACCESSORS(Map, bit_field3, is_prototype_map, Map::IsPrototypeMapBit)
Map::Bits3::IsInRetainedMapListBit)
BIT_FIELD_ACCESSORS(Map, bit_field3, is_prototype_map,
Map::Bits3::IsPrototypeMapBit)
BIT_FIELD_ACCESSORS(Map, bit_field3, is_migration_target,
Map::IsMigrationTargetBit)
BIT_FIELD_ACCESSORS(Map, bit_field3, is_extensible, Map::IsExtensibleBit)
Map::Bits3::IsMigrationTargetBit)
BIT_FIELD_ACCESSORS(Map, bit_field3, is_extensible, Map::Bits3::IsExtensibleBit)
BIT_FIELD_ACCESSORS(Map, bit_field3, may_have_interesting_symbols,
Map::MayHaveInterestingSymbolsBit)
Map::Bits3::MayHaveInterestingSymbolsBit)
BIT_FIELD_ACCESSORS(Map, bit_field3, construction_counter,
Map::ConstructionCounterBits)
Map::Bits3::ConstructionCounterBits)
DEF_GETTER(Map, GetNamedInterceptor, InterceptorInfo) {
DCHECK(has_named_interceptor());
@@ -184,21 +188,24 @@ InternalIndex Map::LastAdded() const {
}
int Map::NumberOfOwnDescriptors() const {
return NumberOfOwnDescriptorsBits::decode(bit_field3());
return Bits3::NumberOfOwnDescriptorsBits::decode(bit_field3());
}
void Map::SetNumberOfOwnDescriptors(int number) {
DCHECK_LE(number, instance_descriptors().number_of_descriptors());
CHECK_LE(static_cast<unsigned>(number),
static_cast<unsigned>(kMaxNumberOfDescriptors));
set_bit_field3(NumberOfOwnDescriptorsBits::update(bit_field3(), number));
set_bit_field3(
Bits3::NumberOfOwnDescriptorsBits::update(bit_field3(), number));
}
InternalIndex::Range Map::IterateOwnDescriptors() const {
return InternalIndex::Range(NumberOfOwnDescriptors());
}
int Map::EnumLength() const { return EnumLengthBits::decode(bit_field3()); }
int Map::EnumLength() const {
return Bits3::EnumLengthBits::decode(bit_field3());
}
void Map::SetEnumLength(int length) {
if (length != kInvalidEnumCacheSentinel) {
@@ -206,7 +213,7 @@ void Map::SetEnumLength(int length) {
CHECK_LE(static_cast<unsigned>(length),
static_cast<unsigned>(kMaxNumberOfDescriptors));
}
set_bit_field3(EnumLengthBits::update(bit_field3(), length));
set_bit_field3(Bits3::EnumLengthBits::update(bit_field3(), length));
}
FixedArrayBase Map::GetInitialElements() const {
@@ -465,11 +472,12 @@ bool Map::should_be_fast_prototype_map() const {
void Map::set_elements_kind(ElementsKind elements_kind) {
CHECK_LT(static_cast<int>(elements_kind), kElementsKindCount);
set_bit_field2(Map::ElementsKindBits::update(bit_field2(), elements_kind));
set_bit_field2(
Map::Bits2::ElementsKindBits::update(bit_field2(), elements_kind));
}
ElementsKind Map::elements_kind() const {
return Map::ElementsKindBits::decode(bit_field2());
return Map::Bits2::ElementsKindBits::decode(bit_field2());
}
bool Map::has_fast_smi_elements() const {
@@ -529,20 +537,23 @@ bool Map::has_frozen_elements() const {
}
void Map::set_is_dictionary_map(bool value) {
uint32_t new_bit_field3 = IsDictionaryMapBit::update(bit_field3(), value);
new_bit_field3 = IsUnstableBit::update(new_bit_field3, value);
uint32_t new_bit_field3 =
Bits3::IsDictionaryMapBit::update(bit_field3(), value);
new_bit_field3 = Bits3::IsUnstableBit::update(new_bit_field3, value);
set_bit_field3(new_bit_field3);
}
bool Map::is_dictionary_map() const {
return IsDictionaryMapBit::decode(bit_field3());
return Bits3::IsDictionaryMapBit::decode(bit_field3());
}
void Map::mark_unstable() {
set_bit_field3(IsUnstableBit::update(bit_field3(), true));
set_bit_field3(Bits3::IsUnstableBit::update(bit_field3(), true));
}
bool Map::is_stable() const { return !IsUnstableBit::decode(bit_field3()); }
bool Map::is_stable() const {
return !Bits3::IsUnstableBit::decode(bit_field3());
}
bool Map::CanBeDeprecated() const {
for (InternalIndex i : IterateOwnDescriptors()) {

View File

@@ -1427,14 +1427,14 @@ Handle<Map> Map::RawCopy(Isolate* isolate, Handle<Map> map, int instance_size,
result->set_bit_field(map->bit_field());
result->set_bit_field2(map->bit_field2());
int new_bit_field3 = map->bit_field3();
new_bit_field3 = OwnsDescriptorsBit::update(new_bit_field3, true);
new_bit_field3 = NumberOfOwnDescriptorsBits::update(new_bit_field3, 0);
new_bit_field3 = Bits3::OwnsDescriptorsBit::update(new_bit_field3, true);
new_bit_field3 = Bits3::NumberOfOwnDescriptorsBits::update(new_bit_field3, 0);
new_bit_field3 =
EnumLengthBits::update(new_bit_field3, kInvalidEnumCacheSentinel);
new_bit_field3 = IsDeprecatedBit::update(new_bit_field3, false);
new_bit_field3 = IsInRetainedMapListBit::update(new_bit_field3, false);
Bits3::EnumLengthBits::update(new_bit_field3, kInvalidEnumCacheSentinel);
new_bit_field3 = Bits3::IsDeprecatedBit::update(new_bit_field3, false);
new_bit_field3 = Bits3::IsInRetainedMapListBit::update(new_bit_field3, false);
if (!map->is_dictionary_map()) {
new_bit_field3 = IsUnstableBit::update(new_bit_field3, false);
new_bit_field3 = Bits3::IsUnstableBit::update(new_bit_field3, false);
}
result->set_bit_field3(new_bit_field3);
result->clear_padding();
@@ -1478,7 +1478,8 @@ Handle<Map> Map::Normalize(Isolate* isolate, Handle<Map> fast_map,
// The IsMigrationTargetBit might be different if the {new_map} from
// {cache} has already been marked as a migration target.
constexpr int ignored_bit_field3_bits =
IsInRetainedMapListBit::kMask | IsMigrationTargetBit::kMask;
Bits3::IsInRetainedMapListBit::kMask |
Bits3::IsMigrationTargetBit::kMask;
DCHECK_EQ(fresh->bit_field3() & ~ignored_bit_field3_bits,
new_map->bit_field3() & ~ignored_bit_field3_bits);
int offset = Map::kBitField3Offset + kInt32Size;
@@ -2483,9 +2484,10 @@ bool Map::EquivalentToForNormalization(const Map other,
int properties =
mode == CLEAR_INOBJECT_PROPERTIES ? 0 : other.GetInObjectProperties();
// Make sure the elements_kind bits are in bit_field2.
DCHECK_EQ(this->elements_kind(), Map::ElementsKindBits::decode(bit_field2()));
DCHECK_EQ(this->elements_kind(),
Map::Bits2::ElementsKindBits::decode(bit_field2()));
int adjusted_other_bit_field2 =
Map::ElementsKindBits::update(other.bit_field2(), elements_kind);
Map::Bits2::ElementsKindBits::update(other.bit_field2(), elements_kind);
return CheckEquivalent(*this, other) &&
bit_field2() == adjusted_other_bit_field2 &&
GetInObjectProperties() == properties &&
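
The adjusted_other_bit_field2 computation is the interesting step in this hunk: rather than masking the elements-kind bits out of both operands, it overwrites them in one operand and then compares whole bytes. A standalone sketch of the same idea (hypothetical values; field position per MapBitFields2 further down):

#include <cassert>
#include <cstdint>

// elements_kind occupies bits 3..7 of bit_field2 (see MapBitFields2 below).
constexpr uint8_t kElementsKindMask = 0x1F << 3;

int main() {
  uint8_t mine = 0x0B;   // hypothetical bit_field2 values that differ
  uint8_t other = 0x23;  // only in their elements-kind bits
  uint8_t my_kind = (mine & kElementsKindMask) >> 3;
  // Overwrite other's kind with ours; a plain == then ignores kind diffs.
  uint8_t adjusted = (other & ~kElementsKindMask) | (my_kind << 3);
  assert(mine != other && adjusted == mine);
  return 0;
}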


@ -11,6 +11,7 @@
#include "src/objects/heap-object.h"
#include "src/objects/internal-index.h"
#include "src/objects/objects.h"
#include "torque-generated/bit-fields-tq.h"
#include "torque-generated/field-offsets-tq.h"
// Has to be the last include (doesn't have include guards):
@ -250,34 +251,16 @@ class Map : public HeapObject {
// Atomic accessors, used for whitelisting legitimate concurrent accesses.
DECL_PRIMITIVE_ACCESSORS(relaxed_bit_field, byte)
// Bit positions for |bit_field|.
#define MAP_BIT_FIELD_FIELDS(V, _) \
V(HasNonInstancePrototypeBit, bool, 1, _) \
V(IsCallableBit, bool, 1, _) \
V(HasNamedInterceptorBit, bool, 1, _) \
V(HasIndexedInterceptorBit, bool, 1, _) \
V(IsUndetectableBit, bool, 1, _) \
V(IsAccessCheckNeededBit, bool, 1, _) \
V(IsConstructorBit, bool, 1, _) \
V(HasPrototypeSlotBit, bool, 1, _)
DEFINE_BIT_FIELDS(MAP_BIT_FIELD_FIELDS)
#undef MAP_BIT_FIELD_FIELDS
// Bit positions for |bit_field|.
using Bits1 = TorqueGeneratedMapBitFields1Fields;
//
// Bit field 2.
//
DECL_PRIMITIVE_ACCESSORS(bit_field2, byte)
// Bit positions for |bit_field2|.
#define MAP_BIT_FIELD2_FIELDS(V, _) \
V(NewTargetIsBaseBit, bool, 1, _) \
V(IsImmutablePrototypeBit, bool, 1, _) \
V(UnusedBit, bool, 1, _) \
V(ElementsKindBits, ElementsKind, 5, _)
DEFINE_BIT_FIELDS(MAP_BIT_FIELD2_FIELDS)
#undef MAP_BIT_FIELD2_FIELDS
// Bit positions for |bit_field2|.
using Bits2 = TorqueGeneratedMapBitFields2Fields;
//
// Bit field 3.
@ -288,30 +271,22 @@ class Map : public HeapObject {
// is deterministic. Depending on the V8 build mode there could be no padding.
V8_INLINE void clear_padding();
// Bit positions for |bit_field3|.
#define MAP_BIT_FIELD3_FIELDS(V, _) \
V(EnumLengthBits, int, kDescriptorIndexBitCount, _) \
V(NumberOfOwnDescriptorsBits, int, kDescriptorIndexBitCount, _) \
V(IsPrototypeMapBit, bool, 1, _) \
V(IsDictionaryMapBit, bool, 1, _) \
V(OwnsDescriptorsBit, bool, 1, _) \
V(IsInRetainedMapListBit, bool, 1, _) \
V(IsDeprecatedBit, bool, 1, _) \
V(IsUnstableBit, bool, 1, _) \
V(IsMigrationTargetBit, bool, 1, _) \
V(IsExtensibleBit, bool, 1, _) \
V(MayHaveInterestingSymbolsBit, bool, 1, _) \
V(ConstructionCounterBits, int, 3, _)
// Bit positions for |bit_field3|.
using Bits3 = TorqueGeneratedMapBitFields3Fields;
DEFINE_BIT_FIELDS(MAP_BIT_FIELD3_FIELDS)
#undef MAP_BIT_FIELD3_FIELDS
// Ensure that Torque-defined bit widths for |bit_field3| are as expected.
STATIC_ASSERT(Bits3::EnumLengthBits::kSize == kDescriptorIndexBitCount);
STATIC_ASSERT(Bits3::NumberOfOwnDescriptorsBits::kSize ==
kDescriptorIndexBitCount);
STATIC_ASSERT(NumberOfOwnDescriptorsBits::kMax >= kMaxNumberOfDescriptors);
STATIC_ASSERT(Bits3::NumberOfOwnDescriptorsBits::kMax >=
kMaxNumberOfDescriptors);
static const int kSlackTrackingCounterStart = 7;
static const int kSlackTrackingCounterEnd = 1;
static const int kNoSlackTracking = 0;
STATIC_ASSERT(kSlackTrackingCounterStart <= ConstructionCounterBits::kMax);
STATIC_ASSERT(kSlackTrackingCounterStart <=
Bits3::ConstructionCounterBits::kMax);
// Inobject slack tracking is the way to reclaim unused inobject space.
//
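
For C++ call sites the effect is a pure renaming: every flag keeps its generated name but is now reached through the nested alias for the byte it lives in, as the map.cc hunks above show. Hypothetically:

// Hypothetical call site (Map and the flag names as defined above):
bool callable = Map::IsCallableBit::decode(raw_bit_field);         // before
bool callable = Map::Bits1::IsCallableBit::decode(raw_bit_field);  // after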


@ -2,6 +2,39 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
bitfield struct MapBitFields1 extends uint8 {
has_non_instance_prototype: bool: 1 bit;
is_callable: bool: 1 bit;
has_named_interceptor: bool: 1 bit;
has_indexed_interceptor: bool: 1 bit;
is_undetectable: bool: 1 bit;
is_access_check_needed: bool: 1 bit;
is_constructor: bool: 1 bit;
has_prototype_slot: bool: 1 bit;
}
bitfield struct MapBitFields2 extends uint8 {
new_target_is_base: bool: 1 bit;
is_immutable_prototype: bool: 1 bit;
unused: bool: 1 bit;
elements_kind: ElementsKind: 5 bit;
}
bitfield struct MapBitFields3 extends uint32 {
enum_length: int32: 10 bit;
number_of_own_descriptors: int32: 10 bit;
is_prototype_map: bool: 1 bit;
is_dictionary_map: bool: 1 bit;
owns_descriptors: bool: 1 bit;
is_in_retained_map_list: bool: 1 bit;
is_deprecated: bool: 1 bit;
is_unstable: bool: 1 bit;
is_migration_target: bool: 1 bit;
is_extensible: bool: 1 bit;
may_have_interesting_symbols: bool: 1 bit;
construction_counter: int32: 3 bit;
}
extern class Map extends HeapObject {
macro PrototypeInfo(): PrototypeInfo labels HasNoPrototypeInfo {
typeswitch (this.transitions_or_prototype_info) {
@ -25,9 +58,9 @@ extern class Map extends HeapObject {
used_or_unused_instance_size_in_words: uint8;
visitor_id: uint8;
instance_type: InstanceType;
bit_field: uint8;
bit_field2: uint8;
bit_field3: uint32;
bit_field: MapBitFields1;
bit_field2: MapBitFields2;
bit_field3: MapBitFields3;
@if(TAGGED_SIZE_8_BYTES) optional_padding: uint32;
@ifnot(TAGGED_SIZE_8_BYTES) optional_padding: void;
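
These bitfield structs are the source of the TorqueGeneratedMapBitFieldsNFields aliases used in map.h above. As a rough sketch of the assumed output shape (the real header is emitted by Torque into torque-generated/bit-fields-tq.h and may differ in detail), MapBitFields2 would come out roughly as:

// Assumed approximation of the generated code, not verbatim output:
struct TorqueGeneratedMapBitFields2Fields {
  using NewTargetIsBaseBit = base::BitField<bool, 0, 1, uint8_t>;
  using IsImmutablePrototypeBit = base::BitField<bool, 1, 1, uint8_t>;
  using UnusedBit = base::BitField<bool, 2, 1, uint8_t>;
  using ElementsKindBits = base::BitField<ElementsKind, 3, 5, uint8_t>;
};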


@ -8,6 +8,7 @@
#include "src/base/bit-field.h"
#include "src/objects/objects.h"
#include "src/objects/primitive-heap-object.h"
#include "torque-generated/bit-fields-tq.h"
// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"
@ -138,7 +139,8 @@ class Name : public TorqueGeneratedName<Name, PrimitiveHeapObject> {
};
// ES6 symbols.
class Symbol : public TorqueGeneratedSymbol<Symbol, Name> {
class Symbol : public TorqueGeneratedSymbol<Symbol, Name>,
public TorqueGeneratedSymbolFlagsFields {
public:
// [is_private]: Whether this is a private symbol. Private symbols can only
// be used to designate own properties of objects.
@ -180,18 +182,6 @@ class Symbol : public TorqueGeneratedSymbol<Symbol, Name> {
DECL_PRINTER(Symbol)
DECL_VERIFIER(Symbol)
// Flags layout.
#define FLAGS_BIT_FIELDS(V, _) \
V(IsPrivateBit, bool, 1, _) \
V(IsWellKnownSymbolBit, bool, 1, _) \
V(IsInPublicSymbolTableBit, bool, 1, _) \
V(IsInterestingSymbolBit, bool, 1, _) \
V(IsPrivateNameBit, bool, 1, _) \
V(IsPrivateBrandBit, bool, 1, _)
DEFINE_BIT_FIELDS(FLAGS_BIT_FIELDS)
#undef FLAGS_BIT_FIELDS
using BodyDescriptor = FixedBodyDescriptor<kDescriptionOffset, kSize, kSize>;
void SymbolShortPrint(std::ostream& os);


@ -11,9 +11,18 @@ extern class Name extends PrimitiveHeapObject {
// kinds of names.
type AnyName = PrivateSymbol|PublicSymbol|String;
bitfield struct SymbolFlags extends uint32 {
is_private: bool: 1 bit;
is_well_known_symbol: bool: 1 bit;
is_in_public_symbol_table: bool: 1 bit;
is_interesting_symbol: bool: 1 bit;
is_private_name: bool: 1 bit;
is_private_brand: bool: 1 bit;
}
@generateCppClass
extern class Symbol extends Name {
flags: int32;
flags: SymbolFlags;
description: String|Undefined;
}
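
Because Symbol now also inherits from TorqueGeneratedSymbolFlagsFields (see the name.h hunk above), the bit accessors generated from these fields keep the spellings the hand-written macros used, so existing call sites need no change. A hypothetical example:

// Hypothetical call site; IsPrivateBit is generated from is_private above.
bool is_private = Symbol::IsPrivateBit::decode(sym.flags());
// kMask/kShift constants come along with decode()/update() as usual.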


@ -241,6 +241,8 @@ void CSAGenerator::EmitInstruction(const CallIntrinsicInstruction& instruction,
out_ << "ca_.UintPtrConstant";
} else if (return_type->IsSubtypeOf(TypeOracle::GetInt32Type())) {
out_ << "ca_.Int32Constant";
} else if (return_type->IsSubtypeOf(TypeOracle::GetUint32Type())) {
out_ << "ca_.Uint32Constant";
} else if (return_type->IsSubtypeOf(TypeOracle::GetBoolType())) {
out_ << "ca_.BoolConstant";
} else {
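
The new else-if extends the constant-emission dispatch so intrinsics whose Torque return type is uint32 also get a matching CSA constant. Condensed (not the full method body):

// Pick the CSA constant constructor matching the intrinsic's return type.
if (return_type->IsSubtypeOf(TypeOracle::GetInt32Type())) {
  out_ << "ca_.Int32Constant";
} else if (return_type->IsSubtypeOf(TypeOracle::GetUint32Type())) {
  out_ << "ca_.Uint32Constant";  // the newly handled case
} else if (return_type->IsSubtypeOf(TypeOracle::GetBoolType())) {
  out_ << "ca_.BoolConstant";
}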


@ -855,11 +855,13 @@ bool IsAllowedAsBitField(const Type* type) {
// compelling use case.
return false;
}
// Any unsigned integer-ish type, including bools and enums which inherit from
// unsigned integer types, are allowed. Currently decoding signed integers is
// not supported.
// Any integer-ish type, including bools and enums which inherit from integer
// types, is allowed. Note, however, that we always zero-extend during
// decoding regardless of signedness.
return type->IsSubtypeOf(TypeOracle::GetUint32Type()) ||
type->IsSubtypeOf(TypeOracle::GetUIntPtrType()) ||
type->IsSubtypeOf(TypeOracle::GetInt32Type()) ||
type->IsSubtypeOf(TypeOracle::GetIntPtrType()) ||
type->IsSubtypeOf(TypeOracle::GetBoolType());
}
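
The zero-extension caveat is the practical consequence of newly allowing signed bitfield types: a negative value stored in a narrow field reads back as its raw, non-sign-extended bit pattern unless a caller sign-extends explicitly. A minimal standalone illustration (plain C++, not generated code):

#include <cassert>
#include <cstdint>

int main() {
  // A 3-bit field at shift 0 holding an int32 value.
  constexpr uint32_t kMask = (1u << 3) - 1;
  uint32_t encoded = static_cast<uint32_t>(-1) & kMask;  // store -1 -> 0b111
  // Decoding zero-extends: the field's top bit is not treated as a sign bit.
  int32_t decoded = static_cast<int32_t>(encoded & kMask);
  assert(decoded == 7);  // not -1
  return 0;
}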


@ -157,15 +157,15 @@ consts_misc = [
'value': 'DICTIONARY_ELEMENTS' },
{ 'name': 'bit_field2_elements_kind_mask',
'value': 'Map::ElementsKindBits::kMask' },
'value': 'Map::Bits2::ElementsKindBits::kMask' },
{ 'name': 'bit_field2_elements_kind_shift',
'value': 'Map::ElementsKindBits::kShift' },
'value': 'Map::Bits2::ElementsKindBits::kShift' },
{ 'name': 'bit_field3_is_dictionary_map_shift',
'value': 'Map::IsDictionaryMapBit::kShift' },
'value': 'Map::Bits3::IsDictionaryMapBit::kShift' },
{ 'name': 'bit_field3_number_of_own_descriptors_mask',
'value': 'Map::NumberOfOwnDescriptorsBits::kMask' },
'value': 'Map::Bits3::NumberOfOwnDescriptorsBits::kMask' },
{ 'name': 'bit_field3_number_of_own_descriptors_shift',
'value': 'Map::NumberOfOwnDescriptorsBits::kShift' },
'value': 'Map::Bits3::NumberOfOwnDescriptorsBits::kShift' },
{ 'name': 'class_Map__instance_descriptors_offset',
'value': 'Map::kInstanceDescriptorsOffset' },