Make some functions that are hit during renderer startup available for inlining

This is a step towards closing the performance gap between the MSVC build
(which uses link-time optimization) and the Clang build (where LTO isn't
ready on Windows yet). We did a study (see the bug) of which non-inlined
functions are hit frequently during renderer startup and which of them
would be inlined under LTO. This should benefit performance in all builds
that currently don't use LTO (Android, Linux, Mac) as well as the Win/Clang
build.

This increases the binary size of chrome_child.dll by 2 KB.
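
The pattern applied throughout is to move the definition of a small, hot
function from the .cc file into its header, so that callers in other
translation units can inline it without LTO. A minimal before/after sketch
with a hypothetical class (illustrative only, not code from this commit):

// widget.h -- before: callers only see the declaration, so the call can
// never be inlined across translation units unless LTO is enabled.
class Widget {
 public:
  int HotValue() const;  // defined in widget.cc

 private:
  int value_ = 0;
};

// widget.h -- after: the definition is in the class body, so every caller
// can inline it even in builds without link-time optimization.
class WidgetInlined {
 public:
  int HotValue() const { return value_ * 2; }

 private:
  int value_ = 0;
};

The cost is that the inlined body is compiled into each calling translation
unit, which is where the 2 KB size increase above comes from.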

BUG=chromium:728324

Review-Url: https://codereview.chromium.org/2950993002
Cr-Commit-Position: refs/heads/master@{#46191}
Author: hans
Date: 2017-06-23 14:12:13 -07:00 (committed by Commit Bot)
Commit: d00d52be1f (parent ee0e295d8e)
21 changed files with 291 additions and 312 deletions

View File

@@ -200,17 +200,6 @@ VariableProxy::VariableProxy(Variable* var, int start_position)
BindTo(var);
}
VariableProxy::VariableProxy(const AstRawString* name,
VariableKind variable_kind, int start_position)
: Expression(start_position, kVariableProxy),
raw_name_(name),
next_unresolved_(nullptr) {
bit_field_ |= IsThisField::encode(variable_kind == THIS_VARIABLE) |
IsAssignedField::encode(false) |
IsResolvedField::encode(false) |
HoleCheckModeField::encode(HoleCheckMode::kElided);
}
VariableProxy::VariableProxy(const VariableProxy* copy_from)
: Expression(copy_from->position(), kVariableProxy),
next_unresolved_(nullptr) {

View File

@@ -1585,8 +1585,18 @@ class VariableProxy final : public Expression {
friend class AstNodeFactory;
VariableProxy(Variable* var, int start_position);
VariableProxy(const AstRawString* name, VariableKind variable_kind,
int start_position);
int start_position)
: Expression(start_position, kVariableProxy),
raw_name_(name),
next_unresolved_(nullptr) {
bit_field_ |= IsThisField::encode(variable_kind == THIS_VARIABLE) |
IsAssignedField::encode(false) |
IsResolvedField::encode(false) |
HoleCheckModeField::encode(HoleCheckMode::kElided);
}
explicit VariableProxy(const VariableProxy* copy_from);
class IsThisField : public BitField<bool, Expression::kNextBitFieldIndex, 1> {

View File

@@ -1234,20 +1234,6 @@ Variable* Scope::DeclareVariableName(const AstRawString* name,
}
}
VariableProxy* Scope::NewUnresolved(AstNodeFactory* factory,
const AstRawString* name,
int start_position, VariableKind kind) {
// Note that we must not share the unresolved variables with
// the same name because they may be removed selectively via
// RemoveUnresolved().
DCHECK(!already_resolved_);
DCHECK_EQ(factory->zone(), zone());
VariableProxy* proxy = factory->NewVariableProxy(name, kind, start_position);
proxy->set_next_unresolved(unresolved_);
unresolved_ = proxy;
return proxy;
}
void Scope::AddUnresolved(VariableProxy* proxy) {
DCHECK(!already_resolved_);
DCHECK(!proxy->is_resolved());

View File

@@ -5,6 +5,7 @@
#ifndef V8_AST_SCOPES_H_
#define V8_AST_SCOPES_H_
#include "src/ast/ast.h"
#include "src/base/compiler-specific.h"
#include "src/base/hashmap.h"
#include "src/globals.h"
@@ -208,8 +209,18 @@ class V8_EXPORT_PRIVATE Scope : public NON_EXPORTED_BASE(ZoneObject) {
// Create a new unresolved variable.
VariableProxy* NewUnresolved(AstNodeFactory* factory,
const AstRawString* name,
int start_position = kNoSourcePosition,
VariableKind kind = NORMAL_VARIABLE);
int start_pos = kNoSourcePosition,
VariableKind kind = NORMAL_VARIABLE) {
// Note that we must not share the unresolved variables with
// the same name because they may be removed selectively via
// RemoveUnresolved().
DCHECK(!already_resolved_);
DCHECK_EQ(factory->zone(), zone());
VariableProxy* proxy = factory->NewVariableProxy(name, kind, start_pos);
proxy->set_next_unresolved(unresolved_);
unresolved_ = proxy;
return proxy;
}
void AddUnresolved(VariableProxy* proxy);

View File

@@ -14,26 +14,6 @@ namespace internal {
// ----------------------------------------------------------------------------
// Implementation Variable.
Variable::Variable(Scope* scope, const AstRawString* name, VariableMode mode,
VariableKind kind, InitializationFlag initialization_flag,
MaybeAssignedFlag maybe_assigned_flag)
: scope_(scope),
name_(name),
local_if_not_shadowed_(nullptr),
next_(nullptr),
index_(-1),
initializer_position_(kNoSourcePosition),
bit_field_(MaybeAssignedFlagField::encode(maybe_assigned_flag) |
InitializationFlagField::encode(initialization_flag) |
VariableModeField::encode(mode) | IsUsedField::encode(false) |
ForceContextAllocationField::encode(false) |
ForceHoleInitializationField::encode(false) |
LocationField::encode(VariableLocation::UNALLOCATED) |
VariableKindField::encode(kind)) {
// Var declared variables never need initialization.
DCHECK(!(mode == VAR && initialization_flag == kNeedsInitialization));
}
Variable::Variable(Variable* other)
: scope_(other->scope_),
name_(other->name_),

View File

@@ -20,7 +20,24 @@ class Variable final : public ZoneObject {
public:
Variable(Scope* scope, const AstRawString* name, VariableMode mode,
VariableKind kind, InitializationFlag initialization_flag,
MaybeAssignedFlag maybe_assigned_flag = kNotAssigned);
MaybeAssignedFlag maybe_assigned_flag = kNotAssigned)
: scope_(scope),
name_(name),
local_if_not_shadowed_(nullptr),
next_(nullptr),
index_(-1),
initializer_position_(kNoSourcePosition),
bit_field_(MaybeAssignedFlagField::encode(maybe_assigned_flag) |
InitializationFlagField::encode(initialization_flag) |
VariableModeField::encode(mode) |
IsUsedField::encode(false) |
ForceContextAllocationField::encode(false) |
ForceHoleInitializationField::encode(false) |
LocationField::encode(VariableLocation::UNALLOCATED) |
VariableKindField::encode(kind)) {
// Var declared variables never need initialization.
DCHECK(!(mode == VAR && initialization_flag == kNeedsInitialization));
}
explicit Variable(Variable* other);

View File

@@ -294,6 +294,9 @@ AllocationResult Heap::CopyFixedDoubleArray(FixedDoubleArray* src) {
return CopyFixedDoubleArrayWithMap(src, src->map());
}
AllocationResult Heap::AllocateFixedArray(int length, PretenureFlag pretenure) {
return AllocateFixedArrayWithFiller(length, pretenure, undefined_value());
}
AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationSpace space,
AllocationAlignment alignment) {

View File

@@ -4028,11 +4028,6 @@ AllocationResult Heap::AllocateFixedArrayWithFiller(int length,
}
AllocationResult Heap::AllocateFixedArray(int length, PretenureFlag pretenure) {
return AllocateFixedArrayWithFiller(length, pretenure, undefined_value());
}
AllocationResult Heap::AllocateUninitializedFixedArray(int length) {
if (length == 0) return empty_fixed_array();

View File

@@ -2006,8 +2006,8 @@ class Heap {
CopyBytecodeArray(BytecodeArray* bytecode_array);
// Allocates a fixed array initialized with undefined values
MUST_USE_RESULT AllocationResult
AllocateFixedArray(int length, PretenureFlag pretenure = NOT_TENURED);
MUST_USE_RESULT inline AllocationResult AllocateFixedArray(
int length, PretenureFlag pretenure = NOT_TENURED);
MUST_USE_RESULT AllocationResult AllocateSmallOrderedHashSet(
int length, PretenureFlag pretenure = NOT_TENURED);
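
The Heap hunks above use a variation of the pattern: the class declaration
gains the inline keyword, and the definition of AllocateFixedArray moves
into an inline header that callers include (the filename is not shown in
this view). A rough sketch of that layout, using hypothetical names:

// arena.h (hypothetical): declare the method inline; keep the body out of
// the class so the main header stays small.
class Arena {
 public:
  inline int* AllocateInts(int length);

 private:
  int* AllocateRaw(int size_in_bytes);  // stays out of line in arena.cc
};

// arena-inl.h (hypothetical): the definition is visible to any caller that
// includes this header, so the call can be inlined without LTO.
#include "arena.h"
inline int* Arena::AllocateInts(int length) {
  return AllocateRaw(length * static_cast<int>(sizeof(int)));
}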

View File

@@ -15,6 +15,204 @@
namespace v8 {
namespace internal {
VisitorId StaticVisitorBase::GetVisitorId(Map* map) {
return GetVisitorId(map->instance_type(), map->instance_size(),
FLAG_unbox_double_fields && !map->HasFastPointerLayout());
}
VisitorId StaticVisitorBase::GetVisitorId(int instance_type, int instance_size,
bool has_unboxed_fields) {
if (instance_type < FIRST_NONSTRING_TYPE) {
switch (instance_type & kStringRepresentationMask) {
case kSeqStringTag:
if ((instance_type & kStringEncodingMask) == kOneByteStringTag) {
return kVisitSeqOneByteString;
} else {
return kVisitSeqTwoByteString;
}
case kConsStringTag:
if (IsShortcutCandidate(instance_type)) {
return kVisitShortcutCandidate;
} else {
return kVisitConsString;
}
case kSlicedStringTag:
return kVisitSlicedString;
case kExternalStringTag:
return kVisitDataObject;
case kThinStringTag:
return kVisitThinString;
}
UNREACHABLE();
}
switch (instance_type) {
case BYTE_ARRAY_TYPE:
return kVisitByteArray;
case BYTECODE_ARRAY_TYPE:
return kVisitBytecodeArray;
case FREE_SPACE_TYPE:
return kVisitFreeSpace;
case FIXED_ARRAY_TYPE:
return kVisitFixedArray;
case FIXED_DOUBLE_ARRAY_TYPE:
return kVisitFixedDoubleArray;
case ODDBALL_TYPE:
return kVisitOddball;
case MAP_TYPE:
return kVisitMap;
case CODE_TYPE:
return kVisitCode;
case CELL_TYPE:
return kVisitCell;
case PROPERTY_CELL_TYPE:
return kVisitPropertyCell;
case WEAK_CELL_TYPE:
return kVisitWeakCell;
case TRANSITION_ARRAY_TYPE:
return kVisitTransitionArray;
case JS_WEAK_MAP_TYPE:
case JS_WEAK_SET_TYPE:
return kVisitJSWeakCollection;
case JS_REGEXP_TYPE:
return kVisitJSRegExp;
case SHARED_FUNCTION_INFO_TYPE:
return kVisitSharedFunctionInfo;
case JS_PROXY_TYPE:
return kVisitStruct;
case SYMBOL_TYPE:
return kVisitSymbol;
case JS_ARRAY_BUFFER_TYPE:
return kVisitJSArrayBuffer;
case SMALL_ORDERED_HASH_MAP_TYPE:
return kVisitSmallOrderedHashMap;
case SMALL_ORDERED_HASH_SET_TYPE:
return kVisitSmallOrderedHashSet;
case JS_OBJECT_TYPE:
case JS_ERROR_TYPE:
case JS_ARGUMENTS_TYPE:
case JS_ASYNC_FROM_SYNC_ITERATOR_TYPE:
case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
case JS_GENERATOR_OBJECT_TYPE:
case JS_ASYNC_GENERATOR_OBJECT_TYPE:
case JS_MODULE_NAMESPACE_TYPE:
case JS_VALUE_TYPE:
case JS_DATE_TYPE:
case JS_ARRAY_TYPE:
case JS_GLOBAL_PROXY_TYPE:
case JS_GLOBAL_OBJECT_TYPE:
case JS_MESSAGE_OBJECT_TYPE:
case JS_TYPED_ARRAY_TYPE:
case JS_DATA_VIEW_TYPE:
case JS_SET_TYPE:
case JS_MAP_TYPE:
case JS_SET_ITERATOR_TYPE:
case JS_MAP_ITERATOR_TYPE:
case JS_STRING_ITERATOR_TYPE:
case JS_TYPED_ARRAY_KEY_ITERATOR_TYPE:
case JS_FAST_ARRAY_KEY_ITERATOR_TYPE:
case JS_GENERIC_ARRAY_KEY_ITERATOR_TYPE:
case JS_UINT8_ARRAY_KEY_VALUE_ITERATOR_TYPE:
case JS_INT8_ARRAY_KEY_VALUE_ITERATOR_TYPE:
case JS_UINT16_ARRAY_KEY_VALUE_ITERATOR_TYPE:
case JS_INT16_ARRAY_KEY_VALUE_ITERATOR_TYPE:
case JS_UINT32_ARRAY_KEY_VALUE_ITERATOR_TYPE:
case JS_INT32_ARRAY_KEY_VALUE_ITERATOR_TYPE:
case JS_FLOAT32_ARRAY_KEY_VALUE_ITERATOR_TYPE:
case JS_FLOAT64_ARRAY_KEY_VALUE_ITERATOR_TYPE:
case JS_UINT8_CLAMPED_ARRAY_KEY_VALUE_ITERATOR_TYPE:
case JS_FAST_SMI_ARRAY_KEY_VALUE_ITERATOR_TYPE:
case JS_FAST_HOLEY_SMI_ARRAY_KEY_VALUE_ITERATOR_TYPE:
case JS_FAST_ARRAY_KEY_VALUE_ITERATOR_TYPE:
case JS_FAST_HOLEY_ARRAY_KEY_VALUE_ITERATOR_TYPE:
case JS_FAST_DOUBLE_ARRAY_KEY_VALUE_ITERATOR_TYPE:
case JS_FAST_HOLEY_DOUBLE_ARRAY_KEY_VALUE_ITERATOR_TYPE:
case JS_GENERIC_ARRAY_KEY_VALUE_ITERATOR_TYPE:
case JS_UINT8_ARRAY_VALUE_ITERATOR_TYPE:
case JS_INT8_ARRAY_VALUE_ITERATOR_TYPE:
case JS_UINT16_ARRAY_VALUE_ITERATOR_TYPE:
case JS_INT16_ARRAY_VALUE_ITERATOR_TYPE:
case JS_UINT32_ARRAY_VALUE_ITERATOR_TYPE:
case JS_INT32_ARRAY_VALUE_ITERATOR_TYPE:
case JS_FLOAT32_ARRAY_VALUE_ITERATOR_TYPE:
case JS_FLOAT64_ARRAY_VALUE_ITERATOR_TYPE:
case JS_UINT8_CLAMPED_ARRAY_VALUE_ITERATOR_TYPE:
case JS_FAST_SMI_ARRAY_VALUE_ITERATOR_TYPE:
case JS_FAST_HOLEY_SMI_ARRAY_VALUE_ITERATOR_TYPE:
case JS_FAST_ARRAY_VALUE_ITERATOR_TYPE:
case JS_FAST_HOLEY_ARRAY_VALUE_ITERATOR_TYPE:
case JS_FAST_DOUBLE_ARRAY_VALUE_ITERATOR_TYPE:
case JS_FAST_HOLEY_DOUBLE_ARRAY_VALUE_ITERATOR_TYPE:
case JS_GENERIC_ARRAY_VALUE_ITERATOR_TYPE:
case JS_PROMISE_CAPABILITY_TYPE:
case JS_PROMISE_TYPE:
case JS_BOUND_FUNCTION_TYPE:
return has_unboxed_fields ? kVisitJSObject : kVisitJSObjectFast;
case JS_API_OBJECT_TYPE:
case JS_SPECIAL_API_OBJECT_TYPE:
return kVisitJSApiObject;
case JS_FUNCTION_TYPE:
return kVisitJSFunction;
case FILLER_TYPE:
case FOREIGN_TYPE:
case HEAP_NUMBER_TYPE:
case MUTABLE_HEAP_NUMBER_TYPE:
return kVisitDataObject;
case FIXED_UINT8_ARRAY_TYPE:
case FIXED_INT8_ARRAY_TYPE:
case FIXED_UINT16_ARRAY_TYPE:
case FIXED_INT16_ARRAY_TYPE:
case FIXED_UINT32_ARRAY_TYPE:
case FIXED_INT32_ARRAY_TYPE:
case FIXED_FLOAT32_ARRAY_TYPE:
case FIXED_UINT8_CLAMPED_ARRAY_TYPE:
return kVisitFixedTypedArrayBase;
case FIXED_FLOAT64_ARRAY_TYPE:
return kVisitFixedFloat64Array;
#define MAKE_STRUCT_CASE(NAME, Name, name) case NAME##_TYPE:
STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
if (instance_type == ALLOCATION_SITE_TYPE) {
return kVisitAllocationSite;
}
return kVisitStruct;
default:
UNREACHABLE();
}
}
template <typename Callback>
Callback VisitorDispatchTable<Callback>::GetVisitor(Map* map) {

View File

@@ -11,205 +11,6 @@
namespace v8 {
namespace internal {
VisitorId StaticVisitorBase::GetVisitorId(Map* map) {
return GetVisitorId(map->instance_type(), map->instance_size(),
FLAG_unbox_double_fields && !map->HasFastPointerLayout());
}
VisitorId StaticVisitorBase::GetVisitorId(int instance_type, int instance_size,
bool has_unboxed_fields) {
if (instance_type < FIRST_NONSTRING_TYPE) {
switch (instance_type & kStringRepresentationMask) {
case kSeqStringTag:
if ((instance_type & kStringEncodingMask) == kOneByteStringTag) {
return kVisitSeqOneByteString;
} else {
return kVisitSeqTwoByteString;
}
case kConsStringTag:
if (IsShortcutCandidate(instance_type)) {
return kVisitShortcutCandidate;
} else {
return kVisitConsString;
}
case kSlicedStringTag:
return kVisitSlicedString;
case kExternalStringTag:
return kVisitDataObject;
case kThinStringTag:
return kVisitThinString;
}
UNREACHABLE();
}
switch (instance_type) {
case BYTE_ARRAY_TYPE:
return kVisitByteArray;
case BYTECODE_ARRAY_TYPE:
return kVisitBytecodeArray;
case FREE_SPACE_TYPE:
return kVisitFreeSpace;
case FIXED_ARRAY_TYPE:
return kVisitFixedArray;
case FIXED_DOUBLE_ARRAY_TYPE:
return kVisitFixedDoubleArray;
case ODDBALL_TYPE:
return kVisitOddball;
case MAP_TYPE:
return kVisitMap;
case CODE_TYPE:
return kVisitCode;
case CELL_TYPE:
return kVisitCell;
case PROPERTY_CELL_TYPE:
return kVisitPropertyCell;
case WEAK_CELL_TYPE:
return kVisitWeakCell;
case TRANSITION_ARRAY_TYPE:
return kVisitTransitionArray;
case JS_WEAK_MAP_TYPE:
case JS_WEAK_SET_TYPE:
return kVisitJSWeakCollection;
case JS_REGEXP_TYPE:
return kVisitJSRegExp;
case SHARED_FUNCTION_INFO_TYPE:
return kVisitSharedFunctionInfo;
case JS_PROXY_TYPE:
return kVisitStruct;
case SYMBOL_TYPE:
return kVisitSymbol;
case JS_ARRAY_BUFFER_TYPE:
return kVisitJSArrayBuffer;
case SMALL_ORDERED_HASH_MAP_TYPE:
return kVisitSmallOrderedHashMap;
case SMALL_ORDERED_HASH_SET_TYPE:
return kVisitSmallOrderedHashSet;
case JS_OBJECT_TYPE:
case JS_ERROR_TYPE:
case JS_ARGUMENTS_TYPE:
case JS_ASYNC_FROM_SYNC_ITERATOR_TYPE:
case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
case JS_GENERATOR_OBJECT_TYPE:
case JS_ASYNC_GENERATOR_OBJECT_TYPE:
case JS_MODULE_NAMESPACE_TYPE:
case JS_VALUE_TYPE:
case JS_DATE_TYPE:
case JS_ARRAY_TYPE:
case JS_GLOBAL_PROXY_TYPE:
case JS_GLOBAL_OBJECT_TYPE:
case JS_MESSAGE_OBJECT_TYPE:
case JS_TYPED_ARRAY_TYPE:
case JS_DATA_VIEW_TYPE:
case JS_SET_TYPE:
case JS_MAP_TYPE:
case JS_SET_ITERATOR_TYPE:
case JS_MAP_ITERATOR_TYPE:
case JS_STRING_ITERATOR_TYPE:
case JS_TYPED_ARRAY_KEY_ITERATOR_TYPE:
case JS_FAST_ARRAY_KEY_ITERATOR_TYPE:
case JS_GENERIC_ARRAY_KEY_ITERATOR_TYPE:
case JS_UINT8_ARRAY_KEY_VALUE_ITERATOR_TYPE:
case JS_INT8_ARRAY_KEY_VALUE_ITERATOR_TYPE:
case JS_UINT16_ARRAY_KEY_VALUE_ITERATOR_TYPE:
case JS_INT16_ARRAY_KEY_VALUE_ITERATOR_TYPE:
case JS_UINT32_ARRAY_KEY_VALUE_ITERATOR_TYPE:
case JS_INT32_ARRAY_KEY_VALUE_ITERATOR_TYPE:
case JS_FLOAT32_ARRAY_KEY_VALUE_ITERATOR_TYPE:
case JS_FLOAT64_ARRAY_KEY_VALUE_ITERATOR_TYPE:
case JS_UINT8_CLAMPED_ARRAY_KEY_VALUE_ITERATOR_TYPE:
case JS_FAST_SMI_ARRAY_KEY_VALUE_ITERATOR_TYPE:
case JS_FAST_HOLEY_SMI_ARRAY_KEY_VALUE_ITERATOR_TYPE:
case JS_FAST_ARRAY_KEY_VALUE_ITERATOR_TYPE:
case JS_FAST_HOLEY_ARRAY_KEY_VALUE_ITERATOR_TYPE:
case JS_FAST_DOUBLE_ARRAY_KEY_VALUE_ITERATOR_TYPE:
case JS_FAST_HOLEY_DOUBLE_ARRAY_KEY_VALUE_ITERATOR_TYPE:
case JS_GENERIC_ARRAY_KEY_VALUE_ITERATOR_TYPE:
case JS_UINT8_ARRAY_VALUE_ITERATOR_TYPE:
case JS_INT8_ARRAY_VALUE_ITERATOR_TYPE:
case JS_UINT16_ARRAY_VALUE_ITERATOR_TYPE:
case JS_INT16_ARRAY_VALUE_ITERATOR_TYPE:
case JS_UINT32_ARRAY_VALUE_ITERATOR_TYPE:
case JS_INT32_ARRAY_VALUE_ITERATOR_TYPE:
case JS_FLOAT32_ARRAY_VALUE_ITERATOR_TYPE:
case JS_FLOAT64_ARRAY_VALUE_ITERATOR_TYPE:
case JS_UINT8_CLAMPED_ARRAY_VALUE_ITERATOR_TYPE:
case JS_FAST_SMI_ARRAY_VALUE_ITERATOR_TYPE:
case JS_FAST_HOLEY_SMI_ARRAY_VALUE_ITERATOR_TYPE:
case JS_FAST_ARRAY_VALUE_ITERATOR_TYPE:
case JS_FAST_HOLEY_ARRAY_VALUE_ITERATOR_TYPE:
case JS_FAST_DOUBLE_ARRAY_VALUE_ITERATOR_TYPE:
case JS_FAST_HOLEY_DOUBLE_ARRAY_VALUE_ITERATOR_TYPE:
case JS_GENERIC_ARRAY_VALUE_ITERATOR_TYPE:
case JS_PROMISE_CAPABILITY_TYPE:
case JS_PROMISE_TYPE:
case JS_BOUND_FUNCTION_TYPE:
return has_unboxed_fields ? kVisitJSObject : kVisitJSObjectFast;
case JS_API_OBJECT_TYPE:
case JS_SPECIAL_API_OBJECT_TYPE:
return kVisitJSApiObject;
case JS_FUNCTION_TYPE:
return kVisitJSFunction;
case FILLER_TYPE:
case FOREIGN_TYPE:
case HEAP_NUMBER_TYPE:
case MUTABLE_HEAP_NUMBER_TYPE:
return kVisitDataObject;
case FIXED_UINT8_ARRAY_TYPE:
case FIXED_INT8_ARRAY_TYPE:
case FIXED_UINT16_ARRAY_TYPE:
case FIXED_INT16_ARRAY_TYPE:
case FIXED_UINT32_ARRAY_TYPE:
case FIXED_INT32_ARRAY_TYPE:
case FIXED_FLOAT32_ARRAY_TYPE:
case FIXED_UINT8_CLAMPED_ARRAY_TYPE:
return kVisitFixedTypedArrayBase;
case FIXED_FLOAT64_ARRAY_TYPE:
return kVisitFixedFloat64Array;
#define MAKE_STRUCT_CASE(NAME, Name, name) case NAME##_TYPE:
STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
if (instance_type == ALLOCATION_SITE_TYPE) {
return kVisitAllocationSite;
}
return kVisitStruct;
default:
UNREACHABLE();
}
}
// We don't record weak slots during marking or scavenges. Instead we do it
// once when we complete mark-compact cycle. Note that write barrier has no

View File

@@ -86,11 +86,11 @@ class StaticVisitorBase : public AllStatic {
// Determine which specialized visitor should be used for given instance type
// and instance size.
static VisitorId GetVisitorId(int instance_type, int instance_size,
static inline VisitorId GetVisitorId(int instance_type, int instance_size,
bool has_unboxed_fields);
// Determine which specialized visitor should be used for given map.
static VisitorId GetVisitorId(Map* map);
static inline VisitorId GetVisitorId(Map* map);
};

View File

@@ -37,6 +37,15 @@ void Scavenger::ScavengeObject(HeapObject** p, HeapObject* object) {
return ScavengeObjectSlow(p, object);
}
void Scavenger::ScavengeObjectSlow(HeapObject** p, HeapObject* object) {
SLOW_DCHECK(object->GetIsolate()->heap()->InFromSpace(object));
MapWord first_word = object->map_word();
SLOW_DCHECK(!first_word.IsForwardingAddress());
Map* map = first_word.ToMap();
Scavenger* scavenger = map->GetHeap()->scavenge_collector_;
scavenger->scavenging_visitors_table_.GetVisitor(map)(map, p, object);
}
SlotCallbackResult Scavenger::CheckAndScavengeObject(Heap* heap,
Address slot_address) {
Object** slot = reinterpret_cast<Object**>(slot_address);

View File

@@ -404,17 +404,6 @@ void Scavenger::Initialize() {
}
// static
void Scavenger::ScavengeObjectSlow(HeapObject** p, HeapObject* object) {
SLOW_DCHECK(object->GetIsolate()->heap()->InFromSpace(object));
MapWord first_word = object->map_word();
SLOW_DCHECK(!first_word.IsForwardingAddress());
Map* map = first_word.ToMap();
Scavenger* scavenger = map->GetHeap()->scavenge_collector_;
scavenger->scavenging_visitors_table_.GetVisitor(map)(map, p, object);
}
void Scavenger::SelectScavengingVisitorsTable() {
bool logging_and_profiling =
FLAG_verify_predictable || isolate()->logger()->is_logging() ||

View File

@@ -30,7 +30,7 @@ class Scavenger {
Address slot_address);
// Slow part of {ScavengeObject} above.
static void ScavengeObjectSlow(HeapObject** p, HeapObject* object);
static inline void ScavengeObjectSlow(HeapObject** p, HeapObject* object);
// Chooses an appropriate static visitor table depending on the current state
// of the heap (i.e. incremental marking, logging and profiling).

View File

@@ -55,6 +55,18 @@ const OperandSize* const Bytecodes::kOperandSizes[][3] = {
BYTECODE_LIST(ENTRY)
#undef ENTRY
};
const OperandSize Bytecodes::kOperandKindSizes[][3] = {
#define ENTRY(Name, ...) \
{ OperandScaler<OperandType::k##Name, \
OperandScale::kSingle>::kOperandSize, \
OperandScaler<OperandType::k##Name, \
OperandScale::kDouble>::kOperandSize, \
OperandScaler<OperandType::k##Name, \
OperandScale::kQuadruple>::kOperandSize },
OPERAND_TYPE_LIST(ENTRY)
#undef ENTRY
};
// clang-format on
// static
@@ -274,31 +286,6 @@ bool Bytecodes::IsUnsignedOperandType(OperandType operand_type) {
UNREACHABLE();
}
// static
OperandSize Bytecodes::SizeOfOperand(OperandType operand_type,
OperandScale operand_scale) {
DCHECK_LE(operand_type, OperandType::kLast);
DCHECK_GE(operand_scale, OperandScale::kSingle);
DCHECK_LE(operand_scale, OperandScale::kLast);
STATIC_ASSERT(static_cast<int>(OperandScale::kQuadruple) == 4 &&
OperandScale::kLast == OperandScale::kQuadruple);
int scale_index = static_cast<int>(operand_scale) >> 1;
// clang-format off
static const OperandSize kOperandSizes[][3] = {
#define ENTRY(Name, ...) \
{ OperandScaler<OperandType::k##Name, \
OperandScale::kSingle>::kOperandSize, \
OperandScaler<OperandType::k##Name, \
OperandScale::kDouble>::kOperandSize, \
OperandScaler<OperandType::k##Name, \
OperandScale::kQuadruple>::kOperandSize },
OPERAND_TYPE_LIST(ENTRY)
#undef ENTRY
};
// clang-format on
return kOperandSizes[static_cast<size_t>(operand_type)][scale_index];
}
// static
bool Bytecodes::BytecodeHasHandler(Bytecode bytecode,
OperandScale operand_scale) {

View File

@@ -872,8 +872,17 @@ class V8_EXPORT_PRIVATE Bytecodes final {
UNREACHABLE();
}
// Returns the size of |operand| for |operand_scale|.
static OperandSize SizeOfOperand(OperandType operand, OperandScale scale);
// Returns the size of |operand_type| for |operand_scale|.
static OperandSize SizeOfOperand(OperandType operand_type,
OperandScale operand_scale) {
DCHECK_LE(operand_type, OperandType::kLast);
DCHECK_GE(operand_scale, OperandScale::kSingle);
DCHECK_LE(operand_scale, OperandScale::kLast);
STATIC_ASSERT(static_cast<int>(OperandScale::kQuadruple) == 4 &&
OperandScale::kLast == OperandScale::kQuadruple);
int scale_index = static_cast<int>(operand_scale) >> 1;
return kOperandKindSizes[static_cast<size_t>(operand_type)][scale_index];
}
// Returns true if |operand_type| is a runtime-id operand (kRuntimeId).
static bool IsRuntimeIdOperandType(OperandType operand_type);
@@ -929,6 +938,7 @@ class V8_EXPORT_PRIVATE Bytecodes final {
static const bool kIsScalable[];
static const int kBytecodeSizes[][3];
static const OperandSize* const kOperandSizes[][3];
static OperandSize const kOperandKindSizes[][3];
};
V8_EXPORT_PRIVATE std::ostream& operator<<(std::ostream& os,
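
The Bytecodes hunks above show one more variation: SizeOfOperand used to
build its lookup table as a function-local static inside the .cc, which kept
the whole function out of line. The table is hoisted into the class-static
array kOperandKindSizes (still defined in the .cc), so the header copy of
SizeOfOperand shrinks to an inlinable table index. A minimal sketch of the
same idea with hypothetical names:

// sizes.h (hypothetical)
#include <cstddef>

class Sizes {
 public:
  // Small enough to inline from the header: just an index into a table
  // that is defined once, elsewhere.
  static int SizeOf(int kind) {
    return kKindSizes[static_cast<size_t>(kind)];
  }

 private:
  static const int kKindSizes[];  // defined in sizes.cc
};

// sizes.cc (hypothetical)
#include "sizes.h"
const int Sizes::kKindSizes[] = {1, 2, 4, 8};

Keeping the table data in the .cc avoids duplicating it in every translation
unit; only the tiny accessor gets duplicated where it is inlined.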

View File

@@ -1178,13 +1178,6 @@ Token::Value Scanner::ScanTemplateStart() {
return ScanTemplateSpan();
}
Token::Value Scanner::ScanTemplateContinuation() {
DCHECK_EQ(next_.token, Token::RBRACE);
next_.location.beg_pos = source_pos() - 1; // We already consumed }
return ScanTemplateSpan();
}
Handle<String> Scanner::SourceUrl(Isolate* isolate) const {
Handle<String> tmp;
if (source_url_.length() > 0) tmp = source_url_.Internalize(isolate);

View File

@@ -342,7 +342,11 @@ class Scanner {
// Scans the input as a template literal
Token::Value ScanTemplateStart();
Token::Value ScanTemplateContinuation();
Token::Value ScanTemplateContinuation() {
DCHECK_EQ(next_.token, Token::RBRACE);
next_.location.beg_pos = source_pos() - 1; // We already consumed }
return ScanTemplateSpan();
}
Handle<String> SourceUrl(Isolate* isolate) const;
Handle<String> SourceMappingUrl(Isolate* isolate) const;

View File

@@ -12,12 +12,6 @@
namespace v8 {
namespace internal {
void SnapshotByteSource::CopyRaw(byte* to, int number_of_bytes) {
memcpy(to, data_ + position_, number_of_bytes);
position_ += number_of_bytes;
}
void SnapshotByteSink::PutInt(uintptr_t integer, const char* description) {
DCHECK(integer < 1 << 30);
integer <<= 2;

View File

@@ -38,7 +38,10 @@ class SnapshotByteSource final {
void Advance(int by) { position_ += by; }
void CopyRaw(byte* to, int number_of_bytes);
void CopyRaw(byte* to, int number_of_bytes) {
memcpy(to, data_ + position_, number_of_bytes);
position_ += number_of_bytes;
}
inline int GetInt() {
// This way of decoding variable-length encoded integers does not