Remove the js-function-inl.h inline header

As an experiment to see how performance is impacted when changing
inline definitions to normal definitions in a .cc file, this CL moves
js-function-inl.h to js-function.cc.

Bug: v8:10749
Change-Id: I97c3a0b7d20217f444c6891442bbe3c34f3b0cc9
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2315993
Commit-Queue: Leszek Swirski <leszeks@chromium.org>
Reviewed-by: Leszek Swirski <leszeks@chromium.org>
Cr-Commit-Position: refs/heads/master@{#69091}
This commit is contained in:
Jakob Gruber 2020-07-23 15:03:09 +02:00 committed by Commit Bot
parent ed70c77fa7
commit 67cdacd941
10 changed files with 374 additions and 339 deletions

View File

@@ -1040,7 +1040,6 @@ action("postmortem-metadata") {
"src/objects/js-array-inl.h",
"src/objects/js-function.cc",
"src/objects/js-function.h",
"src/objects/js-function-inl.h",
"src/objects/js-objects.cc",
"src/objects/js-objects.h",
"src/objects/js-objects-inl.h",
@@ -2835,7 +2834,6 @@ v8_source_set("v8_base_without_compiler") {
"src/objects/js-display-names-inl.h",
"src/objects/js-display-names.cc",
"src/objects/js-display-names.h",
"src/objects/js-function-inl.h",
"src/objects/js-function.cc",
"src/objects/js-function.h",
"src/objects/js-generator-inl.h",

View File

@@ -12,6 +12,7 @@
#include "src/objects/objects-inl.h"
#include "src/objects/slots.h"
#include "src/objects/visitors.h"
#include "src/utils/ostreams.h"
namespace v8 {
namespace internal {

View File

@@ -7,7 +7,7 @@
#include "src/execution/isolate.h"
#include "src/objects/cell-inl.h"
#include "src/objects/js-function-inl.h"
#include "src/objects/js-function.h"
#include "src/objects/objects-inl.h"
#include "src/objects/oddball.h"
#include "src/objects/property-cell.h"

View File

@@ -9,7 +9,6 @@
#include "src/objects/contexts.h"
#include "src/objects/dictionary-inl.h"
#include "src/objects/fixed-array-inl.h"
#include "src/objects/js-function-inl.h"
#include "src/objects/js-objects-inl.h"
#include "src/objects/map-inl.h"
#include "src/objects/objects-inl.h"

View File

@@ -1,289 +0,0 @@
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_OBJECTS_JS_FUNCTION_INL_H_
#define V8_OBJECTS_JS_FUNCTION_INL_H_
#include "src/diagnostics/code-tracer.h"
#include "src/objects/feedback-cell-inl.h"
// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"
namespace v8 {
namespace internal {
TQ_OBJECT_CONSTRUCTORS_IMPL(JSFunctionOrBoundFunction)
TQ_OBJECT_CONSTRUCTORS_IMPL(JSBoundFunction)
OBJECT_CONSTRUCTORS_IMPL(JSFunction, JSFunctionOrBoundFunction)
CAST_ACCESSOR(JSFunction)
ACCESSORS(JSFunction, raw_feedback_cell, FeedbackCell, kFeedbackCellOffset)
FeedbackVector JSFunction::feedback_vector() const {
DCHECK(has_feedback_vector());
return FeedbackVector::cast(raw_feedback_cell().value());
}
ClosureFeedbackCellArray JSFunction::closure_feedback_cell_array() const {
DCHECK(has_closure_feedback_cell_array());
return ClosureFeedbackCellArray::cast(raw_feedback_cell().value());
}
// Code objects that are marked for deoptimization are not considered to be
// optimized. This is because the JSFunction might have been already
// deoptimized but its code() still needs to be unlinked, which will happen on
// its next activation.
// TODO(jupvfranco): rename this function. Maybe RunOptimizedCode,
// or IsValidOptimizedCode.
bool JSFunction::IsOptimized() {
return is_compiled() && code().kind() == Code::OPTIMIZED_FUNCTION &&
!code().marked_for_deoptimization();
}
bool JSFunction::HasOptimizedCode() {
return IsOptimized() ||
(has_feedback_vector() && feedback_vector().has_optimized_code() &&
!feedback_vector().optimized_code().marked_for_deoptimization());
}
bool JSFunction::HasOptimizationMarker() {
return has_feedback_vector() && feedback_vector().has_optimization_marker();
}
void JSFunction::ClearOptimizationMarker() {
DCHECK(has_feedback_vector());
feedback_vector().ClearOptimizationMarker();
}
// Optimized code marked for deoptimization will tier back down to running
// interpreted on its next activation, and already doesn't count as IsOptimized.
bool JSFunction::IsInterpreted() {
return is_compiled() && (code().is_interpreter_trampoline_builtin() ||
(code().kind() == Code::OPTIMIZED_FUNCTION &&
code().marked_for_deoptimization()));
}
bool JSFunction::ChecksOptimizationMarker() {
return code().checks_optimization_marker();
}
bool JSFunction::IsMarkedForOptimization() {
return has_feedback_vector() && feedback_vector().optimization_marker() ==
OptimizationMarker::kCompileOptimized;
}
bool JSFunction::IsMarkedForConcurrentOptimization() {
return has_feedback_vector() &&
feedback_vector().optimization_marker() ==
OptimizationMarker::kCompileOptimizedConcurrent;
}
bool JSFunction::IsInOptimizationQueue() {
return has_feedback_vector() && feedback_vector().optimization_marker() ==
OptimizationMarker::kInOptimizationQueue;
}
void JSFunction::CompleteInobjectSlackTrackingIfActive() {
if (!has_prototype_slot()) return;
if (has_initial_map() && initial_map().IsInobjectSlackTrackingInProgress()) {
initial_map().CompleteInobjectSlackTracking(GetIsolate());
}
}
AbstractCode JSFunction::abstract_code() {
if (IsInterpreted()) {
return AbstractCode::cast(shared().GetBytecodeArray());
} else {
return AbstractCode::cast(code());
}
}
int JSFunction::length() { return shared().length(); }
Code JSFunction::code() const {
return Code::cast(RELAXED_READ_FIELD(*this, kCodeOffset));
}
void JSFunction::set_code(Code value) {
DCHECK(!ObjectInYoungGeneration(value));
RELAXED_WRITE_FIELD(*this, kCodeOffset, value);
#ifndef V8_DISABLE_WRITE_BARRIERS
WriteBarrier::Marking(*this, RawField(kCodeOffset), value);
#endif
}
void JSFunction::set_code_no_write_barrier(Code value) {
DCHECK(!ObjectInYoungGeneration(value));
RELAXED_WRITE_FIELD(*this, kCodeOffset, value);
}
// TODO(ishell): Why relaxed read but release store?
DEF_GETTER(JSFunction, shared, SharedFunctionInfo) {
return SharedFunctionInfo::cast(
RELAXED_READ_FIELD(*this, kSharedFunctionInfoOffset));
}
void JSFunction::set_shared(SharedFunctionInfo value, WriteBarrierMode mode) {
// Release semantics to support acquire read in NeedsResetDueToFlushedBytecode
RELEASE_WRITE_FIELD(*this, kSharedFunctionInfoOffset, value);
CONDITIONAL_WRITE_BARRIER(*this, kSharedFunctionInfoOffset, value, mode);
}
void JSFunction::ClearOptimizedCodeSlot(const char* reason) {
if (has_feedback_vector() && feedback_vector().has_optimized_code()) {
if (FLAG_trace_opt) {
CodeTracer::Scope scope(GetIsolate()->GetCodeTracer());
PrintF(scope.file(),
"[evicting entry from optimizing code feedback slot (%s) for ",
reason);
ShortPrint(scope.file());
PrintF(scope.file(), "]\n");
}
feedback_vector().ClearOptimizedCode();
}
}
void JSFunction::SetOptimizationMarker(OptimizationMarker marker) {
DCHECK(has_feedback_vector());
DCHECK(ChecksOptimizationMarker());
DCHECK(!HasOptimizedCode());
feedback_vector().SetOptimizationMarker(marker);
}
bool JSFunction::has_feedback_vector() const {
return shared().is_compiled() &&
raw_feedback_cell().value().IsFeedbackVector();
}
bool JSFunction::has_closure_feedback_cell_array() const {
return shared().is_compiled() &&
raw_feedback_cell().value().IsClosureFeedbackCellArray();
}
Context JSFunction::context() {
return TaggedField<Context, kContextOffset>::load(*this);
}
bool JSFunction::has_context() const {
return TaggedField<HeapObject, kContextOffset>::load(*this).IsContext();
}
JSGlobalProxy JSFunction::global_proxy() { return context().global_proxy(); }
NativeContext JSFunction::native_context() {
return context().native_context();
}
void JSFunction::set_context(HeapObject value) {
DCHECK(value.IsUndefined() || value.IsContext());
WRITE_FIELD(*this, kContextOffset, value);
WRITE_BARRIER(*this, kContextOffset, value);
}
ACCESSORS_CHECKED(JSFunction, prototype_or_initial_map, HeapObject,
kPrototypeOrInitialMapOffset, map().has_prototype_slot())
DEF_GETTER(JSFunction, has_prototype_slot, bool) {
return map(isolate).has_prototype_slot();
}
DEF_GETTER(JSFunction, initial_map, Map) {
return Map::cast(prototype_or_initial_map(isolate));
}
DEF_GETTER(JSFunction, has_initial_map, bool) {
DCHECK(has_prototype_slot(isolate));
return prototype_or_initial_map(isolate).IsMap(isolate);
}
DEF_GETTER(JSFunction, has_instance_prototype, bool) {
DCHECK(has_prototype_slot(isolate));
// Can't use ReadOnlyRoots(isolate) as this isolate could be produced by
// i::GetIsolateForPtrCompr(HeapObject).
return has_initial_map(isolate) ||
!prototype_or_initial_map(isolate).IsTheHole(
GetReadOnlyRoots(isolate));
}
DEF_GETTER(JSFunction, has_prototype, bool) {
DCHECK(has_prototype_slot(isolate));
return map(isolate).has_non_instance_prototype() ||
has_instance_prototype(isolate);
}
DEF_GETTER(JSFunction, has_prototype_property, bool) {
return (has_prototype_slot(isolate) && IsConstructor(isolate)) ||
IsGeneratorFunction(shared(isolate).kind());
}
DEF_GETTER(JSFunction, PrototypeRequiresRuntimeLookup, bool) {
return !has_prototype_property(isolate) ||
map(isolate).has_non_instance_prototype();
}
DEF_GETTER(JSFunction, instance_prototype, HeapObject) {
DCHECK(has_instance_prototype(isolate));
if (has_initial_map(isolate)) return initial_map(isolate).prototype(isolate);
// When there is no initial map and the prototype is a JSReceiver, the
// initial map field is used for the prototype field.
return HeapObject::cast(prototype_or_initial_map(isolate));
}
DEF_GETTER(JSFunction, prototype, Object) {
DCHECK(has_prototype(isolate));
// If the function's prototype property has been set to a non-JSReceiver
// value, that value is stored in the constructor field of the map.
if (map(isolate).has_non_instance_prototype()) {
Object prototype = map(isolate).GetConstructor(isolate);
// The map must have a prototype in that field, not a back pointer.
DCHECK(!prototype.IsMap(isolate));
DCHECK(!prototype.IsFunctionTemplateInfo(isolate));
return prototype;
}
return instance_prototype(isolate);
}
bool JSFunction::is_compiled() const {
return code().builtin_index() != Builtins::kCompileLazy &&
shared().is_compiled();
}
bool JSFunction::NeedsResetDueToFlushedBytecode() {
// Do a raw read for shared and code fields here since this function may be
// called on a concurrent thread and the JSFunction might not be fully
// initialized yet.
Object maybe_shared = ACQUIRE_READ_FIELD(*this, kSharedFunctionInfoOffset);
Object maybe_code = RELAXED_READ_FIELD(*this, kCodeOffset);
if (!maybe_shared.IsSharedFunctionInfo() || !maybe_code.IsCode()) {
return false;
}
SharedFunctionInfo shared = SharedFunctionInfo::cast(maybe_shared);
Code code = Code::cast(maybe_code);
return !shared.is_compiled() &&
code.builtin_index() != Builtins::kCompileLazy;
}
void JSFunction::ResetIfBytecodeFlushed(
base::Optional<std::function<void(HeapObject object, ObjectSlot slot,
HeapObject target)>>
gc_notify_updated_slot) {
if (FLAG_flush_bytecode && NeedsResetDueToFlushedBytecode()) {
// Bytecode was flushed and function is now uncompiled, reset JSFunction
// by setting code to CompileLazy and clearing the feedback vector.
set_code(GetIsolate()->builtins()->builtin(i::Builtins::kCompileLazy));
raw_feedback_cell().reset_feedback_vector(gc_notify_updated_slot);
}
}
} // namespace internal
} // namespace v8
#include "src/objects/object-macros-undef.h"
#endif // V8_OBJECTS_JS_FUNCTION_INL_H_

View File

@@ -5,15 +5,286 @@
#include "src/objects/js-function.h"
#include "src/codegen/compiler.h"
#include "src/diagnostics/code-tracer.h"
#include "src/heap/heap-inl.h"
#include "src/ic/ic.h"
#include "src/init/bootstrapper.h"
#include "src/objects/js-function-inl.h"
#include "src/objects/feedback-cell-inl.h"
#include "src/strings/string-builder-inl.h"
// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"
namespace v8 {
namespace internal {
TQ_OBJECT_CONSTRUCTORS_IMPL_NONINLINE(JSFunctionOrBoundFunction)
TQ_OBJECT_CONSTRUCTORS_IMPL_NONINLINE(JSBoundFunction)
OBJECT_CONSTRUCTORS_IMPL_NONINLINE(JSFunction, JSFunctionOrBoundFunction)
CAST_ACCESSOR(JSFunction)
ACCESSORS(JSFunction, raw_feedback_cell, FeedbackCell, kFeedbackCellOffset)
FeedbackVector JSFunction::feedback_vector() const {
DCHECK(has_feedback_vector());
return FeedbackVector::cast(raw_feedback_cell().value());
}
ClosureFeedbackCellArray JSFunction::closure_feedback_cell_array() const {
DCHECK(has_closure_feedback_cell_array());
return ClosureFeedbackCellArray::cast(raw_feedback_cell().value());
}
// Code objects that are marked for deoptimization are not considered to be
// optimized. This is because the JSFunction might have been already
// deoptimized but its code() still needs to be unlinked, which will happen on
// its next activation.
// TODO(jupvfranco): rename this function. Maybe RunOptimizedCode,
// or IsValidOptimizedCode.
bool JSFunction::IsOptimized() {
return is_compiled() && code().kind() == Code::OPTIMIZED_FUNCTION &&
!code().marked_for_deoptimization();
}
bool JSFunction::HasOptimizedCode() {
return IsOptimized() ||
(has_feedback_vector() && feedback_vector().has_optimized_code() &&
!feedback_vector().optimized_code().marked_for_deoptimization());
}
bool JSFunction::HasOptimizationMarker() {
return has_feedback_vector() && feedback_vector().has_optimization_marker();
}
void JSFunction::ClearOptimizationMarker() {
DCHECK(has_feedback_vector());
feedback_vector().ClearOptimizationMarker();
}
// Optimized code marked for deoptimization will tier back down to running
// interpreted on its next activation, and already doesn't count as IsOptimized.
bool JSFunction::IsInterpreted() {
return is_compiled() && (code().is_interpreter_trampoline_builtin() ||
(code().kind() == Code::OPTIMIZED_FUNCTION &&
code().marked_for_deoptimization()));
}
bool JSFunction::ChecksOptimizationMarker() {
return code().checks_optimization_marker();
}
bool JSFunction::IsMarkedForOptimization() {
return has_feedback_vector() && feedback_vector().optimization_marker() ==
OptimizationMarker::kCompileOptimized;
}
bool JSFunction::IsMarkedForConcurrentOptimization() {
return has_feedback_vector() &&
feedback_vector().optimization_marker() ==
OptimizationMarker::kCompileOptimizedConcurrent;
}
bool JSFunction::IsInOptimizationQueue() {
return has_feedback_vector() && feedback_vector().optimization_marker() ==
OptimizationMarker::kInOptimizationQueue;
}
void JSFunction::CompleteInobjectSlackTrackingIfActive() {
if (!has_prototype_slot()) return;
if (has_initial_map() && initial_map().IsInobjectSlackTrackingInProgress()) {
initial_map().CompleteInobjectSlackTracking(GetIsolate());
}
}
AbstractCode JSFunction::abstract_code() {
if (IsInterpreted()) {
return AbstractCode::cast(shared().GetBytecodeArray());
} else {
return AbstractCode::cast(code());
}
}
int JSFunction::length() { return shared().length(); }
Code JSFunction::code() const {
return Code::cast(RELAXED_READ_FIELD(*this, kCodeOffset));
}
void JSFunction::set_code(Code value) {
DCHECK(!ObjectInYoungGeneration(value));
RELAXED_WRITE_FIELD(*this, kCodeOffset, value);
#ifndef V8_DISABLE_WRITE_BARRIERS
WriteBarrier::Marking(*this, RawField(kCodeOffset), value);
#endif
}
void JSFunction::set_code_no_write_barrier(Code value) {
DCHECK(!ObjectInYoungGeneration(value));
RELAXED_WRITE_FIELD(*this, kCodeOffset, value);
}
// TODO(ishell): Why relaxed read but release store?
DEF_GETTER(JSFunction, shared, SharedFunctionInfo) {
return SharedFunctionInfo::cast(
RELAXED_READ_FIELD(*this, kSharedFunctionInfoOffset));
}
void JSFunction::set_shared(SharedFunctionInfo value, WriteBarrierMode mode) {
// Release semantics to support acquire read in NeedsResetDueToFlushedBytecode
RELEASE_WRITE_FIELD(*this, kSharedFunctionInfoOffset, value);
CONDITIONAL_WRITE_BARRIER(*this, kSharedFunctionInfoOffset, value, mode);
}
void JSFunction::ClearOptimizedCodeSlot(const char* reason) {
if (has_feedback_vector() && feedback_vector().has_optimized_code()) {
if (FLAG_trace_opt) {
CodeTracer::Scope scope(GetIsolate()->GetCodeTracer());
PrintF(scope.file(),
"[evicting entry from optimizing code feedback slot (%s) for ",
reason);
ShortPrint(scope.file());
PrintF(scope.file(), "]\n");
}
feedback_vector().ClearOptimizedCode();
}
}
void JSFunction::SetOptimizationMarker(OptimizationMarker marker) {
DCHECK(has_feedback_vector());
DCHECK(ChecksOptimizationMarker());
DCHECK(!HasOptimizedCode());
feedback_vector().SetOptimizationMarker(marker);
}
bool JSFunction::has_feedback_vector() const {
return shared().is_compiled() &&
raw_feedback_cell().value().IsFeedbackVector();
}
bool JSFunction::has_closure_feedback_cell_array() const {
return shared().is_compiled() &&
raw_feedback_cell().value().IsClosureFeedbackCellArray();
}
Context JSFunction::context() {
return TaggedField<Context, kContextOffset>::load(*this);
}
bool JSFunction::has_context() const {
return TaggedField<HeapObject, kContextOffset>::load(*this).IsContext();
}
JSGlobalProxy JSFunction::global_proxy() { return context().global_proxy(); }
NativeContext JSFunction::native_context() {
return context().native_context();
}
void JSFunction::set_context(HeapObject value) {
DCHECK(value.IsUndefined() || value.IsContext());
WRITE_FIELD(*this, kContextOffset, value);
WRITE_BARRIER(*this, kContextOffset, value);
}
ACCESSORS_CHECKED(JSFunction, prototype_or_initial_map, HeapObject,
kPrototypeOrInitialMapOffset, map().has_prototype_slot())
DEF_GETTER(JSFunction, has_prototype_slot, bool) {
return map(isolate).has_prototype_slot();
}
DEF_GETTER(JSFunction, initial_map, Map) {
return Map::cast(prototype_or_initial_map(isolate));
}
DEF_GETTER(JSFunction, has_initial_map, bool) {
DCHECK(has_prototype_slot(isolate));
return prototype_or_initial_map(isolate).IsMap(isolate);
}
DEF_GETTER(JSFunction, has_instance_prototype, bool) {
DCHECK(has_prototype_slot(isolate));
// Can't use ReadOnlyRoots(isolate) as this isolate could be produced by
// i::GetIsolateForPtrCompr(HeapObject).
return has_initial_map(isolate) ||
!prototype_or_initial_map(isolate).IsTheHole(
GetReadOnlyRoots(isolate));
}
DEF_GETTER(JSFunction, has_prototype, bool) {
DCHECK(has_prototype_slot(isolate));
return map(isolate).has_non_instance_prototype() ||
has_instance_prototype(isolate);
}
DEF_GETTER(JSFunction, has_prototype_property, bool) {
return (has_prototype_slot(isolate) && IsConstructor(isolate)) ||
IsGeneratorFunction(shared(isolate).kind());
}
DEF_GETTER(JSFunction, PrototypeRequiresRuntimeLookup, bool) {
return !has_prototype_property(isolate) ||
map(isolate).has_non_instance_prototype();
}
DEF_GETTER(JSFunction, instance_prototype, HeapObject) {
DCHECK(has_instance_prototype(isolate));
if (has_initial_map(isolate)) return initial_map(isolate).prototype(isolate);
// When there is no initial map and the prototype is a JSReceiver, the
// initial map field is used for the prototype field.
return HeapObject::cast(prototype_or_initial_map(isolate));
}
DEF_GETTER(JSFunction, prototype, Object) {
DCHECK(has_prototype(isolate));
// If the function's prototype property has been set to a non-JSReceiver
// value, that value is stored in the constructor field of the map.
if (map(isolate).has_non_instance_prototype()) {
Object prototype = map(isolate).GetConstructor(isolate);
// The map must have a prototype in that field, not a back pointer.
DCHECK(!prototype.IsMap(isolate));
DCHECK(!prototype.IsFunctionTemplateInfo(isolate));
return prototype;
}
return instance_prototype(isolate);
}
bool JSFunction::is_compiled() const {
return code().builtin_index() != Builtins::kCompileLazy &&
shared().is_compiled();
}
bool JSFunction::NeedsResetDueToFlushedBytecode() {
// Do a raw read for shared and code fields here since this function may be
// called on a concurrent thread and the JSFunction might not be fully
// initialized yet.
Object maybe_shared = ACQUIRE_READ_FIELD(*this, kSharedFunctionInfoOffset);
Object maybe_code = RELAXED_READ_FIELD(*this, kCodeOffset);
if (!maybe_shared.IsSharedFunctionInfo() || !maybe_code.IsCode()) {
return false;
}
SharedFunctionInfo shared = SharedFunctionInfo::cast(maybe_shared);
Code code = Code::cast(maybe_code);
return !shared.is_compiled() &&
code.builtin_index() != Builtins::kCompileLazy;
}
void JSFunction::ResetIfBytecodeFlushed(
base::Optional<std::function<void(HeapObject object, ObjectSlot slot,
HeapObject target)>>
gc_notify_updated_slot) {
if (FLAG_flush_bytecode && NeedsResetDueToFlushedBytecode()) {
// Bytecode was flushed and function is now uncompiled, reset JSFunction
// by setting code to CompileLazy and clearing the feedback vector.
set_code(GetIsolate()->builtins()->builtin(i::Builtins::kCompileLazy));
raw_feedback_cell().reset_feedback_vector(gc_notify_updated_slot);
}
}
// static
MaybeHandle<NativeContext> JSBoundFunction::GetFunctionRealm(
Handle<JSBoundFunction> function) {
@@ -820,3 +1091,5 @@ void JSFunction::ClearTypeFeedbackInfo() {
} // namespace internal
} // namespace v8
#include "src/objects/object-macros-undef.h"

View File

@@ -23,7 +23,7 @@ class JSFunctionOrBoundFunction
JSObject> {
public:
STATIC_ASSERT(kHeaderSize == JSObject::kHeaderSize);
TQ_OBJECT_CONSTRUCTORS(JSFunctionOrBoundFunction)
TQ_OBJECT_CONSTRUCTORS_NONINLINE(JSFunctionOrBoundFunction)
};
// JSBoundFunction describes a bound function exotic object.
@@ -46,18 +46,18 @@
// to ES6 section 19.2.3.5 Function.prototype.toString ( ).
static Handle<String> ToString(Handle<JSBoundFunction> function);
TQ_OBJECT_CONSTRUCTORS(JSBoundFunction)
TQ_OBJECT_CONSTRUCTORS_NONINLINE(JSBoundFunction)
};
// JSFunction describes JavaScript functions.
class JSFunction : public JSFunctionOrBoundFunction {
public:
// [prototype_or_initial_map]:
DECL_ACCESSORS(prototype_or_initial_map, HeapObject)
DECL_ACCESSORS_NONINLINE(prototype_or_initial_map, HeapObject)
// [shared]: The information about the function that
// can be shared by instances.
DECL_ACCESSORS(shared, SharedFunctionInfo)
DECL_ACCESSORS_NONINLINE(shared, SharedFunctionInfo)
static const int kLengthDescriptorIndex = 0;
static const int kNameDescriptorIndex = 1;
@@ -67,12 +67,12 @@ class JSFunction : public JSFunctionOrBoundFunction {
static const int kMinDescriptorsForFastBind = 2;
// [context]: The context for this function.
inline Context context();
inline bool has_context() const;
inline void set_context(HeapObject context);
inline JSGlobalProxy global_proxy();
inline NativeContext native_context();
inline int length();
V8_EXPORT_PRIVATE Context context();
bool has_context() const;
void set_context(HeapObject context);
JSGlobalProxy global_proxy();
V8_EXPORT_PRIVATE NativeContext native_context();
int length();
static Handle<Object> GetName(Isolate* isolate, Handle<JSFunction> function);
static Handle<NativeContext> GetFunctionRealm(Handle<JSFunction> function);
@@ -81,58 +81,58 @@
// when the function is invoked, e.g. foo() or new foo(). See
// [[Call]] and [[Construct]] description in ECMA-262, section
// 8.6.2, page 27.
inline Code code() const;
inline void set_code(Code code);
inline void set_code_no_write_barrier(Code code);
V8_EXPORT_PRIVATE Code code() const;
V8_EXPORT_PRIVATE void set_code(Code code);
void set_code_no_write_barrier(Code code);
// Get the abstract code associated with the function, which will either be
// a Code object or a BytecodeArray.
inline AbstractCode abstract_code();
V8_EXPORT_PRIVATE AbstractCode abstract_code();
// Tells whether or not this function is interpreted.
//
// Note: function->IsInterpreted() does not necessarily return the same value
// as function->shared()->IsInterpreted() because the closure might have been
// optimized.
inline bool IsInterpreted();
V8_EXPORT_PRIVATE bool IsInterpreted();
// Tells whether or not this function checks its optimization marker in its
// feedback vector.
inline bool ChecksOptimizationMarker();
bool ChecksOptimizationMarker();
// Tells whether or not this function holds optimized code.
//
// Note: Returning false does not necessarily mean that this function hasn't
// been optimized, as it may have optimized code on its feedback vector.
inline bool IsOptimized();
V8_EXPORT_PRIVATE bool IsOptimized();
// Tells whether or not this function has optimized code available to it,
// either because it is optimized or because it has optimized code in its
// feedback vector.
inline bool HasOptimizedCode();
bool HasOptimizedCode();
// Tells whether or not this function has a (non-zero) optimization marker.
inline bool HasOptimizationMarker();
bool HasOptimizationMarker();
// Mark this function for lazy recompilation. The function will be recompiled
// the next time it is executed.
void MarkForOptimization(ConcurrencyMode mode);
// Tells whether or not the function is already marked for lazy recompilation.
inline bool IsMarkedForOptimization();
inline bool IsMarkedForConcurrentOptimization();
bool IsMarkedForOptimization();
bool IsMarkedForConcurrentOptimization();
// Tells whether or not the function is on the concurrent recompilation queue.
inline bool IsInOptimizationQueue();
bool IsInOptimizationQueue();
// Clears the optimized code slot in the function's feedback vector.
inline void ClearOptimizedCodeSlot(const char* reason);
void ClearOptimizedCodeSlot(const char* reason);
// Sets the optimization marker in the function's feedback vector.
inline void SetOptimizationMarker(OptimizationMarker marker);
void SetOptimizationMarker(OptimizationMarker marker);
// Clears the optimization marker in the function's feedback vector.
inline void ClearOptimizationMarker();
void ClearOptimizationMarker();
// If slack tracking is active, it computes instance size of the initial map
// with minimum permissible object slack. If it is not active, it simply
@@ -140,19 +140,19 @@ class JSFunction : public JSFunctionOrBoundFunction {
int ComputeInstanceSizeWithMinSlack(Isolate* isolate);
// Completes inobject slack tracking on initial map if it is active.
inline void CompleteInobjectSlackTrackingIfActive();
void CompleteInobjectSlackTrackingIfActive();
// [raw_feedback_cell]: Gives raw access to the FeedbackCell used to hold the
/// FeedbackVector eventually. Generally this shouldn't be used to get the
// feedback_vector, instead use feedback_vector() which correctly deals with
// the JSFunction's bytecode being flushed.
DECL_ACCESSORS(raw_feedback_cell, FeedbackCell)
DECL_ACCESSORS_NONINLINE(raw_feedback_cell, FeedbackCell)
// Functions related to feedback vector. feedback_vector() can be used once
// the function has feedback vectors allocated. feedback vectors may not be
// available after compile when lazily allocating feedback vectors.
inline FeedbackVector feedback_vector() const;
inline bool has_feedback_vector() const;
V8_EXPORT_PRIVATE FeedbackVector feedback_vector() const;
V8_EXPORT_PRIVATE bool has_feedback_vector() const;
V8_EXPORT_PRIVATE static void EnsureFeedbackVector(
Handle<JSFunction> function, IsCompiledScope* compiled_scope);
@@ -160,8 +160,8 @@ class JSFunction : public JSFunctionOrBoundFunction {
// used to create closures from this function. We allocate closure feedback
// cell arrays after compile, when we want to allocate feedback vectors
// lazily.
inline bool has_closure_feedback_cell_array() const;
inline ClosureFeedbackCellArray closure_feedback_cell_array() const;
V8_EXPORT_PRIVATE bool has_closure_feedback_cell_array() const;
ClosureFeedbackCellArray closure_feedback_cell_array() const;
static void EnsureClosureFeedbackCellArray(Handle<JSFunction> function);
// Initializes the feedback cell of |function|. In lite mode, this would be
@@ -175,20 +175,20 @@ class JSFunction : public JSFunctionOrBoundFunction {
void ClearTypeFeedbackInfo();
// Resets function to clear compiled data after bytecode has been flushed.
inline bool NeedsResetDueToFlushedBytecode();
inline void ResetIfBytecodeFlushed(
bool NeedsResetDueToFlushedBytecode();
void ResetIfBytecodeFlushed(
base::Optional<std::function<void(HeapObject object, ObjectSlot slot,
HeapObject target)>>
gc_notify_updated_slot = base::nullopt);
DECL_GETTER(has_prototype_slot, bool)
DECL_GETTER_NONINLINE(has_prototype_slot, bool)
// The initial map for an object created by this constructor.
DECL_GETTER(initial_map, Map)
DECL_GETTER_NONINLINE(initial_map, Map)
static void SetInitialMap(Handle<JSFunction> function, Handle<Map> map,
Handle<HeapObject> prototype);
DECL_GETTER(has_initial_map, bool)
DECL_GETTER_NONINLINE(has_initial_map, bool)
V8_EXPORT_PRIVATE static void EnsureHasInitialMap(
Handle<JSFunction> function);
@@ -203,16 +203,16 @@ class JSFunction : public JSFunctionOrBoundFunction {
// function has an initial map the prototype is set on the initial
// map. Otherwise, the prototype is put in the initial map field
// until an initial map is needed.
DECL_GETTER(has_prototype, bool)
DECL_GETTER(has_instance_prototype, bool)
DECL_GETTER(prototype, Object)
DECL_GETTER(instance_prototype, HeapObject)
DECL_GETTER(has_prototype_property, bool)
DECL_GETTER(PrototypeRequiresRuntimeLookup, bool)
DECL_GETTER_NONINLINE(has_prototype, bool)
DECL_GETTER_NONINLINE(has_instance_prototype, bool)
DECL_GETTER_NONINLINE(prototype, Object)
DECL_GETTER_NONINLINE(instance_prototype, HeapObject)
DECL_GETTER_NONINLINE(has_prototype_property, bool)
DECL_GETTER_NONINLINE(PrototypeRequiresRuntimeLookup, bool)
static void SetPrototype(Handle<JSFunction> function, Handle<Object> value);
// Returns if this function has been compiled to native code yet.
inline bool is_compiled() const;
V8_EXPORT_PRIVATE bool is_compiled() const;
static int GetHeaderSize(bool function_has_prototype_slot) {
return function_has_prototype_slot ? JSFunction::kSizeWithPrototype
@@ -222,7 +222,7 @@ class JSFunction : public JSFunctionOrBoundFunction {
// Prints the name of the function using PrintF.
void PrintName(FILE* out = stdout);
DECL_CAST(JSFunction)
DECL_CAST_NONINLINE(JSFunction)
// Calculate the instance size and in-object properties count.
// {CalculateExpectedNofProperties} can trigger compilation.
@@ -281,7 +281,7 @@ class JSFunction : public JSFunctionOrBoundFunction {
static constexpr int kSizeWithoutPrototype = kPrototypeOrInitialMapOffset;
static constexpr int kSizeWithPrototype = FieldOffsets::kHeaderSize;
OBJECT_CONSTRUCTORS(JSFunction, JSFunctionOrBoundFunction);
OBJECT_CONSTRUCTORS_NONINLINE(JSFunction, JSFunctionOrBoundFunction);
};
} // namespace internal

View File

@@ -81,3 +81,10 @@
#undef DEFINE_DEOPT_ENTRY_ACCESSORS
#undef TQ_OBJECT_CONSTRUCTORS
#undef TQ_OBJECT_CONSTRUCTORS_IMPL
#undef OBJECT_CONSTRUCTORS_NONINLINE
#undef OBJECT_CONSTRUCTORS_IMPL_NONINLINE
#undef DECL_GETTER_NONINLINE
#undef DECL_ACCESSORS_NONINLINE
#undef DECL_CAST_NONINLINE
#undef TQ_OBJECT_CONSTRUCTORS_NONINLINE
#undef TQ_OBJECT_CONSTRUCTORS_IMPL_NONINLINE

View File

@@ -16,6 +16,19 @@
#include "src/base/memory.h"
// TODO(v8:10749): When we've had a chance to collect performance impact,
// either move towards using _NONINLINE versions everywhere, or revert back to
// inline use and remove _NONINLINE macros.
#define OBJECT_CONSTRUCTORS_NONINLINE(Type, ...) \
public: \
constexpr Type() : __VA_ARGS__() {} \
\
protected: \
template <typename TFieldType, int kFieldOffset> \
friend class TaggedField; \
\
V8_EXPORT_PRIVATE explicit Type(Address ptr)
// Since this changes visibility, it should always be last in a class
// definition.
#define OBJECT_CONSTRUCTORS(Type, ...) \
@@ -28,6 +41,9 @@
\
explicit inline Type(Address ptr)
#define OBJECT_CONSTRUCTORS_IMPL_NONINLINE(Type, Super) \
Type::Type(Address ptr) : Super(ptr) { SLOW_DCHECK(Is##Type()); }
#define OBJECT_CONSTRUCTORS_IMPL(Type, Super) \
inline Type::Type(Address ptr) : Super(ptr) { SLOW_DCHECK(Is##Type()); }
// In these cases, we don't have our own instance type to check, so check the
@@ -84,6 +100,10 @@
inline type name() const; \
inline type name(const Isolate* isolate) const;
#define DECL_GETTER_NONINLINE(name, type) \
V8_EXPORT_PRIVATE type name() const; \
V8_EXPORT_PRIVATE type name(const Isolate* isolate) const;
#define DEF_GETTER(holder, name, type) \
type holder::name() const { \
const Isolate* isolate = GetIsolateForPtrCompr(*this); \
@@ -96,6 +116,17 @@
inline void set_##name(type value, \
WriteBarrierMode mode = UPDATE_WRITE_BARRIER);
#define DECL_ACCESSORS_NONINLINE(name, type) \
DECL_GETTER_NONINLINE(name, type) \
V8_EXPORT_PRIVATE void set_##name( \
type value, WriteBarrierMode mode = UPDATE_WRITE_BARRIER);
#define DECL_CAST_NONINLINE(Type) \
V8_EXPORT_PRIVATE static Type cast(Object object); \
inline static Type unchecked_cast(Object object) { \
return bit_cast<Type>(object); \
}
#define DECL_CAST(Type) \
V8_INLINE static Type cast(Object object); \
V8_INLINE static Type unchecked_cast(Object object) { \
@@ -482,6 +513,20 @@
set(IndexForEntry(i) + k##name##Offset, value); \
}
#define TQ_OBJECT_CONSTRUCTORS_NONINLINE(Type) \
public: \
constexpr Type() = default; \
\
protected: \
template <typename TFieldType, int kFieldOffset> \
friend class TaggedField; \
\
V8_EXPORT_PRIVATE explicit Type(Address ptr); \
friend class TorqueGenerated##Type<Type, Super>;
#define TQ_OBJECT_CONSTRUCTORS_IMPL_NONINLINE(Type) \
Type::Type(Address ptr) : TorqueGenerated##Type<Type, Type::Super>(ptr) {}
#define TQ_OBJECT_CONSTRUCTORS(Type) \
public: \
constexpr Type() = default; \

View File

@ -8,6 +8,7 @@
#include "src/ast/scopes.h"
#include "src/codegen/compilation-cache.h"
#include "src/codegen/compiler.h"
#include "src/diagnostics/code-tracer.h"
#include "src/objects/shared-function-info-inl.h"
#include "src/strings/string-builder-inl.h"