[builtins] FastNewClosureStub becomes a builtin.

This aids TurboFan concurrent compilation, a general good: a builtin always
exists in the isolate's builtins table, so a background compilation job can
reference its code object directly instead of having to generate a stub (and
allocate on the heap) at lookup time.

TBR for Ross, on vacation...

TBR=rmcilroy@chromium.org

BUG=

Review-Url: https://codereview.chromium.org/2607563002
Cr-Commit-Position: refs/heads/master@{#41970}
Author: mvstanton
Committed: 2016-12-28 03:43:16 -08:00 by Commit bot
Parent: ee1b69a480
Commit: b063b34373

13 changed files with 201 additions and 181 deletions

BUILD.gn

@@ -944,6 +944,8 @@ v8_source_set("v8_base") {
     "src/builtins/builtins-boolean.cc",
     "src/builtins/builtins-call.cc",
     "src/builtins/builtins-callsite.cc",
+    "src/builtins/builtins-constructor.cc",
+    "src/builtins/builtins-constructor.h",
     "src/builtins/builtins-conversion.cc",
     "src/builtins/builtins-dataview.cc",
     "src/builtins/builtins-date.cc",

src/builtins/builtins-constructor.cc (new file)

@@ -0,0 +1,158 @@
// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/builtins/builtins-constructor.h"
#include "src/builtins/builtins-utils.h"
#include "src/builtins/builtins.h"
#include "src/code-factory.h"
#include "src/code-stub-assembler.h"
#include "src/interface-descriptors.h"

namespace v8 {
namespace internal {

typedef compiler::Node Node;

Node* ConstructorBuiltinsAssembler::EmitFastNewClosure(Node* shared_info,
                                                       Node* context) {
  typedef compiler::CodeAssembler::Label Label;
  typedef compiler::CodeAssembler::Variable Variable;

  Isolate* isolate = this->isolate();
  Factory* factory = isolate->factory();
  IncrementCounter(isolate->counters()->fast_new_closure_total(), 1);

  // Create a new closure from the given function info in new space
  Node* result = Allocate(JSFunction::kSize);

  // Calculate the index of the map we should install on the function based on
  // the FunctionKind and LanguageMode of the function.
  // Note: Must be kept in sync with Context::FunctionMapIndex
  Node* compiler_hints =
      LoadObjectField(shared_info, SharedFunctionInfo::kCompilerHintsOffset,
                      MachineType::Uint32());
  Node* is_strict = Word32And(
      compiler_hints, Int32Constant(1 << SharedFunctionInfo::kStrictModeBit));

  Label if_normal(this), if_generator(this), if_async(this),
      if_class_constructor(this), if_function_without_prototype(this),
      load_map(this);
  Variable map_index(this, MachineType::PointerRepresentation());

  STATIC_ASSERT(FunctionKind::kNormalFunction == 0);
  Node* is_not_normal =
      Word32And(compiler_hints,
                Int32Constant(SharedFunctionInfo::kAllFunctionKindBitsMask));
  GotoUnless(is_not_normal, &if_normal);

  Node* is_generator = Word32And(
      compiler_hints, Int32Constant(FunctionKind::kGeneratorFunction
                                    << SharedFunctionInfo::kFunctionKindShift));
  GotoIf(is_generator, &if_generator);

  Node* is_async = Word32And(
      compiler_hints, Int32Constant(FunctionKind::kAsyncFunction
                                    << SharedFunctionInfo::kFunctionKindShift));
  GotoIf(is_async, &if_async);

  Node* is_class_constructor = Word32And(
      compiler_hints, Int32Constant(FunctionKind::kClassConstructor
                                    << SharedFunctionInfo::kFunctionKindShift));
  GotoIf(is_class_constructor, &if_class_constructor);

  if (FLAG_debug_code) {
    // Function must be a function without a prototype.
    CSA_ASSERT(
        this,
        Word32And(compiler_hints,
                  Int32Constant((FunctionKind::kAccessorFunction |
                                 FunctionKind::kArrowFunction |
                                 FunctionKind::kConciseMethod)
                                << SharedFunctionInfo::kFunctionKindShift)));
  }
  Goto(&if_function_without_prototype);

  Bind(&if_normal);
  {
    map_index.Bind(SelectIntPtrConstant(is_strict,
                                        Context::STRICT_FUNCTION_MAP_INDEX,
                                        Context::SLOPPY_FUNCTION_MAP_INDEX));
    Goto(&load_map);
  }

  Bind(&if_generator);
  {
    map_index.Bind(SelectIntPtrConstant(
        is_strict, Context::STRICT_GENERATOR_FUNCTION_MAP_INDEX,
        Context::SLOPPY_GENERATOR_FUNCTION_MAP_INDEX));
    Goto(&load_map);
  }

  Bind(&if_async);
  {
    map_index.Bind(SelectIntPtrConstant(
        is_strict, Context::STRICT_ASYNC_FUNCTION_MAP_INDEX,
        Context::SLOPPY_ASYNC_FUNCTION_MAP_INDEX));
    Goto(&load_map);
  }

  Bind(&if_class_constructor);
  {
    map_index.Bind(IntPtrConstant(Context::CLASS_FUNCTION_MAP_INDEX));
    Goto(&load_map);
  }

  Bind(&if_function_without_prototype);
  {
    map_index.Bind(
        IntPtrConstant(Context::STRICT_FUNCTION_WITHOUT_PROTOTYPE_MAP_INDEX));
    Goto(&load_map);
  }

  Bind(&load_map);

  // Get the function map in the current native context and set that
  // as the map of the allocated object.
  Node* native_context = LoadNativeContext(context);
  Node* map_slot_value =
      LoadFixedArrayElement(native_context, map_index.value());
  StoreMapNoWriteBarrier(result, map_slot_value);

  // Initialize the rest of the function.
  Node* empty_fixed_array = HeapConstant(factory->empty_fixed_array());
  Node* empty_literals_array = HeapConstant(factory->empty_literals_array());
  StoreObjectFieldNoWriteBarrier(result, JSObject::kPropertiesOffset,
                                 empty_fixed_array);
  StoreObjectFieldNoWriteBarrier(result, JSObject::kElementsOffset,
                                 empty_fixed_array);
  StoreObjectFieldNoWriteBarrier(result, JSFunction::kLiteralsOffset,
                                 empty_literals_array);
  StoreObjectFieldNoWriteBarrier(
      result, JSFunction::kPrototypeOrInitialMapOffset, TheHoleConstant());
  StoreObjectFieldNoWriteBarrier(result, JSFunction::kSharedFunctionInfoOffset,
                                 shared_info);
  StoreObjectFieldNoWriteBarrier(result, JSFunction::kContextOffset, context);
  Handle<Code> lazy_builtin_handle(
      isolate->builtins()->builtin(Builtins::kCompileLazy));
  Node* lazy_builtin = HeapConstant(lazy_builtin_handle);
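  // Note: a Code object is a tagged HeapObject pointer; untagging it and
  // skipping past the Code header below yields the raw entry address of the
  // lazy-compile builtin.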
  Node* lazy_builtin_entry =
      IntPtrAdd(BitcastTaggedToWord(lazy_builtin),
                IntPtrConstant(Code::kHeaderSize - kHeapObjectTag));
  StoreObjectFieldNoWriteBarrier(result, JSFunction::kCodeEntryOffset,
                                 lazy_builtin_entry,
                                 MachineType::PointerRepresentation());
  StoreObjectFieldNoWriteBarrier(result, JSFunction::kNextFunctionLinkOffset,
                                 UndefinedConstant());

  return result;
}

TF_BUILTIN(FastNewClosure, ConstructorBuiltinsAssembler) {
  Node* shared = Parameter(FastNewClosureDescriptor::kSharedFunctionInfo);
  Node* context = Parameter(FastNewClosureDescriptor::kContext);
  Return(EmitFastNewClosure(shared, context));
}

}  // namespace internal
}  // namespace v8
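
The branch structure above has to mirror Context::FunctionMapIndex (see the
contexts.h hunk below). As a reading aid, here is a rough standalone C++ sketch
of the same selection logic; the enum names and shape are illustrative
stand-ins, not V8's real bitfield encoding of the compiler hints:

#include <cstdio>

// Illustrative stand-ins for V8's context map indices and function kinds.
enum MapIndex {
  SLOPPY_FUNCTION_MAP_INDEX, STRICT_FUNCTION_MAP_INDEX,
  SLOPPY_GENERATOR_FUNCTION_MAP_INDEX, STRICT_GENERATOR_FUNCTION_MAP_INDEX,
  SLOPPY_ASYNC_FUNCTION_MAP_INDEX, STRICT_ASYNC_FUNCTION_MAP_INDEX,
  CLASS_FUNCTION_MAP_INDEX, STRICT_FUNCTION_WITHOUT_PROTOTYPE_MAP_INDEX
};
enum FunctionKind { kNormal, kGenerator, kAsync, kClassConstructor, kOther };

MapIndex FunctionMapIndex(bool is_strict, FunctionKind kind) {
  switch (kind) {
    case kNormal:
      return is_strict ? STRICT_FUNCTION_MAP_INDEX : SLOPPY_FUNCTION_MAP_INDEX;
    case kGenerator:
      return is_strict ? STRICT_GENERATOR_FUNCTION_MAP_INDEX
                       : SLOPPY_GENERATOR_FUNCTION_MAP_INDEX;
    case kAsync:
      return is_strict ? STRICT_ASYNC_FUNCTION_MAP_INDEX
                       : SLOPPY_ASYNC_FUNCTION_MAP_INDEX;
    case kClassConstructor:
      return CLASS_FUNCTION_MAP_INDEX;  // class bodies are always strict
    default:  // accessors, arrow functions, concise methods
      return STRICT_FUNCTION_WITHOUT_PROTOTYPE_MAP_INDEX;
  }
}

int main() {
  // Example: a strict-mode generator picks the strict generator map.
  std::printf("%d\n", FunctionMapIndex(true, kGenerator));
  return 0;
}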

src/builtins/builtins-constructor.h (new file)

@@ -0,0 +1,22 @@
// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/code-stub-assembler.h"

namespace v8 {
namespace internal {

typedef compiler::Node Node;
typedef compiler::CodeAssemblerState CodeAssemblerState;

class ConstructorBuiltinsAssembler : public CodeStubAssembler {
 public:
  explicit ConstructorBuiltinsAssembler(CodeAssemblerState* state)
      : CodeStubAssembler(state) {}

  Node* EmitFastNewClosure(Node* shared_info, Node* context);
};

}  // namespace internal
}  // namespace v8
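
Because EmitFastNewClosure is a method on a CodeStubAssembler subclass rather
than a static stub generator, any other assembler can instantiate the helper
over its own CodeAssemblerState and inline the closure-allocation code, as the
interpreter.cc hunk below does. A condensed sketch of that reuse pattern (the
surrounding assembler and its shared/context nodes are assumed context, not
shown here):

  ConstructorBuiltinsAssembler constructor_assembler(assembler->state());
  Node* closure = constructor_assembler.EmitFastNewClosure(shared, context);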

src/builtins/builtins.h

@@ -50,6 +50,8 @@ namespace internal {
   /* Code aging */ \
   CODE_AGE_LIST_WITH_ARG(DECLARE_CODE_AGE_BUILTIN, ASM) \
   \
+  /* Declared first for dependency reasons */ \
+  ASM(CompileLazy) \
   TFS(ToObject, BUILTIN, kNoExtraICState, TypeConversion) \
   \
   /* Calls */ \
@@ -86,6 +88,7 @@ namespace internal {
   ASM(JSConstructStubGeneric) \
   ASM(JSBuiltinsConstructStub) \
   ASM(JSBuiltinsConstructStubForDerived) \
+  TFS(FastNewClosure, BUILTIN, kNoExtraICState, FastNewClosure) \
   \
   /* Apply and entries */ \
   ASM(Apply) \
@@ -121,7 +124,6 @@ namespace internal {
   ASM(InterpreterOnStackReplacement) \
   \
   /* Code life-cycle */ \
-  ASM(CompileLazy) \
   ASM(CompileBaseline) \
   ASM(CompileOptimized) \
   ASM(CompileOptimizedConcurrent) \
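
(The reordering above is load-bearing: EmitFastNewClosure embeds the
CompileLazy code object as a HeapConstant, so CompileLazy must already have
been generated when the builtins table reaches FastNewClosure; declaring it
first guarantees that.)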

src/code-factory.cc

@@ -377,8 +377,8 @@ Callable CodeFactory::FastNewFunctionContext(Isolate* isolate,
 // static
 Callable CodeFactory::FastNewClosure(Isolate* isolate) {
-  FastNewClosureStub stub(isolate);
-  return make_callable(stub);
+  return Callable(isolate->builtins()->FastNewClosure(),
+                  FastNewClosureDescriptor(isolate));
 }
 
 // static
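
The returned Callable just pairs the builtin's code handle with the
FastNewClosure interface descriptor, so call sites can stay descriptor-driven.
A condensed sketch of the calling pattern the full-codegen.cc hunk below
adopts (the __ macro-assembler context and the info handle are assumed, not
shown):

  Callable callable = CodeFactory::FastNewClosure(isolate());
  __ Move(callable.descriptor().GetRegisterParameter(0), info);
  __ Call(callable.code(), RelocInfo::CODE_TARGET);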

src/code-stubs.cc

@@ -2330,161 +2330,6 @@ void GetPropertyStub::GenerateAssembly(
   assembler.Return(var_result.value());
 }
 
-// static
-compiler::Node* FastNewClosureStub::Generate(CodeStubAssembler* assembler,
-                                             compiler::Node* shared_info,
-                                             compiler::Node* context) {
-  typedef compiler::Node Node;
-  typedef compiler::CodeAssembler::Label Label;
-  typedef compiler::CodeAssembler::Variable Variable;
-
-  Isolate* isolate = assembler->isolate();
-  Factory* factory = assembler->isolate()->factory();
-  assembler->IncrementCounter(isolate->counters()->fast_new_closure_total(), 1);
-
-  // Create a new closure from the given function info in new space
-  Node* result = assembler->Allocate(JSFunction::kSize);
-
-  // Calculate the index of the map we should install on the function based on
-  // the FunctionKind and LanguageMode of the function.
-  // Note: Must be kept in sync with Context::FunctionMapIndex
-  Node* compiler_hints = assembler->LoadObjectField(
-      shared_info, SharedFunctionInfo::kCompilerHintsOffset,
-      MachineType::Uint32());
-  Node* is_strict = assembler->Word32And(
-      compiler_hints,
-      assembler->Int32Constant(1 << SharedFunctionInfo::kStrictModeBit));
-
-  Label if_normal(assembler), if_generator(assembler), if_async(assembler),
-      if_class_constructor(assembler), if_function_without_prototype(assembler),
-      load_map(assembler);
-  Variable map_index(assembler, MachineType::PointerRepresentation());
-
-  STATIC_ASSERT(FunctionKind::kNormalFunction == 0);
-  Node* is_not_normal = assembler->Word32And(
-      compiler_hints,
-      assembler->Int32Constant(SharedFunctionInfo::kAllFunctionKindBitsMask));
-  assembler->GotoUnless(is_not_normal, &if_normal);
-
-  Node* is_generator = assembler->Word32And(
-      compiler_hints,
-      assembler->Int32Constant(FunctionKind::kGeneratorFunction
-                               << SharedFunctionInfo::kFunctionKindShift));
-  assembler->GotoIf(is_generator, &if_generator);
-
-  Node* is_async = assembler->Word32And(
-      compiler_hints,
-      assembler->Int32Constant(FunctionKind::kAsyncFunction
-                               << SharedFunctionInfo::kFunctionKindShift));
-  assembler->GotoIf(is_async, &if_async);
-
-  Node* is_class_constructor = assembler->Word32And(
-      compiler_hints,
-      assembler->Int32Constant(FunctionKind::kClassConstructor
-                               << SharedFunctionInfo::kFunctionKindShift));
-  assembler->GotoIf(is_class_constructor, &if_class_constructor);
-
-  if (FLAG_debug_code) {
-    // Function must be a function without a prototype.
-    CSA_ASSERT(assembler, assembler->Word32And(
-                              compiler_hints,
-                              assembler->Int32Constant(
-                                  (FunctionKind::kAccessorFunction |
-                                   FunctionKind::kArrowFunction |
-                                   FunctionKind::kConciseMethod)
-                                  << SharedFunctionInfo::kFunctionKindShift)));
-  }
-  assembler->Goto(&if_function_without_prototype);
-
-  assembler->Bind(&if_normal);
-  {
-    map_index.Bind(assembler->SelectIntPtrConstant(
-        is_strict, Context::STRICT_FUNCTION_MAP_INDEX,
-        Context::SLOPPY_FUNCTION_MAP_INDEX));
-    assembler->Goto(&load_map);
-  }
-
-  assembler->Bind(&if_generator);
-  {
-    map_index.Bind(assembler->SelectIntPtrConstant(
-        is_strict, Context::STRICT_GENERATOR_FUNCTION_MAP_INDEX,
-        Context::SLOPPY_GENERATOR_FUNCTION_MAP_INDEX));
-    assembler->Goto(&load_map);
-  }
-
-  assembler->Bind(&if_async);
-  {
-    map_index.Bind(assembler->SelectIntPtrConstant(
-        is_strict, Context::STRICT_ASYNC_FUNCTION_MAP_INDEX,
-        Context::SLOPPY_ASYNC_FUNCTION_MAP_INDEX));
-    assembler->Goto(&load_map);
-  }
-
-  assembler->Bind(&if_class_constructor);
-  {
-    map_index.Bind(
-        assembler->IntPtrConstant(Context::CLASS_FUNCTION_MAP_INDEX));
-    assembler->Goto(&load_map);
-  }
-
-  assembler->Bind(&if_function_without_prototype);
-  {
-    map_index.Bind(assembler->IntPtrConstant(
-        Context::STRICT_FUNCTION_WITHOUT_PROTOTYPE_MAP_INDEX));
-    assembler->Goto(&load_map);
-  }
-
-  assembler->Bind(&load_map);
-
-  // Get the function map in the current native context and set that
-  // as the map of the allocated object.
-  Node* native_context = assembler->LoadNativeContext(context);
-  Node* map_slot_value =
-      assembler->LoadFixedArrayElement(native_context, map_index.value());
-  assembler->StoreMapNoWriteBarrier(result, map_slot_value);
-
-  // Initialize the rest of the function.
-  Node* empty_fixed_array =
-      assembler->HeapConstant(factory->empty_fixed_array());
-  Node* empty_literals_array =
-      assembler->HeapConstant(factory->empty_literals_array());
-  assembler->StoreObjectFieldNoWriteBarrier(result, JSObject::kPropertiesOffset,
-                                            empty_fixed_array);
-  assembler->StoreObjectFieldNoWriteBarrier(result, JSObject::kElementsOffset,
-                                            empty_fixed_array);
-  assembler->StoreObjectFieldNoWriteBarrier(result, JSFunction::kLiteralsOffset,
-                                            empty_literals_array);
-  assembler->StoreObjectFieldNoWriteBarrier(
-      result, JSFunction::kPrototypeOrInitialMapOffset,
-      assembler->TheHoleConstant());
-  assembler->StoreObjectFieldNoWriteBarrier(
-      result, JSFunction::kSharedFunctionInfoOffset, shared_info);
-  assembler->StoreObjectFieldNoWriteBarrier(result, JSFunction::kContextOffset,
-                                            context);
-  Handle<Code> lazy_builtin_handle(
-      assembler->isolate()->builtins()->builtin(Builtins::kCompileLazy));
-  Node* lazy_builtin = assembler->HeapConstant(lazy_builtin_handle);
-  Node* lazy_builtin_entry = assembler->IntPtrAdd(
-      assembler->BitcastTaggedToWord(lazy_builtin),
-      assembler->IntPtrConstant(Code::kHeaderSize - kHeapObjectTag));
-  assembler->StoreObjectFieldNoWriteBarrier(
-      result, JSFunction::kCodeEntryOffset, lazy_builtin_entry,
-      MachineType::PointerRepresentation());
-  assembler->StoreObjectFieldNoWriteBarrier(result,
-                                            JSFunction::kNextFunctionLinkOffset,
-                                            assembler->UndefinedConstant());
-
-  return result;
-}
-
-void FastNewClosureStub::GenerateAssembly(
-    compiler::CodeAssemblerState* state) const {
-  typedef compiler::Node Node;
-  CodeStubAssembler assembler(state);
-  Node* shared = assembler.Parameter(Descriptor::kSharedFunctionInfo);
-  Node* context = assembler.Parameter(Descriptor::kContext);
-  assembler.Return(Generate(&assembler, shared, context));
-}
-
 // static
 int FastNewFunctionContextStub::MaximumSlots() {
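
(The removed body is the same CSA sequence as the new
ConstructorBuiltinsAssembler::EmitFastNewClosure above; the port drops the
explicit assembler-> receivers now that the code lives on the assembler
itself.)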

src/code-stubs.h

@@ -102,7 +102,6 @@ class Node;
   V(FastCloneRegExp) \
   V(FastCloneShallowArray) \
   V(FastCloneShallowObject) \
-  V(FastNewClosure) \
   V(FastNewFunctionContext) \
   V(KeyedLoadSloppyArguments) \
   V(KeyedStoreSloppyArguments) \
@@ -788,18 +787,6 @@ class NumberToStringStub final : public TurboFanCodeStub {
   DEFINE_TURBOFAN_CODE_STUB(NumberToString, TurboFanCodeStub);
 };
 
-class FastNewClosureStub : public TurboFanCodeStub {
- public:
-  explicit FastNewClosureStub(Isolate* isolate) : TurboFanCodeStub(isolate) {}
-
-  static compiler::Node* Generate(CodeStubAssembler* assembler,
-                                  compiler::Node* shared_info,
-                                  compiler::Node* context);
-
-  DEFINE_CALL_INTERFACE_DESCRIPTOR(FastNewClosure);
-  DEFINE_TURBOFAN_CODE_STUB(FastNewClosure, TurboFanCodeStub);
-};
-
 class FastNewFunctionContextStub final : public TurboFanCodeStub {
  public:
   static int MaximumSlots();

src/compiler/js-generic-lowering.cc

@@ -344,7 +344,8 @@ void JSGenericLowering::LowerJSCreateClosure(Node* node) {
   Handle<SharedFunctionInfo> const shared_info = p.shared_info();
   node->InsertInput(zone(), 0, jsgraph()->HeapConstant(shared_info));
 
-  // Use the FastNewClosureStub only for functions allocated in new space.
+  // Use the FastNewClosure builtin only for functions allocated in new
+  // space.
   if (p.pretenure() == NOT_TENURED) {
     Callable callable = CodeFactory::FastNewClosure(isolate());
     ReplaceWithStubCall(node, callable, flags);

src/contexts.h

@@ -638,7 +638,7 @@ class Context: public FixedArray {
   }
 
   static int FunctionMapIndex(LanguageMode language_mode, FunctionKind kind) {
-    // Note: Must be kept in sync with FastNewClosureStub::Generate.
+    // Note: Must be kept in sync with the FastNewClosure builtin.
     if (IsGeneratorFunction(kind)) {
       return is_strict(language_mode) ? STRICT_GENERATOR_FUNCTION_MAP_INDEX
                                       : SLOPPY_GENERATOR_FUNCTION_MAP_INDEX;

src/crankshaft/hydrogen.cc

@@ -5052,11 +5052,10 @@ void HOptimizedGraphBuilder::VisitFunctionLiteral(FunctionLiteral* expr) {
   HConstant* shared_info_value = Add<HConstant>(shared_info);
   HInstruction* instr;
   if (!expr->pretenure()) {
-    FastNewClosureStub stub(isolate());
-    FastNewClosureDescriptor descriptor(isolate());
+    Callable callable = CodeFactory::FastNewClosure(isolate());
     HValue* values[] = {shared_info_value};
-    HConstant* stub_value = Add<HConstant>(stub.GetCode());
-    instr = New<HCallWithDescriptor>(stub_value, 0, descriptor,
+    HConstant* stub_value = Add<HConstant>(callable.code());
+    instr = New<HCallWithDescriptor>(stub_value, 0, callable.descriptor(),
                                      ArrayVector(values));
   } else {
     Add<HPushArguments>(shared_info_value);

src/full-codegen/full-codegen.cc

@@ -1030,9 +1030,9 @@ void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
   // doesn't just get a copy of the existing unoptimized code.
   if (!FLAG_always_opt && !FLAG_prepare_always_opt && !pretenure &&
       scope()->is_function_scope()) {
-    FastNewClosureStub stub(isolate());
-    __ Move(stub.GetCallInterfaceDescriptor().GetRegisterParameter(0), info);
-    __ CallStub(&stub);
+    Callable callable = CodeFactory::FastNewClosure(isolate());
+    __ Move(callable.descriptor().GetRegisterParameter(0), info);
+    __ Call(callable.code(), RelocInfo::CODE_TARGET);
   } else {
     __ Push(info);
     __ CallRuntime(pretenure ? Runtime::kNewClosure_Tenured

src/interpreter/interpreter.cc

@@ -8,6 +8,7 @@
 #include <memory>
 
 #include "src/ast/prettyprinter.h"
+#include "src/builtins/builtins-constructor.h"
 #include "src/code-factory.h"
 #include "src/compilation-info.h"
 #include "src/compiler.h"
@@ -2397,7 +2398,8 @@ void Interpreter::DoCreateClosure(InterpreterAssembler* assembler) {
   Label call_runtime(assembler, Label::kDeferred);
   __ GotoUnless(__ IsSetWord32<CreateClosureFlags::FastNewClosureBit>(flags),
                 &call_runtime);
-  __ SetAccumulator(FastNewClosureStub::Generate(assembler, shared, context));
+  ConstructorBuiltinsAssembler constructor_assembler(assembler->state());
+  __ SetAccumulator(constructor_assembler.EmitFastNewClosure(shared, context));
   __ Dispatch();
 
   __ Bind(&call_runtime);

src/v8.gyp

@@ -484,6 +484,8 @@
       'builtins/builtins-call.cc',
       'builtins/builtins-callsite.cc',
       'builtins/builtins-conversion.cc',
+      'builtins/builtins-constructor.cc',
+      'builtins/builtins-constructor.h',
       'builtins/builtins-dataview.cc',
       'builtins/builtins-date.cc',
       'builtins/builtins-debug.cc',