[stubs] A new approach to TF stubs

* Add a sibling interface to InterpreterAssembler called
  CodeStubAssembler, which wraps the RawMachineAssembler and is
  intended to make it easy to build efficient, cross-platform
  code stubs (see the sketch below). Much of the implementation
  of CodeStubAssembler is shamelessly stolen from the
  InterpreterAssembler, and the idea is to eventually merge the
  two interfaces somehow, probably by moving the
  InterpreterAssembler interface over to use the
  CodeStubAssembler. Short-term, however, the two interfaces
  remain decoupled to increase our velocity developing the two
  systems in parallel.
* Implement the StringLength stub in TurboFan with the new
  CodeStubAssembler. Replace and remove the old Hydrogen-stub
  version.
* Remove a whole slew of machinery to support JavaScript-style
  code stub generation, since it ultimately proved unwieldy,
  brittle and baroque. This cleanup includes removing the shared
  code stub context, several example stubs and a tangle of build
  file changes.

BUG=v8:4587
LOG=n

Review URL: https://codereview.chromium.org/1475953002

Cr-Commit-Position: refs/heads/master@{#32508}
danno 2015-12-02 04:35:12 -08:00 committed by Commit bot
parent 2377170d07
commit 3e7e3ed726
53 changed files with 645 additions and 906 deletions
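For orientation, a minimal sketch of the pattern this change introduces, modelled on the StringLengthStub diff below. The LoadValueStub name is purely illustrative and not part of this CL:

// Hedged sketch: a TurboFan stub now subclasses TurboFanCodeStub and
// describes its body as a graph via compiler::CodeStubAssembler.
class LoadValueStub : public TurboFanCodeStub {
 public:
  explicit LoadValueStub(Isolate* isolate) : TurboFanCodeStub(isolate) {}

  // The stub body is expressed as a TurboFan graph rather than as
  // JavaScript or a Hydrogen graph.
  void GenerateAssembly(
      compiler::CodeStubAssembler* assembler) const override {
    compiler::Node* receiver = assembler->Parameter(0);
    // Load the wrapped value out of a JSValue and return it.
    assembler->Return(
        assembler->LoadObjectField(receiver, JSValue::kValueOffset));
  }

  DEFINE_CALL_INTERFACE_DESCRIPTOR(LoadWithVector);
  DEFINE_CODE_STUB(LoadValue, TurboFanCodeStub);
};

The GenerateAssembly() override is all a stub has to provide; the shared TurboFanCodeStub::GenerateCode() in src/code-stubs.cc (below) builds the graph and hands it to the TurboFan pipeline.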

View File

@ -266,40 +266,6 @@ action("js2c") {
}
}
action("js2c_code_stubs") {
visibility = [ ":*" ] # Only targets in this file can depend on this.
script = "tools/js2c.py"
# The script depends on this other script, this rule causes a rebuild if it
# changes.
inputs = [ "tools/jsmin.py" ]
sources = [
"src/js/macros.py",
"src/messages.h",
"src/js/code-stubs.js"
]
outputs = [
"$target_gen_dir/code-stub-libraries.cc",
]
args = [
rebase_path("$target_gen_dir/code-stub-libraries.cc",
root_build_dir),
"CODE_STUB",
] + rebase_path(sources, root_build_dir)
if (v8_use_external_startup_data) {
outputs += [ "$target_gen_dir/libraries_code_stub.bin" ]
args += [
"--startup_blob",
rebase_path("$target_gen_dir/libraries_code_stub.bin", root_build_dir),
]
}
}
action("js2c_experimental") {
visibility = [ ":*" ] # Only targets in this file can depend on this.
@ -440,7 +406,6 @@ if (v8_use_external_startup_data) {
deps = [
":js2c",
":js2c_code_stubs",
":js2c_experimental",
":js2c_extras",
":js2c_experimental_extras",
@ -448,7 +413,6 @@ if (v8_use_external_startup_data) {
sources = [
"$target_gen_dir/libraries.bin",
"$target_gen_dir/libraries_code_stub.bin",
"$target_gen_dir/libraries_experimental.bin",
"$target_gen_dir/libraries_extras.bin",
"$target_gen_dir/libraries_experimental_extras.bin",
@ -536,7 +500,6 @@ source_set("v8_nosnapshot") {
deps = [
":js2c",
":js2c_code_stubs",
":js2c_experimental",
":js2c_extras",
":js2c_experimental_extras",
@ -545,7 +508,6 @@ source_set("v8_nosnapshot") {
sources = [
"$target_gen_dir/libraries.cc",
"$target_gen_dir/code-stub-libraries.cc",
"$target_gen_dir/experimental-libraries.cc",
"$target_gen_dir/extras-libraries.cc",
"$target_gen_dir/experimental-extras-libraries.cc",
@ -571,7 +533,6 @@ source_set("v8_snapshot") {
deps = [
":js2c",
":js2c_code_stubs",
":js2c_experimental",
":js2c_extras",
":js2c_experimental_extras",
@ -585,7 +546,6 @@ source_set("v8_snapshot") {
sources = [
"$target_gen_dir/libraries.cc",
"$target_gen_dir/code-stub-libraries.cc",
"$target_gen_dir/experimental-libraries.cc",
"$target_gen_dir/extras-libraries.cc",
"$target_gen_dir/experimental-extras-libraries.cc",
@ -607,7 +567,6 @@ if (v8_use_external_startup_data) {
deps = [
":js2c",
":js2c_code_stubs",
":js2c_experimental",
":js2c_extras",
":js2c_experimental_extras",
@ -745,6 +704,8 @@ source_set("v8_base") {
"src/compiler/code-generator-impl.h",
"src/compiler/code-generator.cc",
"src/compiler/code-generator.h",
"src/compiler/code-stub-assembler.cc",
"src/compiler/code-stub-assembler.h",
"src/compiler/common-node-cache.cc",
"src/compiler/common-node-cache.h",
"src/compiler/common-operator-reducer.cc",

View File

@ -2,6 +2,7 @@ include_rules = [
"+src",
"-src/compiler",
"+src/compiler/pipeline.h",
"+src/compiler/code-stub-assembler.h",
"-src/heap",
"+src/heap/heap.h",
"+src/heap/heap-inl.h",

View File

@ -416,27 +416,6 @@ void ApiAccessorDescriptor::InitializePlatformSpecific(
}
void MathRoundVariantCallFromUnoptimizedCodeDescriptor::
InitializePlatformSpecific(CallInterfaceDescriptorData* data) {
Register registers[] = {
r1, // math rounding function
r3, // vector slot id
};
data->InitializePlatformSpecific(arraysize(registers), registers);
}
void MathRoundVariantCallFromOptimizedCodeDescriptor::
InitializePlatformSpecific(CallInterfaceDescriptorData* data) {
Register registers[] = {
r1, // math rounding function
r3, // vector slot id
r4, // type vector
};
data->InitializePlatformSpecific(arraysize(registers), registers);
}
void InterpreterPushArgsAndCallDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
Register registers[] = {

View File

@ -449,27 +449,6 @@ void ApiAccessorDescriptor::InitializePlatformSpecific(
}
void MathRoundVariantCallFromUnoptimizedCodeDescriptor::
InitializePlatformSpecific(CallInterfaceDescriptorData* data) {
Register registers[] = {
x1, // math rounding function
x3, // vector slot id
};
data->InitializePlatformSpecific(arraysize(registers), registers);
}
void MathRoundVariantCallFromOptimizedCodeDescriptor::
InitializePlatformSpecific(CallInterfaceDescriptorData* data) {
Register registers[] = {
x1, // math rounding function
x3, // vector slot id
x4, // type vector
};
data->InitializePlatformSpecific(arraysize(registers), registers);
}
void InterpreterPushArgsAndCallDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
Register registers[] = {

View File

@ -59,7 +59,6 @@ template Handle<String> Bootstrapper::SourceLookup<ExperimentalNatives>(
template Handle<String> Bootstrapper::SourceLookup<ExperimentalExtraNatives>(
int index);
template Handle<String> Bootstrapper::SourceLookup<ExtraNatives>(int index);
template Handle<String> Bootstrapper::SourceLookup<CodeStubNatives>(int index);
void Bootstrapper::Initialize(bool create_heap_objects) {
@ -130,7 +129,6 @@ void Bootstrapper::TearDown() {
DeleteNativeSources(ExtraNatives::GetSourceCache(isolate_->heap()));
DeleteNativeSources(
ExperimentalExtraNatives::GetSourceCache(isolate_->heap()));
DeleteNativeSources(CodeStubNatives::GetSourceCache(isolate_->heap()));
extensions_cache_.Initialize(isolate_, false); // Yes, symmetrical
}
@ -339,26 +337,6 @@ Handle<Context> Bootstrapper::CreateEnvironment(
}
bool Bootstrapper::CreateCodeStubContext(Isolate* isolate) {
HandleScope scope(isolate);
SaveContext save_context(isolate);
BootstrapperActive active(this);
v8::ExtensionConfiguration no_extensions;
Handle<Context> native_context = CreateEnvironment(
MaybeHandle<JSGlobalProxy>(), v8::Local<v8::ObjectTemplate>(),
&no_extensions, THIN_CONTEXT);
isolate->heap()->SetRootCodeStubContext(*native_context);
isolate->set_context(*native_context);
Handle<JSObject> code_stub_exports =
isolate->factory()->NewJSObject(isolate->object_function());
JSObject::NormalizeProperties(code_stub_exports, CLEAR_INOBJECT_PROPERTIES, 2,
"container to export to extra natives");
isolate->heap()->SetRootCodeStubExportsObject(*code_stub_exports);
return InstallCodeStubNatives(isolate);
}
static void SetObjectPrototype(Handle<JSObject> object, Handle<Object> proto) {
// object.__proto__ = proto;
Handle<Map> old_map = Handle<Map>(object->map());
@ -1554,20 +1532,6 @@ bool Bootstrapper::CompileExperimentalExtraBuiltin(Isolate* isolate,
}
bool Bootstrapper::CompileCodeStubBuiltin(Isolate* isolate, int index) {
HandleScope scope(isolate);
Vector<const char> name = CodeStubNatives::GetScriptName(index);
Handle<String> source_code =
isolate->bootstrapper()->SourceLookup<CodeStubNatives>(index);
Handle<JSObject> global(isolate->global_object());
Handle<JSObject> exports(isolate->heap()->code_stub_exports_object());
Handle<Object> args[] = {global, exports};
bool result =
CompileNative(isolate, name, source_code, arraysize(args), args);
return result;
}
bool Bootstrapper::CompileNative(Isolate* isolate, Vector<const char> name,
Handle<String> source, int argc,
Handle<Object> argv[]) {
@ -2582,16 +2546,6 @@ bool Genesis::InstallDebuggerNatives() {
}
bool Bootstrapper::InstallCodeStubNatives(Isolate* isolate) {
for (int i = CodeStubNatives::GetDebuggerCount();
i < CodeStubNatives::GetBuiltinsCount(); i++) {
if (!CompileCodeStubBuiltin(isolate, i)) return false;
}
return true;
}
static void InstallBuiltinFunctionId(Handle<JSObject> holder,
const char* function_name,
BuiltinFunctionId id) {

View File

@ -82,8 +82,6 @@ class Bootstrapper final {
v8::ExtensionConfiguration* extensions,
ContextType context_type = FULL_CONTEXT);
bool CreateCodeStubContext(Isolate* isolate);
// Detach the environment from its outer global object.
void DetachGlobal(Handle<Context> env);
@ -116,8 +114,6 @@ class Bootstrapper final {
static bool CompileExperimentalBuiltin(Isolate* isolate, int index);
static bool CompileExtraBuiltin(Isolate* isolate, int index);
static bool CompileExperimentalExtraBuiltin(Isolate* isolate, int index);
static bool CompileCodeStubBuiltin(Isolate* isolate, int index);
static bool InstallCodeStubNatives(Isolate* isolate);
static void ExportFromRuntime(Isolate* isolate, Handle<JSObject> container);
static void ExportExperimentalFromRuntime(Isolate* isolate,

View File

@ -1140,20 +1140,6 @@ Handle<Code> StoreTransitionStub::GenerateCode() {
}
template <>
HValue* CodeStubGraphBuilder<StringLengthStub>::BuildCodeStub() {
HValue* string = BuildLoadNamedField(GetParameter(0),
FieldIndex::ForInObjectOffset(JSValue::kValueOffset));
return BuildLoadNamedField(string,
FieldIndex::ForInObjectOffset(String::kLengthOffset));
}
Handle<Code> StringLengthStub::GenerateCode() {
return DoGenerateCode(this);
}
template <>
HValue* CodeStubGraphBuilder<StoreFastElementStub>::BuildCodeStub() {
BuildUncheckedMonomorphicElementAccess(

View File

@ -7,6 +7,7 @@
#include <sstream>
#include "src/bootstrapper.h"
#include "src/compiler/code-stub-assembler.h"
#include "src/factory.h"
#include "src/gdb-jit.h"
#include "src/ic/handler-compiler.h"
@ -341,11 +342,6 @@ void StringAddStub::PrintBaseName(std::ostream& os) const { // NOLINT
}
void StringAddTFStub::PrintBaseName(std::ostream& os) const { // NOLINT
os << "StringAddTFStub_" << flags() << "_" << pretenure_flag();
}
InlineCacheState CompareICStub::GetICState() const {
CompareICState::State state = Max(left(), right());
switch (state) {
@ -473,38 +469,25 @@ void CompareNilICStub::UpdateStatus(Handle<Object> object) {
}
namespace {
Handle<JSFunction> GetFunction(Isolate* isolate, const char* name) {
v8::ExtensionConfiguration no_extensions;
MaybeHandle<Object> fun = Object::GetProperty(
isolate, isolate->factory()->code_stub_exports_object(), name);
Handle<JSFunction> function = Handle<JSFunction>::cast(fun.ToHandleChecked());
DCHECK(!function->IsUndefined() &&
"JavaScript implementation of stub not found");
return function;
}
} // namespace
Handle<Code> TurboFanCodeStub::GenerateCode() {
// Get the outer ("stub generator") function.
const char* name = CodeStub::MajorName(MajorKey());
Handle<JSFunction> outer = GetFunction(isolate(), name);
DCHECK_EQ(2, outer->shared()->length());
Zone zone;
CallInterfaceDescriptor descriptor(GetCallInterfaceDescriptor());
compiler::CodeStubAssembler assembler(isolate(), &zone, descriptor,
GetCodeKind(), name);
GenerateAssembly(&assembler);
return assembler.GenerateCode();
}
// Invoke the outer function to get the stub itself.
Factory* factory = isolate()->factory();
Handle<Object> call_conv = factory->InternalizeUtf8String(name);
Handle<Object> minor_key = factory->NewNumber(MinorKey());
Handle<Object> args[] = {call_conv, minor_key};
MaybeHandle<Object> result =
Execution::Call(isolate(), outer, factory->undefined_value(), 2, args);
Handle<JSFunction> inner = Handle<JSFunction>::cast(result.ToHandleChecked());
// Just to make sure nobody calls this...
inner->set_code(isolate()->builtins()->builtin(Builtins::kIllegal));
return Compiler::GetStubCode(inner, this).ToHandleChecked();
void StringLengthStub::GenerateAssembly(
compiler::CodeStubAssembler* assembler) const {
compiler::Node* value = assembler->Parameter(0);
compiler::Node* string =
assembler->LoadObjectField(value, JSValue::kValueOffset);
compiler::Node* result =
assembler->LoadObjectField(string, String::kLengthOffset);
assembler->Return(result);
}
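To make the contrast with the removed path concrete, a hedged sketch of how stub code is now obtained. GetCode() is the standard cached CodeStub entry point; previously GenerateCode() compiled and invoked a JavaScript "stub generator" from the code-stub context:

// Sketch only: requesting the code drives the CodeStubAssembler through
// the TurboFan pipeline instead of compiling src/js/code-stubs.js.
StringLengthStub stub(isolate);
Handle<Code> code = stub.GetCode();  // caches; calls GenerateCode() above on a miss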

View File

@ -8,6 +8,7 @@
#include "src/allocation.h"
#include "src/assembler.h"
#include "src/codegen.h"
#include "src/compiler/code-stub-assembler.h"
#include "src/globals.h"
#include "src/ic/ic-state.h"
#include "src/interface-descriptors.h"
@ -96,10 +97,7 @@ namespace internal {
V(KeyedLoadIC) \
V(LoadIC) \
/* TurboFanCodeStubs */ \
V(StringLengthTF) \
V(StringAddTF) \
/* TurboFanICs */ \
V(MathFloor) \
V(StringLength) \
/* IC Handler stubs */ \
V(ArrayBufferViewLoadField) \
V(LoadConstant) \
@ -109,8 +107,7 @@ namespace internal {
V(KeyedStoreSloppyArguments) \
V(StoreField) \
V(StoreGlobal) \
V(StoreTransition) \
V(StringLength)
V(StoreTransition)
// List of code stubs only used on ARM 32 bits platforms.
#if V8_TARGET_ARCH_ARM
@ -349,19 +346,6 @@ class CodeStub BASE_EMBEDDED {
}; \
DEFINE_CODE_STUB(NAME, SUPER)
#define DEFINE_TURBOFAN_IC(NAME, SUPER, DESC) \
public: \
CallInterfaceDescriptor GetCallInterfaceDescriptor() const override { \
if (GetCallMode() == CALL_FROM_OPTIMIZED_CODE) { \
return DESC##CallFromOptimizedCodeDescriptor(isolate()); \
} else { \
return DESC##CallFromUnoptimizedCodeDescriptor(isolate()); \
} \
}; \
\
protected: \
DEFINE_CODE_STUB(NAME, SUPER)
#define DEFINE_HANDLER_CODE_STUB(NAME, SUPER) \
public: \
Handle<Code> GenerateCode() override; \
@ -550,38 +534,14 @@ class TurboFanCodeStub : public CodeStub {
protected:
explicit TurboFanCodeStub(Isolate* isolate) : CodeStub(isolate) {}
virtual void GenerateAssembly(
compiler::CodeStubAssembler* assembler) const = 0;
private:
DEFINE_CODE_STUB_BASE(TurboFanCodeStub, CodeStub);
};
class TurboFanIC : public TurboFanCodeStub {
public:
enum CallMode { CALL_FROM_UNOPTIMIZED_CODE, CALL_FROM_OPTIMIZED_CODE };
protected:
explicit TurboFanIC(Isolate* isolate, CallMode mode)
: TurboFanCodeStub(isolate) {
minor_key_ = CallModeBits::encode(mode);
}
CallMode GetCallMode() const { return CallModeBits::decode(minor_key_); }
void set_sub_minor_key(uint32_t key) {
minor_key_ = SubMinorKeyBits::update(minor_key_, key);
}
uint32_t sub_minor_key() const { return SubMinorKeyBits::decode(minor_key_); }
static const int kSubMinorKeyBits = kStubMinorKeyBits - 1;
private:
class CallModeBits : public BitField<CallMode, 0, 1> {};
class SubMinorKeyBits : public BitField<int, 1, kSubMinorKeyBits> {};
DEFINE_CODE_STUB_BASE(TurboFanIC, TurboFanCodeStub);
};
// Helper interface to prepare to/restore after making runtime calls.
class RuntimeCallHelper {
public:
@ -649,25 +609,18 @@ class NopRuntimeCallHelper : public RuntimeCallHelper {
};
class MathFloorStub : public TurboFanIC {
class StringLengthStub : public TurboFanCodeStub {
public:
explicit MathFloorStub(Isolate* isolate, TurboFanIC::CallMode mode)
: TurboFanIC(isolate, mode) {}
Code::Kind GetCodeKind() const override { return Code::CALL_IC; }
DEFINE_TURBOFAN_IC(MathFloor, TurboFanIC, MathRoundVariant);
};
class StringLengthTFStub : public TurboFanCodeStub {
public:
explicit StringLengthTFStub(Isolate* isolate) : TurboFanCodeStub(isolate) {}
explicit StringLengthStub(Isolate* isolate) : TurboFanCodeStub(isolate) {}
Code::Kind GetCodeKind() const override { return Code::HANDLER; }
InlineCacheState GetICState() const override { return MONOMORPHIC; }
ExtraICState GetExtraICState() const override { return Code::LOAD_IC; }
void GenerateAssembly(compiler::CodeStubAssembler* assembler) const override;
DEFINE_CALL_INTERFACE_DESCRIPTOR(LoadWithVector);
DEFINE_CODE_STUB(StringLengthTF, TurboFanCodeStub);
DEFINE_CODE_STUB(StringLength, TurboFanCodeStub);
};
@ -690,34 +643,6 @@ enum StringAddFlags {
std::ostream& operator<<(std::ostream& os, const StringAddFlags& flags);
class StringAddTFStub : public TurboFanCodeStub {
public:
StringAddTFStub(Isolate* isolate, StringAddFlags flags,
PretenureFlag pretenure_flag)
: TurboFanCodeStub(isolate) {
minor_key_ = StringAddFlagsBits::encode(flags) |
PretenureFlagBits::encode(pretenure_flag);
}
StringAddFlags flags() const {
return StringAddFlagsBits::decode(MinorKey());
}
PretenureFlag pretenure_flag() const {
return PretenureFlagBits::decode(MinorKey());
}
private:
class StringAddFlagsBits : public BitField<StringAddFlags, 0, 3> {};
class PretenureFlagBits : public BitField<PretenureFlag, 3, 1> {};
void PrintBaseName(std::ostream& os) const override; // NOLINT
DEFINE_CALL_INTERFACE_DESCRIPTOR(StringAdd);
DEFINE_CODE_STUB(StringAddTF, TurboFanCodeStub);
};
class NumberToStringStub final : public HydrogenCodeStub {
public:
explicit NumberToStringStub(Isolate* isolate) : HydrogenCodeStub(isolate) {}
@ -1180,18 +1105,6 @@ class LoadConstantStub : public HandlerStub {
};
class StringLengthStub: public HandlerStub {
public:
explicit StringLengthStub(Isolate* isolate) : HandlerStub(isolate) {}
protected:
Code::Kind kind() const override { return Code::LOAD_IC; }
Code::StubType GetStubType() const override { return Code::FAST; }
DEFINE_HANDLER_CODE_STUB(StringLength, HandlerStub);
};
class StoreFieldStub : public HandlerStub {
public:
StoreFieldStub(Isolate* isolate, FieldIndex index,

View File

@ -551,8 +551,8 @@ Reduction ChangeLowering::Allocate(Node* node) {
AllocationSpace space = OLD_SPACE;
Runtime::FunctionId f = Runtime::kAllocateInTargetSpace;
Operator::Properties props = node->op()->properties();
CallDescriptor* desc =
Linkage::GetRuntimeCallDescriptor(jsgraph()->zone(), f, 2, props);
CallDescriptor* desc = Linkage::GetRuntimeCallDescriptor(
jsgraph()->zone(), f, 2, props, CallDescriptor::kNeedsFrameState);
ExternalReference ref(f, jsgraph()->isolate());
int32_t flags = AllocateTargetSpace::encode(space);
node->InsertInput(graph()->zone(), 0, jsgraph()->CEntryStubConstant(1));

View File

@ -0,0 +1,196 @@
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/compiler/code-stub-assembler.h"
#include <ostream>
#include "src/code-factory.h"
#include "src/compiler/graph.h"
#include "src/compiler/instruction-selector.h"
#include "src/compiler/linkage.h"
#include "src/compiler/machine-type.h"
#include "src/compiler/pipeline.h"
#include "src/compiler/raw-machine-assembler.h"
#include "src/compiler/schedule.h"
#include "src/frames.h"
#include "src/interface-descriptors.h"
#include "src/interpreter/bytecodes.h"
#include "src/macro-assembler.h"
#include "src/zone.h"
namespace v8 {
namespace internal {
namespace compiler {
CodeStubAssembler::CodeStubAssembler(Isolate* isolate, Zone* zone,
const CallInterfaceDescriptor& descriptor,
Code::Kind kind, const char* name)
: raw_assembler_(new RawMachineAssembler(
isolate, new (zone) Graph(zone),
Linkage::GetStubCallDescriptor(isolate, zone, descriptor, 0,
CallDescriptor::kNoFlags))),
end_nodes_(zone),
kind_(kind),
name_(name),
code_generated_(false) {}
CodeStubAssembler::~CodeStubAssembler() {}
Handle<Code> CodeStubAssembler::GenerateCode() {
DCHECK(!code_generated_);
End();
Schedule* schedule = raw_assembler_->Export();
Handle<Code> code = Pipeline::GenerateCodeForCodeStub(
isolate(), raw_assembler_->call_descriptor(), graph(), schedule, kind_,
name_);
code_generated_ = true;
return code;
}
Node* CodeStubAssembler::Int32Constant(int value) {
return raw_assembler_->Int32Constant(value);
}
Node* CodeStubAssembler::IntPtrConstant(intptr_t value) {
return raw_assembler_->IntPtrConstant(value);
}
Node* CodeStubAssembler::NumberConstant(double value) {
return raw_assembler_->NumberConstant(value);
}
Node* CodeStubAssembler::HeapConstant(Handle<HeapObject> object) {
return raw_assembler_->HeapConstant(object);
}
Node* CodeStubAssembler::BooleanConstant(bool value) {
return raw_assembler_->BooleanConstant(value);
}
Node* CodeStubAssembler::Parameter(int value) {
return raw_assembler_->Parameter(value);
}
void CodeStubAssembler::Return(Node* value) {
return raw_assembler_->Return(value);
}
Node* CodeStubAssembler::SmiShiftBitsConstant() {
return Int32Constant(kSmiShiftSize + kSmiTagSize);
}
Node* CodeStubAssembler::SmiTag(Node* value) {
return raw_assembler_->WordShl(value, SmiShiftBitsConstant());
}
Node* CodeStubAssembler::SmiUntag(Node* value) {
return raw_assembler_->WordSar(value, SmiShiftBitsConstant());
}
Node* CodeStubAssembler::IntPtrAdd(Node* a, Node* b) {
return raw_assembler_->IntPtrAdd(a, b);
}
Node* CodeStubAssembler::IntPtrSub(Node* a, Node* b) {
return raw_assembler_->IntPtrSub(a, b);
}
Node* CodeStubAssembler::WordShl(Node* value, int shift) {
return raw_assembler_->WordShl(value, Int32Constant(shift));
}
Node* CodeStubAssembler::LoadObjectField(Node* object, int offset) {
return raw_assembler_->Load(kMachAnyTagged, object,
IntPtrConstant(offset - kHeapObjectTag));
}
Node* CodeStubAssembler::CallN(CallDescriptor* descriptor, Node* code_target,
Node** args) {
return raw_assembler_->CallN(descriptor, code_target, args);
}
Node* CodeStubAssembler::TailCallN(CallDescriptor* descriptor,
Node* code_target, Node** args) {
return raw_assembler_->TailCallN(descriptor, code_target, args);
}
Node* CodeStubAssembler::CallRuntime(Runtime::FunctionId function_id,
Node* context, Node* arg1) {
return raw_assembler_->CallRuntime1(function_id, arg1, context);
}
Node* CodeStubAssembler::CallRuntime(Runtime::FunctionId function_id,
Node* context, Node* arg1, Node* arg2) {
return raw_assembler_->CallRuntime2(function_id, arg1, arg2, context);
}
Node* CodeStubAssembler::TailCallRuntime(Runtime::FunctionId function_id,
Node* context, Node* arg1) {
return raw_assembler_->TailCallRuntime1(function_id, arg1, context);
}
Node* CodeStubAssembler::TailCallRuntime(Runtime::FunctionId function_id,
Node* context, Node* arg1,
Node* arg2) {
return raw_assembler_->TailCallRuntime2(function_id, arg1, arg2, context);
}
void CodeStubAssembler::AddEndInput(Node* input) {
DCHECK_NOT_NULL(input);
end_nodes_.push_back(input);
}
void CodeStubAssembler::End() {
if (end_nodes_.size() == 0) {
end_nodes_.push_back(graph()->start());
}
int end_count = static_cast<int>(end_nodes_.size());
Node* end = graph()->NewNode(raw_assembler_->common()->End(end_count),
end_count, &end_nodes_[0]);
graph()->SetEnd(end);
}
// RawMachineAssembler delegate helpers:
Isolate* CodeStubAssembler::isolate() { return raw_assembler_->isolate(); }
Graph* CodeStubAssembler::graph() { return raw_assembler_->graph(); }
Zone* CodeStubAssembler::zone() { return raw_assembler_->zone(); }
} // namespace compiler
} // namespace internal
} // namespace v8
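The Smi and field-access helpers above simply emit V8's standard tagging arithmetic as graph nodes. An illustrative sketch of that arithmetic, written as plain expressions; the constants come from src/globals.h (e.g. kSmiTagSize == 1 and kHeapObjectTag == 1, while kSmiShiftSize depends on the word size):

// Illustrative only: the assembler emits these operations as machine nodes.
#include "src/globals.h"

intptr_t SmiTagSketch(intptr_t value) {
  // A Smi stores the integer shifted left past the shift and tag bits.
  return value << (kSmiShiftSize + kSmiTagSize);
}

intptr_t SmiUntagSketch(intptr_t smi) {
  // Arithmetic shift right recovers the original, sign-extended integer.
  return smi >> (kSmiShiftSize + kSmiTagSize);
}

// LoadObjectField(object, offset) reads a tagged word at
//   object + offset - kHeapObjectTag
// because heap-object pointers carry kHeapObjectTag in their low bits.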

View File

@ -0,0 +1,103 @@
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_COMPILER_CODE_STUB_ASSEMBLER_H_
#define V8_COMPILER_CODE_STUB_ASSEMBLER_H_
// Clients of this interface shouldn't depend on lots of compiler internals.
// Do not include anything from src/compiler here!
#include "src/allocation.h"
#include "src/builtins.h"
#include "src/runtime/runtime.h"
#include "src/zone-containers.h"
namespace v8 {
namespace internal {
class CallInterfaceDescriptor;
class Isolate;
class Zone;
namespace compiler {
class CallDescriptor;
class Graph;
class Node;
class Operator;
class RawMachineAssembler;
class Schedule;
class CodeStubAssembler {
public:
CodeStubAssembler(Isolate* isolate, Zone* zone,
const CallInterfaceDescriptor& descriptor, Code::Kind kind,
const char* name);
virtual ~CodeStubAssembler();
Handle<Code> GenerateCode();
// Constants.
Node* Int32Constant(int value);
Node* IntPtrConstant(intptr_t value);
Node* NumberConstant(double value);
Node* HeapConstant(Handle<HeapObject> object);
Node* BooleanConstant(bool value);
Node* Parameter(int value);
void Return(Node* value);
// Tag and untag Smi values.
Node* SmiTag(Node* value);
Node* SmiUntag(Node* value);
// Basic arithmetic operations.
Node* IntPtrAdd(Node* a, Node* b);
Node* IntPtrSub(Node* a, Node* b);
Node* WordShl(Node* value, int shift);
// Load a field from an object on the heap.
Node* LoadObjectField(Node* object, int offset);
// Call runtime function.
Node* CallRuntime(Runtime::FunctionId function_id, Node* context, Node* arg1);
Node* CallRuntime(Runtime::FunctionId function_id, Node* context, Node* arg1,
Node* arg2);
Node* TailCallRuntime(Runtime::FunctionId function_id, Node* context,
Node* arg1);
Node* TailCallRuntime(Runtime::FunctionId function_id, Node* context,
Node* arg1, Node* arg2);
private:
friend class CodeStubAssemblerTester;
// Close the graph.
void End();
Node* CallN(CallDescriptor* descriptor, Node* code_target, Node** args);
Node* TailCallN(CallDescriptor* descriptor, Node* code_target, Node** args);
Node* SmiShiftBitsConstant();
// Adds an end node of the graph.
void AddEndInput(Node* input);
// Private helpers which delegate to RawMachineAssembler.
Graph* graph();
Isolate* isolate();
Zone* zone();
base::SmartPointer<RawMachineAssembler> raw_assembler_;
ZoneVector<Node*> end_nodes_;
Code::Kind kind_;
const char* name_;
bool code_generated_;
DISALLOW_COPY_AND_ASSIGN(CodeStubAssembler);
};
} // namespace compiler
} // namespace internal
} // namespace v8
#endif // V8_COMPILER_CODE_STUB_ASSEMBLER_H_
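The assembler can also be driven directly, without a CodeStub subclass; this is how the new cctest at the end of this CL exercises it. A minimal sketch, assuming an Isolate* isolate and Zone* zone are in scope and using the VoidDescriptor added in this CL:

// Sketch of direct use (cf. test/cctest/compiler/test-code-stub-assembler.cc).
VoidDescriptor descriptor(isolate);
compiler::CodeStubAssembler m(isolate, zone, descriptor, Code::STUB, "example");
// Build "return the Smi 37": constant, tag, return.
m.Return(m.SmiTag(m.Int32Constant(37)));
Handle<Code> code = m.GenerateCode();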

View File

@ -51,9 +51,9 @@ Handle<Code> InterpreterAssembler::GenerateCode() {
const char* bytecode_name = interpreter::Bytecodes::ToString(bytecode_);
Schedule* schedule = raw_assembler_->Export();
// TODO(rmcilroy): use a non-testing code generator.
Handle<Code> code = Pipeline::GenerateCodeForInterpreter(
Handle<Code> code = Pipeline::GenerateCodeForCodeStub(
isolate(), raw_assembler_->call_descriptor(), graph(), schedule,
bytecode_name);
Code::STUB, bytecode_name);
#ifdef ENABLE_DISASSEMBLER
if (FLAG_trace_ignition_codegen) {

View File

@ -225,8 +225,8 @@ void JSGenericLowering::ReplaceWithRuntimeCall(Node* node,
Operator::Properties properties = node->op()->properties();
const Runtime::Function* fun = Runtime::FunctionForId(f);
int nargs = (nargs_override < 0) ? fun->nargs : nargs_override;
CallDescriptor* desc =
Linkage::GetRuntimeCallDescriptor(zone(), f, nargs, properties);
CallDescriptor* desc = Linkage::GetRuntimeCallDescriptor(
zone(), f, nargs, properties, CallDescriptor::kNeedsFrameState);
Node* ref = jsgraph()->ExternalConstant(ExternalReference(f, isolate()));
Node* arity = jsgraph()->Int32Constant(nargs);
node->InsertInput(zone(), 0, jsgraph()->CEntryStubConstant(fun->result_size));
@ -626,7 +626,8 @@ void JSGenericLowering::LowerJSForInPrepare(Node* node) {
Runtime::Function const* function =
Runtime::FunctionForId(Runtime::kGetPropertyNamesFast);
CallDescriptor const* descriptor = Linkage::GetRuntimeCallDescriptor(
zone(), function->function_id, 1, Operator::kNoProperties);
zone(), function->function_id, 1, Operator::kNoProperties,
CallDescriptor::kNeedsFrameState);
Node* cache_type = effect = graph()->NewNode(
common()->Call(descriptor),
jsgraph()->CEntryStubConstant(function->result_size), object,

View File

@ -83,10 +83,6 @@ Reduction JSIntrinsicLowering::Reduce(Node* node) {
return ReduceFixedArrayGet(node);
case Runtime::kInlineFixedArraySet:
return ReduceFixedArraySet(node);
case Runtime::kInlineGetTypeFeedbackVector:
return ReduceGetTypeFeedbackVector(node);
case Runtime::kInlineGetCallerJSFunction:
return ReduceGetCallerJSFunction(node);
case Runtime::kInlineToInteger:
return ReduceToInteger(node);
case Runtime::kInlineToLength:
@ -459,43 +455,6 @@ Reduction JSIntrinsicLowering::ReduceFixedArraySet(Node* node) {
}
Reduction JSIntrinsicLowering::ReduceGetTypeFeedbackVector(Node* node) {
Node* func = node->InputAt(0);
Node* effect = NodeProperties::GetEffectInput(node);
Node* control = NodeProperties::GetControlInput(node);
FieldAccess access = AccessBuilder::ForJSFunctionSharedFunctionInfo();
Node* load =
graph()->NewNode(simplified()->LoadField(access), func, effect, control);
access = AccessBuilder::ForSharedFunctionInfoTypeFeedbackVector();
return Change(node, simplified()->LoadField(access), load, load, control);
}
Reduction JSIntrinsicLowering::ReduceGetCallerJSFunction(Node* node) {
Node* effect = NodeProperties::GetEffectInput(node);
Node* control = NodeProperties::GetControlInput(node);
Node* const frame_state = NodeProperties::GetFrameStateInput(node, 0);
Node* outer_frame = frame_state->InputAt(kFrameStateOuterStateInput);
if (outer_frame->opcode() == IrOpcode::kFrameState) {
// Use the runtime implementation to throw the appropriate error if the
// containing function is inlined.
return NoChange();
}
// TODO(danno): This implementation forces intrinsic lowering to happen after
// inlining, which is fine for now, but eventually the frame-querying logic
// probably should go later, e.g. in instruction selection, so that there is
// no phase-ordering dependency.
FieldAccess access = AccessBuilder::ForFrameCallerFramePtr();
Node* fp = graph()->NewNode(machine()->LoadFramePointer());
Node* next_fp =
graph()->NewNode(simplified()->LoadField(access), fp, effect, control);
return Change(node, simplified()->LoadField(AccessBuilder::ForFrameMarker()),
next_fp, effect, control);
}
Reduction JSIntrinsicLowering::ReduceThrowNotDateError(Node* node) {
if (mode() != kDeoptimizationEnabled) return NoChange();
Node* const frame_state = NodeProperties::GetFrameStateInput(node, 1);

View File

@ -57,8 +57,6 @@ class JSIntrinsicLowering final : public AdvancedReducer {
Reduction ReduceValueOf(Node* node);
Reduction ReduceFixedArrayGet(Node* node);
Reduction ReduceFixedArraySet(Node* node);
Reduction ReduceGetTypeFeedbackVector(Node* node);
Reduction ReduceGetCallerJSFunction(Node* node);
Reduction ReduceThrowNotDateError(Node* node);
Reduction ReduceToInteger(Node* node);
Reduction ReduceToLength(Node* node);

View File

@ -176,7 +176,6 @@ int Linkage::FrameStateInputCount(Runtime::FunctionId function) {
case Runtime::kInlineArguments:
case Runtime::kInlineArgumentsLength:
case Runtime::kInlineDefaultConstructorCallSuper:
case Runtime::kInlineGetCallerJSFunction:
case Runtime::kInlineGetPrototype:
case Runtime::kInlineRegExpExec:
case Runtime::kInlineSubString:
@ -221,7 +220,7 @@ bool CallDescriptor::UsesOnlyRegisters() const {
CallDescriptor* Linkage::GetRuntimeCallDescriptor(
Zone* zone, Runtime::FunctionId function_id, int js_parameter_count,
Operator::Properties properties, bool needs_frame_state) {
Operator::Properties properties, CallDescriptor::Flags flags) {
const size_t function_count = 1;
const size_t num_args_count = 1;
const size_t context_count = 1;
@ -264,10 +263,10 @@ CallDescriptor* Linkage::GetRuntimeCallDescriptor(
locations.AddParam(regloc(kContextRegister));
types.AddParam(kMachAnyTagged);
CallDescriptor::Flags flags =
needs_frame_state && (Linkage::FrameStateInputCount(function_id) > 0)
? CallDescriptor::kNeedsFrameState
: CallDescriptor::kNoFlags;
if (Linkage::FrameStateInputCount(function_id) == 0) {
flags = static_cast<CallDescriptor::Flags>(
flags & ~CallDescriptor::kNeedsFrameState);
}
// The target for runtime calls is a code object.
MachineType target_type = kMachAnyTagged;

View File

@ -307,7 +307,7 @@ class Linkage : public ZoneObject {
static CallDescriptor* GetRuntimeCallDescriptor(
Zone* zone, Runtime::FunctionId function, int parameter_count,
Operator::Properties properties, bool needs_frame_state = true);
Operator::Properties properties, CallDescriptor::Flags flags);
static CallDescriptor* GetLazyBailoutDescriptor(Zone* zone);

View File

@ -1215,10 +1215,13 @@ Handle<Code> Pipeline::GenerateCode() {
}
Handle<Code> Pipeline::GenerateCodeForInterpreter(
Isolate* isolate, CallDescriptor* call_descriptor, Graph* graph,
Schedule* schedule, const char* bytecode_name) {
CompilationInfo info(bytecode_name, isolate, graph->zone());
Handle<Code> Pipeline::GenerateCodeForCodeStub(Isolate* isolate,
CallDescriptor* call_descriptor,
Graph* graph, Schedule* schedule,
Code::Kind kind,
const char* code_stub_name) {
CompilationInfo info(code_stub_name, isolate, graph->zone());
info.set_output_code_kind(kind);
// Construct a pipeline for scheduling and code generation.
ZonePool zone_pool;

View File

@ -30,11 +30,13 @@ class Pipeline {
// Run the entire pipeline and generate a handle to a code object.
Handle<Code> GenerateCode();
// Run the pipeline on an interpreter bytecode handler machine graph and
// generate code.
static Handle<Code> GenerateCodeForInterpreter(
Isolate* isolate, CallDescriptor* call_descriptor, Graph* graph,
Schedule* schedule, const char* bytecode_name);
// Run the pipeline on a code stub machine graph and generate code. A valid
// schedule must be provided.
static Handle<Code> GenerateCodeForCodeStub(Isolate* isolate,
CallDescriptor* call_descriptor,
Graph* graph, Schedule* schedule,
Code::Kind kind,
const char* bytecode_name);
// Run the pipeline on a machine graph and generate code. If {schedule} is
// {nullptr}, then compute a new schedule for code generation.

View File

@ -152,6 +152,52 @@ Node* RawMachineAssembler::CallNWithFrameState(CallDescriptor* desc,
}
Node* RawMachineAssembler::CallRuntime1(Runtime::FunctionId function,
Node* arg1, Node* context) {
CallDescriptor* descriptor = Linkage::GetRuntimeCallDescriptor(
zone(), function, 1, Operator::kNoProperties, CallDescriptor::kNoFlags);
Node* centry = HeapConstant(CEntryStub(isolate(), 1).GetCode());
Node* ref = AddNode(
common()->ExternalConstant(ExternalReference(function, isolate())));
Node* arity = Int32Constant(1);
return AddNode(common()->Call(descriptor), centry, arg1, ref, arity, context,
graph()->start(), graph()->start());
}
Node* RawMachineAssembler::CallRuntime2(Runtime::FunctionId function,
Node* arg1, Node* arg2, Node* context) {
CallDescriptor* descriptor = Linkage::GetRuntimeCallDescriptor(
zone(), function, 2, Operator::kNoProperties, CallDescriptor::kNoFlags);
Node* centry = HeapConstant(CEntryStub(isolate(), 1).GetCode());
Node* ref = AddNode(
common()->ExternalConstant(ExternalReference(function, isolate())));
Node* arity = Int32Constant(2);
return AddNode(common()->Call(descriptor), centry, arg1, arg2, ref, arity,
context, graph()->start(), graph()->start());
}
Node* RawMachineAssembler::CallRuntime4(Runtime::FunctionId function,
Node* arg1, Node* arg2, Node* arg3,
Node* arg4, Node* context) {
CallDescriptor* descriptor = Linkage::GetRuntimeCallDescriptor(
zone(), function, 4, Operator::kNoProperties, CallDescriptor::kNoFlags);
Node* centry = HeapConstant(CEntryStub(isolate(), 1).GetCode());
Node* ref = AddNode(
common()->ExternalConstant(ExternalReference(function, isolate())));
Node* arity = Int32Constant(4);
return AddNode(common()->Call(descriptor), centry, arg1, arg2, arg3, arg4,
ref, arity, context, graph()->start(), graph()->start());
}
Node* RawMachineAssembler::TailCallN(CallDescriptor* desc, Node* function,
Node** args) {
int param_count =
@ -172,49 +218,49 @@ Node* RawMachineAssembler::TailCallN(CallDescriptor* desc, Node* function,
}
Node* RawMachineAssembler::CallRuntime1(Runtime::FunctionId function,
Node* arg1, Node* context) {
CallDescriptor* descriptor = Linkage::GetRuntimeCallDescriptor(
zone(), function, 1, Operator::kNoProperties, false);
Node* RawMachineAssembler::TailCallRuntime1(Runtime::FunctionId function,
Node* arg1, Node* context) {
const int kArity = 1;
CallDescriptor* desc = Linkage::GetRuntimeCallDescriptor(
zone(), function, kArity, Operator::kNoProperties,
CallDescriptor::kSupportsTailCalls);
Node* centry = HeapConstant(CEntryStub(isolate(), 1).GetCode());
Node* ref = AddNode(
common()->ExternalConstant(ExternalReference(function, isolate())));
Node* arity = Int32Constant(1);
Node* arity = Int32Constant(kArity);
return AddNode(common()->Call(descriptor), centry, arg1, ref, arity, context,
graph()->start(), graph()->start());
Node* nodes[] = {centry, arg1, ref, arity, context, graph()->start(),
graph()->start()};
Node* tail_call = MakeNode(common()->TailCall(desc), arraysize(nodes), nodes);
schedule()->AddTailCall(CurrentBlock(), tail_call);
current_block_ = nullptr;
return tail_call;
}
Node* RawMachineAssembler::CallRuntime2(Runtime::FunctionId function,
Node* arg1, Node* arg2, Node* context) {
CallDescriptor* descriptor = Linkage::GetRuntimeCallDescriptor(
zone(), function, 2, Operator::kNoProperties, false);
Node* RawMachineAssembler::TailCallRuntime2(Runtime::FunctionId function,
Node* arg1, Node* arg2,
Node* context) {
const int kArity = 2;
CallDescriptor* desc = Linkage::GetRuntimeCallDescriptor(
zone(), function, kArity, Operator::kNoProperties,
CallDescriptor::kSupportsTailCalls);
Node* centry = HeapConstant(CEntryStub(isolate(), 1).GetCode());
Node* ref = AddNode(
common()->ExternalConstant(ExternalReference(function, isolate())));
Node* arity = Int32Constant(2);
Node* arity = Int32Constant(kArity);
return AddNode(common()->Call(descriptor), centry, arg1, arg2, ref, arity,
context, graph()->start(), graph()->start());
}
Node* nodes[] = {
centry, arg1, arg2, ref, arity, context, graph()->start(),
graph()->start()};
Node* tail_call = MakeNode(common()->TailCall(desc), arraysize(nodes), nodes);
Node* RawMachineAssembler::CallRuntime4(Runtime::FunctionId function,
Node* arg1, Node* arg2, Node* arg3,
Node* arg4, Node* context) {
CallDescriptor* descriptor = Linkage::GetRuntimeCallDescriptor(
zone(), function, 4, Operator::kNoProperties, false);
Node* centry = HeapConstant(CEntryStub(isolate(), 1).GetCode());
Node* ref = AddNode(
common()->ExternalConstant(ExternalReference(function, isolate())));
Node* arity = Int32Constant(4);
return AddNode(common()->Call(descriptor), centry, arg1, arg2, arg3, arg4,
ref, arity, context, graph()->start(), graph()->start());
schedule()->AddTailCall(CurrentBlock(), tail_call);
current_block_ = nullptr;
return tail_call;
}
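These RawMachineAssembler additions back CodeStubAssembler::TailCallRuntime above: the runtime call descriptor is created with kSupportsTailCalls, a TailCall node is emitted, and the current block is closed. A hedged sketch of how a stub body might use it; ExampleStub and Runtime::kSomeFunction are placeholders, not names from this CL:

// Sketch only: tail-call out of a stub into the runtime.
void ExampleStub::GenerateAssembly(
    compiler::CodeStubAssembler* assembler) const {
  compiler::Node* arg = assembler->Parameter(0);
  compiler::Node* context = assembler->Parameter(1);  // parameter layout is stub-specific
  // No Return() needed: the tail call leaves the stub frame directly.
  assembler->TailCallRuntime(Runtime::kSomeFunction, context, arg);
}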

View File

@ -543,8 +543,6 @@ class RawMachineAssembler {
// Call a given call descriptor and the given arguments and frame-state.
Node* CallNWithFrameState(CallDescriptor* desc, Node* function, Node** args,
Node* frame_state);
// Tail call the given call descriptor and the given arguments.
Node* TailCallN(CallDescriptor* call_descriptor, Node* function, Node** args);
// Call to a runtime function with one arguments.
Node* CallRuntime1(Runtime::FunctionId function, Node* arg0, Node* context);
// Call to a runtime function with two arguments.
@ -571,6 +569,16 @@ class RawMachineAssembler {
Node* arg1, Node* arg2, Node* arg3, Node* arg4,
Node* arg5, Node* arg6, Node* arg7);
// Tail call the given call descriptor and the given arguments.
Node* TailCallN(CallDescriptor* call_descriptor, Node* function, Node** args);
// Tail call to a runtime function with one argument.
Node* TailCallRuntime1(Runtime::FunctionId function, Node* arg0,
Node* context);
// Tail call to a runtime function with two arguments.
Node* TailCallRuntime2(Runtime::FunctionId function, Node* arg1, Node* arg2,
Node* context);
// ===========================================================================
// The following utility methods deal with control flow, hence might switch
// the current basic block or create new basic blocks for labels.

View File

@ -2717,9 +2717,6 @@ void Heap::CreateInitialObjects() {
set_experimental_extra_natives_source_cache(
*factory->NewFixedArray(ExperimentalExtraNatives::GetBuiltinsCount()));
set_code_stub_natives_source_cache(
*factory->NewFixedArray(CodeStubNatives::GetBuiltinsCount()));
set_undefined_cell(*factory->NewCell(factory->undefined_value()));
// The symbol registry is initialized lazily.
@ -5081,6 +5078,7 @@ bool Heap::CreateHeapObjects() {
set_native_contexts_list(undefined_value());
set_allocation_sites_list(undefined_value());
return true;
}

View File

@ -166,7 +166,6 @@ namespace internal {
V(FixedArray, extra_natives_source_cache, ExtraNativesSourceCache) \
V(FixedArray, experimental_extra_natives_source_cache, \
ExperimentalExtraNativesSourceCache) \
V(FixedArray, code_stub_natives_source_cache, CodeStubNativesSourceCache) \
V(Script, empty_script, EmptyScript) \
V(NameDictionary, intrinsic_function_names, IntrinsicFunctionNames) \
V(Cell, undefined_cell, UndefinedCell) \
@ -186,8 +185,6 @@ namespace internal {
V(PropertyCell, array_protector, ArrayProtector) \
V(PropertyCell, empty_property_cell, EmptyPropertyCell) \
V(Object, weak_stack_trace_list, WeakStackTraceList) \
V(Object, code_stub_context, CodeStubContext) \
V(JSObject, code_stub_exports_object, CodeStubExportsObject) \
V(Object, noscript_shared_function_infos, NoScriptSharedFunctionInfos) \
V(FixedArray, interpreter_table, InterpreterTable) \
V(Map, bytecode_array_map, BytecodeArrayMap) \
@ -1153,14 +1150,6 @@ class Heap {
roots_[kMaterializedObjectsRootIndex] = objects;
}
void SetRootCodeStubContext(Object* value) {
roots_[kCodeStubContextRootIndex] = value;
}
void SetRootCodeStubExportsObject(JSObject* value) {
roots_[kCodeStubExportsObjectRootIndex] = value;
}
void SetRootScriptList(Object* value) {
roots_[kScriptListRootIndex] = value;
}

View File

@ -397,27 +397,6 @@ void ApiAccessorDescriptor::InitializePlatformSpecific(
}
void MathRoundVariantCallFromUnoptimizedCodeDescriptor::
InitializePlatformSpecific(CallInterfaceDescriptorData* data) {
Register registers[] = {
edi, // math rounding function
edx, // vector slot id
};
data->InitializePlatformSpecific(arraysize(registers), registers);
}
void MathRoundVariantCallFromOptimizedCodeDescriptor::
InitializePlatformSpecific(CallInterfaceDescriptorData* data) {
Register registers[] = {
edi, // math rounding function
edx, // vector slot id
ebx // type vector
};
data->InitializePlatformSpecific(arraysize(registers), registers);
}
void InterpreterPushArgsAndCallDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
Register registers[] = {

View File

@ -81,6 +81,12 @@ void AllocateMutableHeapNumberDescriptor::InitializePlatformSpecific(
}
void VoidDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
data->InitializePlatformSpecific(0, nullptr);
}
Type::FunctionType* LoadDescriptor::BuildCallInterfaceDescriptorFunctionType(
Isolate* isolate, int paramater_count) {
Zone* zone = isolate->interface_descriptor_zone();
@ -92,6 +98,7 @@ Type::FunctionType* LoadDescriptor::BuildCallInterfaceDescriptorFunctionType(
return function;
}
void LoadDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
Register registers[] = {ReceiverRegister(), NameRegister(), SlotRegister()};
@ -546,32 +553,5 @@ ApiAccessorDescriptor::BuildCallInterfaceDescriptorFunctionType(
}
Type::FunctionType* MathRoundVariantCallFromUnoptimizedCodeDescriptor::
BuildCallInterfaceDescriptorFunctionType(Isolate* isolate,
int paramater_count) {
Zone* zone = isolate->interface_descriptor_zone();
Type::FunctionType* function =
Type::FunctionType::New(AnyTagged(zone), Type::Undefined(), 4, zone);
function->InitParameter(0, Type::Receiver());
function->InitParameter(1, SmiType(zone));
function->InitParameter(2, AnyTagged(zone));
function->InitParameter(3, AnyTagged(zone));
return function;
}
Type::FunctionType* MathRoundVariantCallFromOptimizedCodeDescriptor::
BuildCallInterfaceDescriptorFunctionType(Isolate* isolate,
int paramater_count) {
Zone* zone = isolate->interface_descriptor_zone();
Type::FunctionType* function =
Type::FunctionType::New(AnyTagged(zone), Type::Undefined(), 5, zone);
function->InitParameter(0, Type::Receiver());
function->InitParameter(1, SmiType(zone));
function->InitParameter(2, AnyTagged(zone));
function->InitParameter(3, AnyTagged(zone));
function->InitParameter(4, AnyTagged(zone));
return function;
}
} // namespace internal
} // namespace v8

View File

@ -14,6 +14,7 @@ namespace internal {
class PlatformInterfaceDescriptor;
#define INTERFACE_DESCRIPTOR_LIST(V) \
V(Void) \
V(Load) \
V(Store) \
V(StoreTransition) \
@ -73,8 +74,6 @@ class PlatformInterfaceDescriptor;
V(MathPowInteger) \
V(ContextOnly) \
V(GrowArrayElements) \
V(MathRoundVariantCallFromUnoptimizedCode) \
V(MathRoundVariantCallFromOptimizedCode) \
V(InterpreterPushArgsAndCall) \
V(InterpreterPushArgsAndConstruct) \
V(InterpreterCEntry)
@ -233,6 +232,14 @@ class CallInterfaceDescriptor {
Isolate* isolate, int register_param_count) override; \
\
public:
class VoidDescriptor : public CallInterfaceDescriptor {
public:
DECLARE_DESCRIPTOR(VoidDescriptor, CallInterfaceDescriptor)
};
// LoadDescriptor is used by all stubs that implement Load/KeyedLoad ICs.
class LoadDescriptor : public CallInterfaceDescriptor {
public:
@ -719,23 +726,6 @@ class MathPowIntegerDescriptor : public CallInterfaceDescriptor {
};
class MathRoundVariantCallFromOptimizedCodeDescriptor
: public CallInterfaceDescriptor {
public:
DECLARE_DESCRIPTOR_WITH_CUSTOM_FUNCTION_TYPE(
MathRoundVariantCallFromOptimizedCodeDescriptor, CallInterfaceDescriptor)
};
class MathRoundVariantCallFromUnoptimizedCodeDescriptor
: public CallInterfaceDescriptor {
public:
DECLARE_DESCRIPTOR_WITH_CUSTOM_FUNCTION_TYPE(
MathRoundVariantCallFromUnoptimizedCodeDescriptor,
CallInterfaceDescriptor)
};
class ContextOnlyDescriptor : public CallInterfaceDescriptor {
public:
DECLARE_DESCRIPTOR(ContextOnlyDescriptor, CallInterfaceDescriptor)

View File

@ -2202,12 +2202,6 @@ bool Isolate::Init(Deserializer* des) {
// occur, clearing/updating ICs.
runtime_profiler_ = new RuntimeProfiler(this);
if (create_heap_objects) {
if (!bootstrapper_->CreateCodeStubContext(this)) {
return false;
}
}
// If we are deserializing, read the state into the now-empty heap.
if (!create_heap_objects) {
des->Deserialize(this);

View File

@ -1,69 +0,0 @@
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
(function(global, code_stubs) {
"use strict";
code_stubs.StringLengthTFStub = function StringLengthTFStub(call_conv, minor_key) {
var stub = function(receiver, name, i, v) {
// i and v are dummy parameters mandated by the InterfaceDescriptor,
// (LoadWithVectorDescriptor).
return %_StringGetLength(%_JSValueGetValue(receiver));
}
return stub;
}
code_stubs.StringAddTFStub = function StringAddTFStub(call_conv, minor_key) {
var stub = function(left, right) {
return %StringAdd(left, right);
}
return stub;
}
const kTurboFanICCallModeMask = 1;
const kTurboFanICCallForUnptimizedCode = 0;
const kTurboFanICCallForOptimizedCode = 1;
code_stubs.MathFloorStub = function MathFloorStub(call_conv, minor_key) {
var call_from_optimized_ic = function(f, i, tv, receiver, v) {
"use strict";
// |f| is this function's JSFunction
// |i| is TypeFeedbackVector slot # of callee's CallIC for Math.floor call
// |receiver| is receiver, should not be used
// |tv| is the calling function's type vector
// |v| is the value to floor
if (f !== %_FixedArrayGet(tv, i|0)) {
return %_Call(f, receiver, v);
}
var r = %_MathFloor(+v);
if (%_IsMinusZero(r)) {
// Collect type feedback when the result of the floor is -0. This is
// accomplished by storing a sentinel in the second, "extra"
// TypeFeedbackVector slot corresponding to the Math.floor CallIC call in
// the caller's TypeVector.
%_FixedArraySet(tv, ((i|0)+1)|0, 1);
return -0;
}
// Return integers in smi range as smis.
var trunc = r|0;
if (trunc === r) {
return trunc;
}
return r;
}
var call_mode = (minor_key & kTurboFanICCallModeMask);
if (call_mode == kTurboFanICCallForOptimizedCode) {
return call_from_optimized_ic;
} else {
%SetForceInlineFlag(call_from_optimized_ic);
var call_from_unoptimized_ic = function(f, i, receiver, v) {
var tv = %_GetTypeFeedbackVector(%_GetCallerJSFunction());
return call_from_optimized_ic(f, i, tv, receiver, v);
}
return call_from_unoptimized_ic;
}
}
})

View File

@ -391,27 +391,6 @@ void ApiAccessorDescriptor::InitializePlatformSpecific(
}
void MathRoundVariantCallFromUnoptimizedCodeDescriptor::
InitializePlatformSpecific(CallInterfaceDescriptorData* data) {
Register registers[] = {
a1, // math rounding function
a3, // vector slot id
};
data->InitializePlatformSpecific(arraysize(registers), registers);
}
void MathRoundVariantCallFromOptimizedCodeDescriptor::
InitializePlatformSpecific(CallInterfaceDescriptorData* data) {
Register registers[] = {
a1, // math rounding function
a3, // vector slot id
a2, // type vector
};
data->InitializePlatformSpecific(arraysize(registers), registers);
}
void InterpreterPushArgsAndCallDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
Register registers[] = {

View File

@ -391,27 +391,6 @@ void ApiAccessorDescriptor::InitializePlatformSpecific(
}
void MathRoundVariantCallFromUnoptimizedCodeDescriptor::
InitializePlatformSpecific(CallInterfaceDescriptorData* data) {
Register registers[] = {
a1, // math rounding function
a3, // vector slot id
};
data->InitializePlatformSpecific(arraysize(registers), registers);
}
void MathRoundVariantCallFromOptimizedCodeDescriptor::
InitializePlatformSpecific(CallInterfaceDescriptorData* data) {
Register registers[] = {
a1, // math rounding function
a3, // vector slot id
a2, // type vector
};
data->InitializePlatformSpecific(arraysize(registers), registers);
}
void InterpreterPushArgsAndCallDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
Register registers[] = {

View File

@ -386,30 +386,6 @@ RUNTIME_FUNCTION(Runtime_HarmonyToString) {
}
RUNTIME_FUNCTION(Runtime_GetTypeFeedbackVector) {
SealHandleScope shs(isolate);
DCHECK(args.length() == 1);
CONVERT_ARG_CHECKED(JSFunction, function, 0);
return function->shared()->feedback_vector();
}
RUNTIME_FUNCTION(Runtime_GetCallerJSFunction) {
SealHandleScope shs(isolate);
StackFrameIterator it(isolate);
RUNTIME_ASSERT(it.frame()->type() == StackFrame::STUB);
it.Advance();
RUNTIME_ASSERT(it.frame()->type() == StackFrame::JAVA_SCRIPT);
return JavaScriptFrame::cast(it.frame())->function();
}
RUNTIME_FUNCTION(Runtime_GetCodeStubExportsObject) {
HandleScope shs(isolate);
return isolate->heap()->code_stub_exports_object();
}
namespace {
Handle<String> RenderCallSite(Isolate* isolate, Handle<Object> object) {

View File

@ -378,8 +378,7 @@ RUNTIME_FUNCTION(Runtime_AbortJS) {
RUNTIME_FUNCTION(Runtime_NativeScriptsCount) {
DCHECK(args.length() == 0);
return Smi::FromInt(Natives::GetBuiltinsCount() +
ExtraNatives::GetBuiltinsCount() +
CodeStubNatives::GetBuiltinsCount());
ExtraNatives::GetBuiltinsCount());
}

View File

@ -350,9 +350,6 @@ namespace internal {
F(IS_VAR, 1, 1) \
F(IncrementStatsCounter, 1, 1) \
F(HarmonyToString, 0, 1) \
F(GetTypeFeedbackVector, 1, 1) \
F(GetCallerJSFunction, 0, 1) \
F(GetCodeStubExportsObject, 0, 1) \
F(ThrowConstructedNonConstructable, 1, 1) \
F(ThrowCalledNonCallable, 1, 1)

View File

@ -35,12 +35,6 @@ FixedArray* NativesCollection<EXPERIMENTAL_EXTRAS>::GetSourceCache(Heap* heap) {
}
template <>
FixedArray* NativesCollection<CODE_STUB>::GetSourceCache(Heap* heap) {
return heap->code_stub_natives_source_cache();
}
template <NativeType type>
void NativesCollection<type>::UpdateSourceCache(Heap* heap) {
for (int i = 0; i < GetBuiltinsCount(); i++) {
@ -54,7 +48,6 @@ void NativesCollection<type>::UpdateSourceCache(Heap* heap) {
// Explicit template instantiations.
template void NativesCollection<CORE>::UpdateSourceCache(Heap* heap);
template void NativesCollection<CODE_STUB>::UpdateSourceCache(Heap* heap);
template void NativesCollection<EXPERIMENTAL>::UpdateSourceCache(Heap* heap);
template void NativesCollection<EXTRAS>::UpdateSourceCache(Heap* heap);
template void NativesCollection<EXPERIMENTAL_EXTRAS>::UpdateSourceCache(

View File

@ -157,7 +157,6 @@ void ReadNatives() {
if (natives_blob_ && NativesHolder<CORE>::empty()) {
SnapshotByteSource bytes(natives_blob_->data, natives_blob_->raw_size);
NativesHolder<CORE>::set(NativesStore::MakeFromScriptsSource(&bytes));
NativesHolder<CODE_STUB>::set(NativesStore::MakeFromScriptsSource(&bytes));
NativesHolder<EXPERIMENTAL>::set(
NativesStore::MakeFromScriptsSource(&bytes));
NativesHolder<EXTRAS>::set(NativesStore::MakeFromScriptsSource(&bytes));
@ -188,7 +187,6 @@ void SetNativesFromFile(StartupData* natives_blob) {
*/
void DisposeNatives() {
NativesHolder<CORE>::Dispose();
NativesHolder<CODE_STUB>::Dispose();
NativesHolder<EXPERIMENTAL>::Dispose();
NativesHolder<EXTRAS>::Dispose();
NativesHolder<EXPERIMENTAL_EXTRAS>::Dispose();
@ -241,7 +239,6 @@ Vector<const char> NativesCollection<type>::GetScriptsSource() {
template Vector<const char> NativesCollection<T>::GetScriptName(int i); \
template Vector<const char> NativesCollection<T>::GetScriptsSource();
INSTANTIATE_TEMPLATES(CORE)
INSTANTIATE_TEMPLATES(CODE_STUB)
INSTANTIATE_TEMPLATES(EXPERIMENTAL)
INSTANTIATE_TEMPLATES(EXTRAS)
INSTANTIATE_TEMPLATES(EXPERIMENTAL_EXTRAS)

View File

@ -15,7 +15,6 @@ namespace internal {
enum NativeType {
CORE,
CODE_STUB,
EXPERIMENTAL,
EXTRAS,
EXPERIMENTAL_EXTRAS,
@ -49,7 +48,6 @@ class NativesCollection {
};
typedef NativesCollection<CORE> Natives;
typedef NativesCollection<CODE_STUB> CodeStubNatives;
typedef NativesCollection<EXPERIMENTAL> ExperimentalNatives;
typedef NativesCollection<EXTRAS> ExtraNatives;
typedef NativesCollection<EXPERIMENTAL_EXTRAS> ExperimentalExtraNatives;

View File

@ -540,8 +540,7 @@ void Deserializer::Deserialize(Isolate* isolate) {
}
isolate_->heap()->set_native_contexts_list(
isolate_->heap()->code_stub_context());
isolate_->heap()->undefined_value());
// The allocation site list is build during root iteration, but if no sites
// were encountered then it needs to be initialized to undefined.
if (isolate_->heap()->allocation_sites_list() == Smi::FromInt(0)) {
@ -552,7 +551,6 @@ void Deserializer::Deserialize(Isolate* isolate) {
// Update data pointers to the external strings containing natives sources.
Natives::UpdateSourceCache(isolate_->heap());
ExtraNatives::UpdateSourceCache(isolate_->heap());
CodeStubNatives::UpdateSourceCache(isolate_->heap());
// Issue code events for newly deserialized code objects.
LOG_CODE_EVENT(isolate_, LogCodeObjects());
@ -1168,11 +1166,6 @@ bool Deserializer::ReadData(Object** current, Object** limit, int source_space,
ExtraNatives::GetScriptSource(source_.Get()), current);
break;
case kCodeStubNativesStringResource:
current = CopyInNativesSource(
CodeStubNatives::GetScriptSource(source_.Get()), current);
break;
// Deserialize raw data of variable length.
case kVariableRawData: {
int size_in_bytes = source_.GetInt();
@ -1679,10 +1672,7 @@ StartupSerializer::StartupSerializer(Isolate* isolate, SnapshotByteSink* sink)
void StartupSerializer::SerializeObject(HeapObject* obj, HowToCode how_to_code,
WhereToPoint where_to_point, int skip) {
// Make sure that all functions are derived from the code-stub context
DCHECK(!obj->IsJSFunction() ||
JSFunction::cast(obj)->GetCreationContext() ==
isolate()->heap()->code_stub_context());
DCHECK(!obj->IsJSFunction());
int root_index = root_index_map_.Lookup(obj);
// We can only encode roots as such if it has already been serialized.
@ -2207,12 +2197,6 @@ void Serializer::ObjectSerializer::VisitExternalOneByteString(
kExtraNativesStringResource)) {
return;
}
if (SerializeExternalNativeSourceString(
CodeStubNatives::GetBuiltinsCount(), resource_pointer,
CodeStubNatives::GetSourceCache(serializer_->isolate()->heap()),
kCodeStubNativesStringResource)) {
return;
}
// One of the strings in the natives cache should match the resource. We
// don't expect any other kinds of external strings here.
UNREACHABLE();

View File

@ -221,10 +221,7 @@ class SerializerDeserializer: public ObjectVisitor {
static const int kNativesStringResource = 0x5d;
// Used for the source code for compiled stubs, which is in the executable,
// but is referred to from external strings in the snapshot.
static const int kCodeStubNativesStringResource = 0x5e;
// Used for the source code for V8 extras, which is in the executable,
// but is referred to from external strings in the snapshot.
static const int kExtraNativesStringResource = 0x5f;
static const int kExtraNativesStringResource = 0x5e;
// A tag emitted at strategic points in the snapshot to delineate sections.
// If the deserializer does not find these at the expected moments then it
// is an indication that the snapshot and the VM do not fit together.

View File

@ -391,27 +391,6 @@ void ApiAccessorDescriptor::InitializePlatformSpecific(
}
void MathRoundVariantCallFromUnoptimizedCodeDescriptor::
InitializePlatformSpecific(CallInterfaceDescriptorData* data) {
Register registers[] = {
rdi, // math rounding function
rdx, // vector slot id
};
data->InitializePlatformSpecific(arraysize(registers), registers);
}
void MathRoundVariantCallFromOptimizedCodeDescriptor::
InitializePlatformSpecific(CallInterfaceDescriptorData* data) {
Register registers[] = {
rdi, // math rounding function
rdx, // vector slot id
rbx // type vector
};
data->InitializePlatformSpecific(arraysize(registers), registers);
}
void InterpreterPushArgsAndCallDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
Register registers[] = {

View File

@ -52,6 +52,7 @@
'compiler/test-basic-block-profiler.cc',
'compiler/test-branch-combine.cc',
'compiler/test-changes-lowering.cc',
'compiler/test-code-stub-assembler.cc',
'compiler/test-gap-resolver.cc',
'compiler/test-graph-visualizer.cc',
'compiler/test-instruction.cc',

View File

@ -35,18 +35,30 @@ class FunctionTester : public InitializedHandleScope {
CHECK_EQ(0u, flags_ & ~supported_flags);
}
// TODO(turbofan): generalize FunctionTester to work with N arguments. Now, it
// can handle up to four.
explicit FunctionTester(Graph* graph)
FunctionTester(Graph* graph, int param_count)
: isolate(main_isolate()),
function(NewFunction("(function(a,b,c,d){})")),
function(NewFunction(BuildFunction(param_count).c_str())),
flags_(0) {
CompileGraph(graph);
}
FunctionTester(const CallInterfaceDescriptor& descriptor, Handle<Code> code)
: isolate(main_isolate()),
function(
(FLAG_allow_natives_syntax = true,
NewFunction(BuildFunctionFromDescriptor(descriptor).c_str()))),
flags_(0) {
Compile(function);
function->ReplaceCode(*code);
}
Isolate* isolate;
Handle<JSFunction> function;
MaybeHandle<Object> Call() {
return Execution::Call(isolate, function, undefined(), 0, nullptr);
}
MaybeHandle<Object> Call(Handle<Object> a, Handle<Object> b) {
Handle<Object> args[] = {a, b};
return Execution::Call(isolate, function, undefined(), 2, args);
@ -180,10 +192,10 @@ class FunctionTester : public InitializedHandleScope {
return function;
}
static Handle<JSFunction> ForMachineGraph(Graph* graph) {
static Handle<JSFunction> ForMachineGraph(Graph* graph, int param_count) {
JSFunction* p = NULL;
{ // because of the implicit handle scope of FunctionTester.
FunctionTester f(graph);
FunctionTester f(graph, param_count);
p = *f.function;
}
return Handle<JSFunction>(p); // allocated in outer handle scope.
@ -192,6 +204,25 @@ class FunctionTester : public InitializedHandleScope {
private:
uint32_t flags_;
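// Builds the source of a JS wrapper with |param_count| parameters, e.g.
// BuildFunction(2) yields "(function(a,b){})".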
std::string BuildFunction(int param_count) {
std::string function_string = "(function(";
if (param_count > 0) {
char next = 'a';
function_string += next;
while (--param_count > 0) {
function_string += ',';
function_string += ++next;
}
}
function_string += "){})";
return function_string;
}
std::string BuildFunctionFromDescriptor(
const CallInterfaceDescriptor& descriptor) {
return BuildFunction(descriptor.GetParameterCount());
}
// Compile the given machine graph instead of the source of the function
// and replace the JSFunction's code with the result.
Handle<JSFunction> CompileGraph(Graph* graph) {
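For orientation, a minimal sketch of how the two new constructors are meant to be used; the names graph, descriptor and code are placeholders for the objects built in the cctests further down:
  // Wrap a hand-built machine graph in a JSFunction with exactly two
  // parameters (BuildFunction(2) produces "(function(a,b){})").
  FunctionTester ft_graph(&graph, 2);
  // Wrap already-generated stub code; the parameter count comes from the
  // stub's call interface descriptor.
  FunctionTester ft_stub(descriptor, code);
  MaybeHandle<Object> result = ft_stub.Call();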

View File

@ -0,0 +1,125 @@
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/interface-descriptors.h"
#include "src/isolate.h"
#include "test/cctest/compiler/function-tester.h"
namespace v8 {
namespace internal {
namespace compiler {
class CodeStubAssemblerTester : public CodeStubAssembler {
public:
CodeStubAssemblerTester(Isolate* isolate,
const CallInterfaceDescriptor& descriptor)
: CodeStubAssembler(isolate, isolate->runtime_zone(), descriptor,
Code::STUB, "test"),
scope_(isolate) {}
private:
HandleScope scope_;
LocalContext context_;
};
TEST(SimpleSmiReturn) {
Isolate* isolate(CcTest::InitIsolateOnce());
VoidDescriptor descriptor(isolate);
CodeStubAssemblerTester m(isolate, descriptor);
m.Return(m.SmiTag(m.Int32Constant(37)));
Handle<Code> code = m.GenerateCode();
FunctionTester ft(descriptor, code);
MaybeHandle<Object> result = ft.Call();
CHECK_EQ(37, Handle<Smi>::cast(result.ToHandleChecked())->value());
}
TEST(SimpleIntPtrReturn) {
Isolate* isolate(CcTest::InitIsolateOnce());
VoidDescriptor descriptor(isolate);
CodeStubAssemblerTester m(isolate, descriptor);
int test;
m.Return(m.IntPtrConstant(reinterpret_cast<intptr_t>(&test)));
Handle<Code> code = m.GenerateCode();
FunctionTester ft(descriptor, code);
MaybeHandle<Object> result = ft.Call();
CHECK_EQ(reinterpret_cast<intptr_t>(&test),
reinterpret_cast<intptr_t>(*result.ToHandleChecked()));
}
TEST(SimpleDoubleReturn) {
Isolate* isolate(CcTest::InitIsolateOnce());
VoidDescriptor descriptor(isolate);
CodeStubAssemblerTester m(isolate, descriptor);
m.Return(m.NumberConstant(0.5));
Handle<Code> code = m.GenerateCode();
FunctionTester ft(descriptor, code);
MaybeHandle<Object> result = ft.Call();
CHECK_EQ(0.5, Handle<HeapNumber>::cast(result.ToHandleChecked())->value());
}
TEST(SimpleCallRuntime1Arg) {
Isolate* isolate(CcTest::InitIsolateOnce());
VoidDescriptor descriptor(isolate);
CodeStubAssemblerTester m(isolate, descriptor);
Node* context = m.HeapConstant(Handle<Context>(isolate->native_context()));
Node* b = m.SmiTag(m.Int32Constant(256));
m.Return(m.CallRuntime(Runtime::kMathSqrt, context, b));
Handle<Code> code = m.GenerateCode();
FunctionTester ft(descriptor, code);
MaybeHandle<Object> result = ft.Call();
CHECK_EQ(16, Handle<Smi>::cast(result.ToHandleChecked())->value());
}
TEST(SimpleTailCallRuntime1Arg) {
Isolate* isolate(CcTest::InitIsolateOnce());
VoidDescriptor descriptor(isolate);
CodeStubAssemblerTester m(isolate, descriptor);
Node* context = m.HeapConstant(Handle<Context>(isolate->native_context()));
Node* b = m.SmiTag(m.Int32Constant(256));
m.TailCallRuntime(Runtime::kMathSqrt, context, b);
Handle<Code> code = m.GenerateCode();
FunctionTester ft(descriptor, code);
MaybeHandle<Object> result = ft.Call();
CHECK_EQ(16, Handle<Smi>::cast(result.ToHandleChecked())->value());
}
TEST(SimpleCallRuntime2Arg) {
Isolate* isolate(CcTest::InitIsolateOnce());
VoidDescriptor descriptor(isolate);
CodeStubAssemblerTester m(isolate, descriptor);
Node* context = m.HeapConstant(Handle<Context>(isolate->native_context()));
Node* a = m.SmiTag(m.Int32Constant(2));
Node* b = m.SmiTag(m.Int32Constant(4));
m.Return(m.CallRuntime(Runtime::kMathPow, context, a, b));
Handle<Code> code = m.GenerateCode();
FunctionTester ft(descriptor, code);
MaybeHandle<Object> result = ft.Call();
CHECK_EQ(16, Handle<Smi>::cast(result.ToHandleChecked())->value());
}
TEST(SimpleTailCallRuntime2Arg) {
Isolate* isolate(CcTest::InitIsolateOnce());
VoidDescriptor descriptor(isolate);
CodeStubAssemblerTester m(isolate, descriptor);
Node* context = m.HeapConstant(Handle<Context>(isolate->native_context()));
Node* a = m.SmiTag(m.Int32Constant(2));
Node* b = m.SmiTag(m.Int32Constant(4));
m.TailCallRuntime(Runtime::kMathPow, context, a, b);
Handle<Code> code = m.GenerateCode();
FunctionTester ft(descriptor, code);
MaybeHandle<Object> result = ft.Call();
CHECK_EQ(16, Handle<Smi>::cast(result.ToHandleChecked())->value());
}
} // namespace compiler
} // namespace internal
} // namespace v8
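Each test above follows the same pattern: assemble a tiny graph, generate code, wrap it with the descriptor-based FunctionTester, call it, and check the unwrapped result. As a sketch of that pattern, a hypothetical extra case combining two of the calls already used above (NumberConstant and CallRuntime) might read:
  TEST(SimpleCallRuntime1ArgHeapNumber) {  // hypothetical test, not in this CL
    Isolate* isolate(CcTest::InitIsolateOnce());
    VoidDescriptor descriptor(isolate);
    CodeStubAssemblerTester m(isolate, descriptor);
    Node* context = m.HeapConstant(Handle<Context>(isolate->native_context()));
    // A non-integral argument, so the runtime result is a HeapNumber.
    Node* a = m.NumberConstant(6.25);
    m.Return(m.CallRuntime(Runtime::kMathSqrt, context, a));
    Handle<Code> code = m.GenerateCode();
    FunctionTester ft(descriptor, code);
    MaybeHandle<Object> result = ft.Call();
    CHECK_EQ(2.5, Handle<HeapNumber>::cast(result.ToHandleChecked())->value());
  }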

View File

@ -22,55 +22,13 @@ namespace internal {
namespace compiler {
TEST(RunOptimizedMathFloorStub) {
HandleAndZoneScope scope;
Isolate* isolate = scope.main_isolate();
// Create code and an accompanying descriptor.
MathFloorStub stub(isolate, TurboFanIC::CALL_FROM_OPTIMIZED_CODE);
Handle<Code> code = stub.GenerateCode();
Zone* zone = scope.main_zone();
CompilationInfo info(&stub, isolate, zone);
CallDescriptor* descriptor = Linkage::ComputeIncoming(zone, &info);
Handle<FixedArray> tv = isolate->factory()->NewFixedArray(10);
// Create a function to call the code using the descriptor.
Graph graph(zone);
CommonOperatorBuilder common(zone);
JSOperatorBuilder javascript(zone);
MachineOperatorBuilder machine(zone);
JSGraph js(isolate, &graph, &common, &javascript, nullptr, &machine);
// FunctionTester (ab)uses a 2-argument function
Node* start = graph.NewNode(common.Start(4));
// Parameter 0 is the number to round
Node* numberParam = graph.NewNode(common.Parameter(1), start);
Node* theCode = graph.NewNode(common.HeapConstant(code));
Node* vector = graph.NewNode(common.HeapConstant(tv));
Node* dummyContext = graph.NewNode(common.NumberConstant(0.0));
Node* call =
graph.NewNode(common.Call(descriptor), theCode, js.UndefinedConstant(),
js.OneConstant(), vector, js.UndefinedConstant(),
numberParam, dummyContext, start, start);
Node* ret = graph.NewNode(common.Return(), call, call, start);
Node* end = graph.NewNode(common.End(1), ret);
graph.SetStart(start);
graph.SetEnd(end);
FunctionTester ft(&graph);
Handle<Object> value = ft.Val(1.5);
Handle<Object> result = ft.Call(value, value).ToHandleChecked();
CHECK_EQ(1, Smi::cast(*result)->value());
}
TEST(RunStringLengthTFStub) {
TEST(RunStringLengthStub) {
HandleAndZoneScope scope;
Isolate* isolate = scope.main_isolate();
Zone* zone = scope.main_zone();
// Create code and an accompanying descriptor.
StringLengthTFStub stub(isolate);
StringLengthStub stub(isolate);
Handle<Code> code = stub.GenerateCode();
CompilationInfo info(&stub, isolate, zone);
CallDescriptor* descriptor = Linkage::ComputeIncoming(zone, &info);
@ -94,7 +52,7 @@ TEST(RunStringLengthTFStub) {
Node* end = graph.NewNode(common.End(1), ret);
graph.SetStart(start);
graph.SetEnd(end);
FunctionTester ft(&graph);
FunctionTester ft(&graph, 4);
// Actually call through to the stub, verifying its result.
const char* testString = "Und das Lamm schrie HURZ!";
@ -109,42 +67,6 @@ TEST(RunStringLengthTFStub) {
}
TEST(RunStringAddTFStub) {
HandleAndZoneScope scope;
Isolate* isolate = scope.main_isolate();
Zone* zone = scope.main_zone();
// Create code and an accompanying descriptor.
StringAddTFStub stub(isolate, STRING_ADD_CHECK_BOTH, NOT_TENURED);
Handle<Code> code = stub.GenerateCode();
CompilationInfo info(&stub, isolate, zone);
CallDescriptor* descriptor = Linkage::ComputeIncoming(zone, &info);
// Create a function to call the code using the descriptor.
Graph graph(zone);
CommonOperatorBuilder common(zone);
// FunctionTester (ab)uses a 2-argument function
Node* start = graph.NewNode(common.Start(4));
// Parameter 0 is the receiver
Node* leftParam = graph.NewNode(common.Parameter(1), start);
Node* rightParam = graph.NewNode(common.Parameter(2), start);
Node* theCode = graph.NewNode(common.HeapConstant(code));
Node* dummyContext = graph.NewNode(common.NumberConstant(0.0));
Node* call = graph.NewNode(common.Call(descriptor), theCode, leftParam,
rightParam, dummyContext, start, start);
Node* ret = graph.NewNode(common.Return(), call, call, start);
Node* end = graph.NewNode(common.End(1), ret);
graph.SetStart(start);
graph.SetEnd(end);
FunctionTester ft(&graph);
// Actually call through to the stub, verifying its result.
Handle<String> leftArg = ft.Val("links");
Handle<String> rightArg = ft.Val("rechts");
Handle<Object> result = ft.Call(leftArg, rightArg).ToHandleChecked();
CHECK(String::Equals(ft.Val("linksrechts"), Handle<String>::cast(result)));
}
} // namespace compiler
} // namespace internal
} // namespace v8
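One convention worth spelling out for these graph-built testers: the JSFunction wrapper created by FunctionTester passes the receiver as value parameter 0, so the first declared JS argument surfaces in the graph as Parameter(1); this is what the "Parameter 0 is ..." comments above refer to. A minimal sketch, with illustrative node names:
  Node* start = graph.NewNode(common.Start(4));
  Node* receiver = graph.NewNode(common.Parameter(0), start);  // implicit 'this'
  Node* firstArg = graph.NewNode(common.Parameter(1), start);  // JS argument 'a'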

View File

@ -83,7 +83,7 @@ class SimplifiedLoweringTester : public GraphBuilderTester<ReturnType> {
T* CallWithPotentialGC() {
// TODO(titzer): we wrap the code in a JSFunction here to reuse the
// JSEntryStub; that could be done with a special prologue or other stub.
Handle<JSFunction> fun = FunctionTester::ForMachineGraph(this->graph());
Handle<JSFunction> fun = FunctionTester::ForMachineGraph(this->graph(), 0);
Handle<Object>* args = NULL;
MaybeHandle<Object> result = Execution::Call(
this->isolate(), fun, factory()->undefined_value(), 0, args);

View File

@ -13218,8 +13218,7 @@ static int GetGlobalObjectsCount() {
count++;
}
}
// Subtract one to compensate for the code stub context that is always present
return count - 1;
return count;
}

View File

@ -96,11 +96,10 @@ class NamedEntriesDetector {
static const v8::HeapGraphNode* GetGlobalObject(
const v8::HeapSnapshot* snapshot) {
CHECK_EQ(3, snapshot->GetRoot()->GetChildrenCount());
// The 0th-child is (GC Roots), 1st is code stubs context, 2nd is the user
// root.
CHECK_EQ(2, snapshot->GetRoot()->GetChildrenCount());
// The 0th-child is (GC Roots), 1st is the user root.
const v8::HeapGraphNode* global_obj =
snapshot->GetRoot()->GetChild(2)->GetToNode();
snapshot->GetRoot()->GetChild(1)->GetToNode();
CHECK_EQ(0, strncmp("Object", const_cast<i::HeapEntry*>(
reinterpret_cast<const i::HeapEntry*>(global_obj))->name(), 6));
return global_obj;
@ -993,7 +992,7 @@ TEST(HeapSnapshotJSONSerialization) {
v8::Local<v8::String> ref_string =
CompileRun(STRING_LITERAL_FOR_TEST)->ToString(isolate);
#undef STRING_LITERAL_FOR_TEST
CHECK_LT(0, strcmp(*v8::String::Utf8Value(ref_string),
CHECK_EQ(0, strcmp(*v8::String::Utf8Value(ref_string),
*v8::String::Utf8Value(string)));
}
@ -2121,7 +2120,6 @@ TEST(NoDebugObjectInSnapshot) {
CHECK(ValidateSnapshot(snapshot));
const v8::HeapGraphNode* root = snapshot->GetRoot();
int globals_count = 0;
bool found = false;
for (int i = 0; i < root->GetChildrenCount(); ++i) {
const v8::HeapGraphEdge* edge = root->GetChild(i);
if (edge->GetType() == v8::HeapGraphEdge::kShortcut) {
@ -2129,13 +2127,10 @@ TEST(NoDebugObjectInSnapshot) {
const v8::HeapGraphNode* global = edge->GetToNode();
const v8::HeapGraphNode* foo =
GetProperty(global, v8::HeapGraphEdge::kProperty, "foo");
if (foo != nullptr) {
found = true;
}
CHECK(foo);
}
}
CHECK_EQ(2, globals_count);
CHECK(found);
CHECK_EQ(1, globals_count);
}

View File

@ -1643,8 +1643,7 @@ int CountNativeContexts() {
count++;
object = Context::cast(object)->get(Context::NEXT_CONTEXT_LINK);
}
// Subtract one to compensate for the code stub context that is always present
return count - 1;
return count;
}
@ -1783,8 +1782,7 @@ static int CountNativeContextsWithGC(Isolate* isolate, int n) {
Handle<Object>(Context::cast(*object)->get(Context::NEXT_CONTEXT_LINK),
isolate);
}
// Subtract one to compensate for the code stub context that is always present
return count - 1;
return count;
}
@ -2361,10 +2359,7 @@ static int NumberOfGlobalObjects() {
for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
if (obj->IsJSGlobalObject()) count++;
}
// Subtract two to compensate for the two global objects (not global
// JSObjects, of which there would only be one) that are part of the code stub
// context, which is always present.
return count - 1;
return count;
}

View File

@ -804,7 +804,7 @@ TEST(DontLeakContextOnObserve) {
}
CcTest::isolate()->ContextDisposedNotification();
CheckSurvivingGlobalObjectsCount(1);
CheckSurvivingGlobalObjectsCount(0);
}
@ -827,7 +827,7 @@ TEST(DontLeakContextOnGetNotifier) {
}
CcTest::isolate()->ContextDisposedNotification();
CheckSurvivingGlobalObjectsCount(1);
CheckSurvivingGlobalObjectsCount(0);
}
@ -858,7 +858,7 @@ TEST(DontLeakContextOnNotifierPerformChange) {
}
CcTest::isolate()->ContextDisposedNotification();
CheckSurvivingGlobalObjectsCount(1);
CheckSurvivingGlobalObjectsCount(0);
}

View File

@ -1,61 +0,0 @@
// Copyright 2015 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Flags: --allow-natives-syntax --noalways-opt --turbo-filter=*
var stubs = %GetCodeStubExportsObject();
const kExtraTypeFeedbackMinusZeroSentinel = 1;
const kFirstJSFunctionTypeFeedbackIndex = 5;
const kFirstSlotExtraTypeFeedbackIndex = 5;
(function() {
var stub1 = stubs.MathFloorStub("MathFloorStub", 1);
var tempForTypeVector = function(d) {
return Math.round(d);
}
tempForTypeVector(5);
var tv = %GetTypeFeedbackVector(tempForTypeVector);
var floorFunc1 = function(v, first) {
if (first) return;
return stub1(stub1, kFirstSlotExtraTypeFeedbackIndex - 1, tv, undefined, v);
};
%OptimizeFunctionOnNextCall(stub1);
floorFunc1(5, true);
%FixedArraySet(tv, kFirstSlotExtraTypeFeedbackIndex - 1, stub1);
assertTrue(kExtraTypeFeedbackMinusZeroSentinel !==
%FixedArrayGet(tv, kFirstSlotExtraTypeFeedbackIndex));
assertEquals(5.0, floorFunc1(5.5));
assertTrue(kExtraTypeFeedbackMinusZeroSentinel !==
%FixedArrayGet(tv, kFirstSlotExtraTypeFeedbackIndex));
// Executing floor such that it returns -0 should set the proper sentinel in
// the feedback vector.
assertEquals(-Infinity, 1/floorFunc1(-0));
assertEquals(kExtraTypeFeedbackMinusZeroSentinel,
%FixedArrayGet(tv, kFirstSlotExtraTypeFeedbackIndex));
%ClearFunctionTypeFeedback(floorFunc1);
})();

View File

@ -1,14 +0,0 @@
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// Flags: --allow-natives-syntax --noalways-opt --nostress-opt
// Ensure that "real" js functions that call GetCallerJSFunction get an
// exception, since they are not stubs.
(function() {
var a = function() {
return %_GetCallerJSFunction();
}
assertThrows(a);
}());

View File

@ -1,21 +0,0 @@
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// Flags: --allow-natives-syntax --turbo-filter=* --nostress-opt
// Test that for fully optimized but non inlined code, GetCallerJSFunction walks
// up a single stack frame to get the calling function. Full optimization elides
// the check in the runtime version of the intrinsic that would throw since the
// caller isn't a stub. It's a bit of a hack, but allows minimal testing of the
// intrinsic without writing a full-blown cctest.
(function() {
var a = function() {
return %_GetCallerJSFunction();
};
var b = function() {
return a();
};
%OptimizeFunctionOnNextCall(a);
assertEquals(b, b());
}());

View File

@ -181,7 +181,6 @@
],
'sources': [
'<(SHARED_INTERMEDIATE_DIR)/libraries.cc',
'<(SHARED_INTERMEDIATE_DIR)/code-stub-libraries.cc',
'<(SHARED_INTERMEDIATE_DIR)/experimental-libraries.cc',
'<(SHARED_INTERMEDIATE_DIR)/extras-libraries.cc',
'<(SHARED_INTERMEDIATE_DIR)/experimental-extras-libraries.cc',
@ -231,7 +230,6 @@
],
'sources': [
'<(SHARED_INTERMEDIATE_DIR)/libraries.cc',
'<(SHARED_INTERMEDIATE_DIR)/code-stub-libraries.cc',
'<(SHARED_INTERMEDIATE_DIR)/experimental-libraries.cc',
'<(SHARED_INTERMEDIATE_DIR)/extras-libraries.cc',
'<(SHARED_INTERMEDIATE_DIR)/experimental-extras-libraries.cc',
@ -483,6 +481,8 @@
'../../src/compiler/code-generator-impl.h',
'../../src/compiler/code-generator.cc',
'../../src/compiler/code-generator.h',
'../../src/compiler/code-stub-assembler.cc',
'../../src/compiler/code-stub-assembler.h',
'../../src/compiler/common-node-cache.cc',
'../../src/compiler/common-node-cache.h',
'../../src/compiler/common-operator-reducer.cc',
@ -1777,7 +1777,6 @@
'inputs': [
'../../tools/concatenate-files.py',
'<(SHARED_INTERMEDIATE_DIR)/libraries.bin',
'<(SHARED_INTERMEDIATE_DIR)/libraries-code-stub.bin',
'<(SHARED_INTERMEDIATE_DIR)/libraries-experimental.bin',
'<(SHARED_INTERMEDIATE_DIR)/libraries-extras.bin',
'<(SHARED_INTERMEDIATE_DIR)/libraries-experimental-extras.bin',
@ -1888,13 +1887,7 @@
'../../src/js/harmony-simd.js',
'../../src/js/promise-extra.js',
],
'code_stub_library_files': [
'../../src/js/macros.py',
'../../src/messages.h',
'../../src/js/code-stubs.js',
],
'libraries_bin_file': '<(SHARED_INTERMEDIATE_DIR)/libraries.bin',
'libraries_code_stub_bin_file': '<(SHARED_INTERMEDIATE_DIR)/libraries-code-stub.bin',
'libraries_experimental_bin_file': '<(SHARED_INTERMEDIATE_DIR)/libraries-experimental.bin',
'libraries_extras_bin_file': '<(SHARED_INTERMEDIATE_DIR)/libraries-extras.bin',
'libraries_experimental_extras_bin_file': '<(SHARED_INTERMEDIATE_DIR)/libraries-experimental-extras.bin',
@ -1968,38 +1961,6 @@
'--nojs',
],
},
{
'action_name': 'js2c_code_stubs',
'inputs': [
'../../tools/js2c.py',
'<@(code_stub_library_files)',
],
'outputs': ['<(SHARED_INTERMEDIATE_DIR)/code-stub-libraries.cc'],
'action': [
'python',
'../../tools/js2c.py',
'<(SHARED_INTERMEDIATE_DIR)/code-stub-libraries.cc',
'CODE_STUB',
'<@(code_stub_library_files)'
],
},
{
'action_name': 'js2c_code_stubs_bin',
'inputs': [
'../../tools/js2c.py',
'<@(code_stub_library_files)',
],
'outputs': ['<@(libraries_code_stub_bin_file)'],
'action': [
'python',
'../../tools/js2c.py',
'<(SHARED_INTERMEDIATE_DIR)/code-stub-libraries.cc',
'CODE_STUB',
'<@(code_stub_library_files)',
'--startup_blob', '<@(libraries_code_stub_bin_file)',
'--nojs',
],
},
{
'action_name': 'js2c_extras',
'inputs': [