Reland "[nojit] Remove code stubs"

This is a reland of f849396c3a

Original change's description:
> [nojit] Remove code stubs
>
> All stubs have been migrated to builtins. This CL removes most related
> code.
>
> Bug: v8:7777, v8:5784
> Change-Id: I4470cfef34788e6c8e0fd5fd09e40e250d088dad
> Reviewed-on: https://chromium-review.googlesource.com/c/1365284
> Commit-Queue: Jakob Gruber <jgruber@chromium.org>
> Reviewed-by: Benedikt Meurer <bmeurer@chromium.org>
> Reviewed-by: Jakob Kummerow <jkummerow@chromium.org>
> Reviewed-by: Michael Starzinger <mstarzinger@chromium.org>
> Reviewed-by: Yang Guo <yangguo@chromium.org>
> Cr-Commit-Position: refs/heads/master@{#58093}

Tbr: mstarzinger@chromium.org,yangguo@chromium.org,jkummerow@chromium.org,bmeurer@chromium.org
Bug: v8:7777, v8:5784
Change-Id: I005ee2a820d49a75a90481d262a310e4ccfd1391
Reviewed-on: https://chromium-review.googlesource.com/c/1367746
Reviewed-by: Jakob Gruber <jgruber@chromium.org>
Commit-Queue: Jakob Gruber <jgruber@chromium.org>
Cr-Commit-Position: refs/heads/master@{#58101}
Authored by Jakob Gruber on 2018-12-07 15:53:22 +01:00; committed by Commit Bot
parent c99604d98c
commit 24e766168b
112 changed files with 71 additions and 3008 deletions
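For orientation, a minimal sketch of the call-site change this migration implies (ExampleStub, the Example builtin, and the surrounding isolate/masm context are illustrative assumptions, not code from this CL):

  // Before (pattern removed by this CL): materialize a stub object and go
  // through CallStub(), which resolved to Call(stub->GetCode(), CODE_TARGET).
  ExampleStub stub(isolate);
  __ CallStub(&stub);

  // After: the stub has become a builtin, so call it directly.
  __ Call(BUILTIN_CODE(isolate, Example), RelocInfo::CODE_TARGET);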

View File

@ -1739,9 +1739,6 @@ v8_source_set("v8_base") {
"src/code-factory.h",
"src/code-reference.cc",
"src/code-reference.h",
"src/code-stubs-utils.h",
"src/code-stubs.cc",
"src/code-stubs.h",
"src/code-tracer.h",
"src/codegen.cc",
"src/codegen.h",

View File

@ -26,7 +26,6 @@
#include "src/bootstrapper.h"
#include "src/builtins/builtins-utils.h"
#include "src/char-predicates-inl.h"
#include "src/code-stubs.h"
#include "src/compiler-dispatcher/compiler-dispatcher.h"
#include "src/compiler.h"
#include "src/contexts.h"

View File

@ -42,7 +42,6 @@
#include "src/assembler-inl.h"
#include "src/base/bits.h"
#include "src/base/cpu.h"
#include "src/code-stubs.h"
#include "src/deoptimizer.h"
#include "src/macro-assembler.h"
#include "src/objects-inl.h"
@ -399,13 +398,6 @@ Operand Operand::EmbeddedNumber(double value) {
return result;
}
Operand Operand::EmbeddedCode(CodeStub* stub) {
Operand result(0, RelocInfo::CODE_TARGET);
result.is_heap_object_request_ = true;
result.value_.heap_object_request = HeapObjectRequest(stub);
return result;
}
Operand Operand::EmbeddedStringConstant(const StringConstantBase* str) {
Operand result(0, RelocInfo::EMBEDDED_OBJECT);
result.is_heap_object_request_ = true;
@ -476,10 +468,6 @@ void Assembler::AllocateAndInstallRequestedHeapObjects(Isolate* isolate) {
object =
isolate->factory()->NewHeapNumber(request.heap_number(), TENURED);
break;
case HeapObjectRequest::kCodeStub:
request.code_stub()->set_isolate(isolate);
object = request.code_stub()->GetCode();
break;
case HeapObjectRequest::kStringConstant: {
const StringConstantBase* str = request.string();
CHECK_NOT_NULL(str);

View File

@ -425,7 +425,6 @@ class Operand {
explicit Operand(Register rm, ShiftOp shift_op, Register rs);
static Operand EmbeddedNumber(double number); // Smi or HeapNumber.
static Operand EmbeddedCode(CodeStub* stub);
static Operand EmbeddedStringConstant(const StringConstantBase* str);
// Return true if this is a register operand.

View File

@ -13,7 +13,6 @@
#include "src/bootstrapper.h"
#include "src/callable.h"
#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/counters.h"
#include "src/debug/debug.h"
#include "src/double.h"
@ -1691,21 +1690,6 @@ void MacroAssembler::CompareRoot(Register obj, RootIndex index) {
cmp(obj, scratch);
}
void MacroAssembler::CallStub(CodeStub* stub,
Condition cond) {
DCHECK(AllowThisStubCall(stub)); // Stub calls are not allowed in some stubs.
Call(stub->GetCode(), RelocInfo::CODE_TARGET, cond, CAN_INLINE_TARGET_ADDRESS,
false);
}
void MacroAssembler::TailCallStub(CodeStub* stub, Condition cond) {
Jump(stub->GetCode(), RelocInfo::CODE_TARGET, cond);
}
bool TurboAssembler::AllowThisStubCall(CodeStub* stub) {
return has_frame() || !stub->SometimesSetsUpAFrame();
}
void MacroAssembler::TryDoubleToInt32Exact(Register result,
DwVfpRegister double_input,
LowDwVfpRegister double_scratch) {

View File

@ -294,8 +294,6 @@ class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase {
// Print a message to stdout and abort execution.
void Abort(AbortReason msg);
inline bool AllowThisStubCall(CodeStub* stub);
void LslPair(Register dst_low, Register dst_high, Register src_low,
Register src_high, Register shift);
void LslPair(Register dst_low, Register dst_high, Register src_low,
@ -512,8 +510,6 @@ class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase {
// the JS bitwise operations. See ECMA-262 9.5: ToInt32. Goes to 'done' if it
// succeeds, otherwise falls through if result is saturated. On return
// 'result' either holds answer, or is clobbered on fall through.
//
// Only public for the test code in test-code-stubs-arm.cc.
void TryInlineTruncateDoubleToI(Register result, DwVfpRegister input,
Label* done);
@ -764,13 +760,6 @@ class V8_EXPORT_PRIVATE MacroAssembler : public TurboAssembler {
// ---------------------------------------------------------------------------
// Runtime calls
// Call a code stub.
void CallStub(CodeStub* stub,
Condition cond = al);
// Call a code stub.
void TailCallStub(CodeStub* stub, Condition cond = al);
// Call a runtime routine.
void CallRuntime(const Runtime::Function* f,
int num_arguments,

View File

@ -340,8 +340,6 @@ Operand Operand::ToExtendedRegister() const {
Immediate Operand::immediate_for_heap_object_request() const {
DCHECK((heap_object_request().kind() == HeapObjectRequest::kHeapNumber &&
immediate_.rmode() == RelocInfo::EMBEDDED_OBJECT) ||
(heap_object_request().kind() == HeapObjectRequest::kCodeStub &&
immediate_.rmode() == RelocInfo::CODE_TARGET) ||
(heap_object_request().kind() == HeapObjectRequest::kStringConstant &&
immediate_.rmode() == RelocInfo::EMBEDDED_OBJECT));
return immediate_;

View File

@ -33,7 +33,6 @@
#include "src/arm64/assembler-arm64-inl.h"
#include "src/base/bits.h"
#include "src/base/cpu.h"
#include "src/code-stubs.h"
#include "src/frame-constants.h"
#include "src/register-configuration.h"
#include "src/string-constants.h"
@ -592,15 +591,6 @@ void Assembler::AllocateAndInstallRequestedHeapObjects(Isolate* isolate) {
set_target_address_at(pc, 0 /* unused */, object.address());
break;
}
case HeapObjectRequest::kCodeStub: {
request.code_stub()->set_isolate(isolate);
Instruction* instr = reinterpret_cast<Instruction*>(pc);
DCHECK(instr->IsBranchAndLink() || instr->IsUnconditionalBranch());
DCHECK_EQ(instr->ImmPCOffset() % kInstrSize, 0);
UpdateCodeTarget(instr->ImmPCOffset() >> kInstrSizeLog2,
request.code_stub()->GetCode());
break;
}
case HeapObjectRequest::kStringConstant: {
const StringConstantBase* str = request.string();
CHECK_NOT_NULL(str);
@ -1717,13 +1707,6 @@ Operand Operand::EmbeddedNumber(double number) {
return result;
}
Operand Operand::EmbeddedCode(CodeStub* stub) {
Operand result(0, RelocInfo::CODE_TARGET);
result.heap_object_request_.emplace(stub);
DCHECK(result.IsHeapObjectRequest());
return result;
}
Operand Operand::EmbeddedStringConstant(const StringConstantBase* str) {
Operand result(0, RelocInfo::EMBEDDED_OBJECT);
result.heap_object_request_.emplace(str);

View File

@ -737,7 +737,6 @@ class Operand {
unsigned shift_amount = 0);
static Operand EmbeddedNumber(double number); // Smi or HeapNumber.
static Operand EmbeddedCode(CodeStub* stub);
static Operand EmbeddedStringConstant(const StringConstantBase* str);
inline bool IsHeapObjectRequest() const;

View File

@ -10,7 +10,6 @@
#include "src/bootstrapper.h"
#include "src/callable.h"
#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/counters.h"
#include "src/debug/debug.h"
#include "src/external-reference-table.h"
@ -1715,15 +1714,6 @@ void TurboAssembler::AssertPositiveOrZero(Register value) {
}
}
void MacroAssembler::CallStub(CodeStub* stub) {
DCHECK(AllowThisStubCall(stub)); // Stub calls are not allowed in some stubs.
Call(stub->GetCode(), RelocInfo::CODE_TARGET);
}
void MacroAssembler::TailCallStub(CodeStub* stub) {
Jump(stub->GetCode(), RelocInfo::CODE_TARGET);
}
void TurboAssembler::CallRuntimeWithCEntry(Runtime::FunctionId fid,
Register centry) {
const Runtime::Function* f = Runtime::FunctionForId(fid);
@ -2755,10 +2745,6 @@ void MacroAssembler::TestAndSplit(const Register& reg,
}
}
bool TurboAssembler::AllowThisStubCall(CodeStub* stub) {
return has_frame() || !stub->SometimesSetsUpAFrame();
}
void MacroAssembler::PopSafepointRegisters() {
const int num_unsaved = kNumSafepointRegisters - kNumSafepointSavedRegisters;
DCHECK_GE(num_unsaved, 0);

View File

@ -555,8 +555,6 @@ class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase {
inline void Isb();
inline void Csdb();
bool AllowThisStubCall(CodeStub* stub);
// Call a runtime routine. This expects {centry} to contain a fitting CEntry
// builtin for the target runtime function and uses an indirect call.
void CallRuntimeWithCEntry(Runtime::FunctionId fid, Register centry);
@ -1159,8 +1157,6 @@ class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase {
//
// On successful conversion, the least significant 32 bits of the result are
// equivalent to the ECMA-262 operation "ToInt32".
//
// Only public for the test code in test-code-stubs-arm64.cc.
void TryConvertDoubleToInt64(Register result, DoubleRegister input,
Label* done);
@ -1252,7 +1248,6 @@ class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase {
bool allow_macro_instructions_ = true;
#endif
// Scratch registers available for use by the MacroAssembler.
CPURegList tmp_list_ = DefaultTmpList();
CPURegList fptmp_list_ = DefaultFPTmpList();
@ -1750,9 +1745,6 @@ class V8_EXPORT_PRIVATE MacroAssembler : public TurboAssembler {
// ---- Calling / Jumping helpers ----
void CallStub(CodeStub* stub);
void TailCallStub(CodeStub* stub);
void CallRuntime(const Runtime::Function* f,
int num_arguments,
SaveFPRegsMode save_doubles = kDontSaveFPRegs);

View File

@ -35,7 +35,6 @@
#include "src/assembler.h"
#include "src/assembler-inl.h"
#include "src/code-stubs.h"
#include "src/deoptimizer.h"
#include "src/disassembler.h"
#include "src/isolate.h"
@ -173,12 +172,6 @@ HeapObjectRequest::HeapObjectRequest(double heap_number, int offset)
DCHECK(!IsSmiDouble(value_.heap_number));
}
HeapObjectRequest::HeapObjectRequest(CodeStub* code_stub, int offset)
: kind_(kCodeStub), offset_(offset) {
value_.code_stub = code_stub;
DCHECK_NOT_NULL(value_.code_stub);
}
HeapObjectRequest::HeapObjectRequest(const StringConstantBase* string,
int offset)
: kind_(kStringConstant), offset_(offset) {

View File

@ -90,10 +90,9 @@ class JumpOptimizationInfo {
class HeapObjectRequest {
public:
explicit HeapObjectRequest(double heap_number, int offset = -1);
explicit HeapObjectRequest(CodeStub* code_stub, int offset = -1);
explicit HeapObjectRequest(const StringConstantBase* string, int offset = -1);
enum Kind { kHeapNumber, kCodeStub, kStringConstant };
enum Kind { kHeapNumber, kStringConstant };
Kind kind() const { return kind_; }
double heap_number() const {
@ -101,11 +100,6 @@ class HeapObjectRequest {
return value_.heap_number;
}
CodeStub* code_stub() const {
DCHECK_EQ(kind(), kCodeStub);
return value_.code_stub;
}
const StringConstantBase* string() const {
DCHECK_EQ(kind(), kStringConstant);
return value_.string;
@ -127,7 +121,6 @@ class HeapObjectRequest {
union {
double heap_number;
CodeStub* code_stub;
const StringConstantBase* string;
} value_;

View File

@ -12,7 +12,6 @@
#include "src/base/hashmap.h"
#include "src/builtins/builtins-constructor.h"
#include "src/builtins/builtins.h"
#include "src/code-stubs.h"
#include "src/contexts.h"
#include "src/conversions-inl.h"
#include "src/double.h"

View File

@ -8,7 +8,6 @@
#include "src/api-inl.h"
#include "src/api-natives.h"
#include "src/base/ieee754.h"
#include "src/code-stubs.h"
#include "src/compiler.h"
#include "src/counters.h"
#include "src/debug/debug.h"

View File

@ -7,7 +7,6 @@
#include "src/api-arguments.h"
#include "src/assembler-inl.h"
#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/counters.h"
#include "src/debug/debug.h"
#include "src/deoptimizer.h"

View File

@ -7,7 +7,6 @@
#include "src/api-arguments.h"
#include "src/arm64/macro-assembler-arm64-inl.h"
#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/counters.h"
#include "src/debug/debug.h"
#include "src/deoptimizer.h"

View File

@ -6,7 +6,6 @@
#include "src/api-arguments.h"
#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/counters.h"
#include "src/debug/debug.h"
#include "src/deoptimizer.h"

View File

@ -6,7 +6,6 @@
#include "src/api-arguments.h"
#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/counters.h"
#include "src/debug/debug.h"
#include "src/deoptimizer.h"

View File

@ -120,7 +120,7 @@ Code BuildWithMacroAssembler(Isolate* isolate, int32_t builtin_index,
Handle<Code> code = isolate->factory()->NewCode(
desc, Code::BUILTIN, masm.CodeObject(), builtin_index,
MaybeHandle<ByteArray>(), DeoptimizationData::Empty(isolate), kMovable, 0,
MaybeHandle<ByteArray>(), DeoptimizationData::Empty(isolate), kMovable,
kIsNotTurbofanned, kStackSlots, kSafepointTableOffset,
handler_table_offset);
PostBuildProfileAndTracing(isolate, *code, s_name);
@ -195,7 +195,7 @@ Code BuildWithCodeStubAssemblerCS(Isolate* isolate, int32_t builtin_index,
DCHECK_LE(0, descriptor.GetRegisterParameterCount());
compiler::CodeAssemblerState state(
isolate, &zone, descriptor, Code::BUILTIN, name,
PoisoningMitigationLevel::kDontPoison, 0, builtin_index);
PoisoningMitigationLevel::kDontPoison, builtin_index);
generator(&state);
Handle<Code> code = compiler::CodeAssembler::GenerateCode(
&state, BuiltinAssemblerOptions(isolate, builtin_index));

View File

@ -12,17 +12,6 @@
namespace v8 {
namespace internal {
namespace {
// TODO(ishell): make it (const Stub& stub) once CodeStub::GetCode() is const.
template <typename Stub>
Callable make_callable(Stub& stub) {
typedef typename Stub::Descriptor Descriptor;
return Callable(stub.GetCode(), Descriptor{});
}
} // namespace
// static
Handle<Code> CodeFactory::RuntimeCEntry(Isolate* isolate, int result_size) {
return CodeFactory::CEntry(isolate, result_size);

View File

@ -7,9 +7,9 @@
#include "src/allocation.h"
#include "src/callable.h"
#include "src/code-stubs.h"
#include "src/globals.h"
#include "src/interface-descriptors.h"
#include "src/type-hints.h"
namespace v8 {
namespace internal {
@ -61,9 +61,6 @@ class V8_EXPORT_PRIVATE CodeFactory final {
static Callable ApiGetter(Isolate* isolate);
static Callable CallApiCallback(Isolate* isolate);
// Code stubs. Add methods here as needed to reduce dependency on
// code-stubs.h.
static Callable NonPrimitiveToPrimitive(
Isolate* isolate, ToPrimitiveHint hint = ToPrimitiveHint::kDefault);
static Callable OrdinaryToPrimitive(Isolate* isolate,

View File

@ -10,6 +10,7 @@
#include "src/bailout-reason.h"
#include "src/base/macros.h"
#include "src/compiler/code-assembler.h"
#include "src/frames.h"
#include "src/globals.h"
#include "src/message-template.h"
#include "src/objects.h"

View File

@ -1,49 +0,0 @@
// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_CODE_STUBS_UTILS_H_
#define V8_CODE_STUBS_UTILS_H_
namespace v8 {
namespace internal {
namespace compiler {
class CodeAssemblerState;
} // namespace compiler
// ----------------------------------------------------------------------------
// Support macro for defining code stubs with Turbofan.
// ----------------------------------------------------------------------------
//
// A code stub generator is defined by writing:
//
// TF_STUB(name, code_assembler_base_class) {
// ...
// }
//
// In the body of the generator function the arguments can be accessed
// as "Parameter(n)".
#define TF_STUB(StubName, AssemblerBase) \
class StubName##Assembler : public AssemblerBase { \
public: \
typedef StubName::Descriptor Descriptor; \
\
explicit StubName##Assembler(compiler::CodeAssemblerState* state) \
: AssemblerBase(state) {} \
void Generate##StubName##Impl(const StubName* stub); \
\
Node* Parameter(Descriptor::ParameterIndices index) { \
return CodeAssembler::Parameter(static_cast<int>(index)); \
} \
}; \
void StubName::GenerateAssembly(compiler::CodeAssemblerState* state) const { \
StubName##Assembler assembler(state); \
assembler.Generate##StubName##Impl(this); \
} \
void StubName##Assembler::Generate##StubName##Impl(const StubName* stub)
} // namespace internal
} // namespace v8
#endif // V8_CODE_STUBS_UTILS_H_
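
A hypothetical instantiation of the TF_STUB pattern documented above, to make it concrete (ExampleStub and Descriptor::kValue are made-up names; the matching stub class and call interface descriptor would have to exist in code-stubs.h):

  TF_STUB(ExampleStub, CodeStubAssembler) {
    // Arguments come in via the stub's call interface descriptor.
    Node* value = Parameter(Descriptor::kValue);
    Return(value);
  }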

View File

@ -1,257 +0,0 @@
// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/code-stubs.h"
#include <sstream>
#include "src/arguments.h"
#include "src/assembler-inl.h"
#include "src/ast/ast.h"
#include "src/bootstrapper.h"
#include "src/code-factory.h"
#include "src/code-stubs-utils.h"
#include "src/code-tracer.h"
#include "src/counters.h"
#include "src/gdb-jit.h"
#include "src/heap/heap-inl.h"
#include "src/ic/ic-stats.h"
#include "src/ic/ic.h"
#include "src/macro-assembler.h"
#include "src/objects-inl.h"
#include "src/objects/hash-table-inl.h"
#include "src/tracing/tracing-category-observer.h"
namespace v8 {
namespace internal {
using compiler::CodeAssemblerState;
CodeStubDescriptor::CodeStubDescriptor(CodeStub* stub)
: isolate_(stub->isolate()),
call_descriptor_(stub->GetCallInterfaceDescriptor()),
stack_parameter_count_(no_reg),
hint_stack_parameter_count_(-1),
function_mode_(NOT_JS_FUNCTION_STUB_MODE),
deoptimization_handler_(kNullAddress),
miss_handler_(),
has_miss_handler_(false) {}
CodeStubDescriptor::CodeStubDescriptor(Isolate* isolate, uint32_t stub_key)
: isolate_(isolate),
stack_parameter_count_(no_reg),
hint_stack_parameter_count_(-1),
function_mode_(NOT_JS_FUNCTION_STUB_MODE),
deoptimization_handler_(kNullAddress),
miss_handler_(),
has_miss_handler_(false) {
CodeStub::InitializeDescriptor(isolate, stub_key, this);
}
void CodeStubDescriptor::Initialize(Address deoptimization_handler,
int hint_stack_parameter_count,
StubFunctionMode function_mode) {
deoptimization_handler_ = deoptimization_handler;
hint_stack_parameter_count_ = hint_stack_parameter_count;
function_mode_ = function_mode;
}
void CodeStubDescriptor::Initialize(Register stack_parameter_count,
Address deoptimization_handler,
int hint_stack_parameter_count,
StubFunctionMode function_mode) {
Initialize(deoptimization_handler, hint_stack_parameter_count, function_mode);
stack_parameter_count_ = stack_parameter_count;
}
bool CodeStub::FindCodeInCache(Code* code_out) {
SimpleNumberDictionary stubs = isolate()->heap()->code_stubs();
int index = stubs->FindEntry(isolate(), GetKey());
if (index != SimpleNumberDictionary::kNotFound) {
*code_out = Code::cast(stubs->ValueAt(index));
return true;
}
return false;
}
void CodeStub::RecordCodeGeneration(Handle<Code> code) {
std::ostringstream os;
os << *this;
PROFILE(isolate(),
CodeCreateEvent(CodeEventListener::STUB_TAG,
AbstractCode::cast(*code), os.str().c_str()));
Counters* counters = isolate()->counters();
counters->total_stubs_code_size()->Increment(code->raw_instruction_size());
#ifdef DEBUG
code->VerifyEmbeddedObjects(isolate());
#endif
}
void CodeStub::DeleteStubFromCacheForTesting() {
Heap* heap = isolate_->heap();
Handle<SimpleNumberDictionary> dict(heap->code_stubs(), isolate());
int entry = dict->FindEntry(isolate(), GetKey());
DCHECK_NE(SimpleNumberDictionary::kNotFound, entry);
dict = SimpleNumberDictionary::DeleteEntry(isolate(), dict, entry);
heap->SetRootCodeStubs(*dict);
}
Handle<Code> PlatformCodeStub::GenerateCode() {
Factory* factory = isolate()->factory();
// Generate the new code.
// TODO(yangguo): remove this once we can serialize IC stubs.
AssemblerOptions options = AssemblerOptions::Default(isolate(), true);
MacroAssembler masm(isolate(), options, nullptr, 256,
CodeObjectRequired::kYes);
{
// Update the static counter each time a new code stub is generated.
isolate()->counters()->code_stubs()->Increment();
// Generate the code for the stub.
NoCurrentFrameScope scope(&masm);
Generate(&masm);
}
// Generate the handler table.
int handler_table_offset = GenerateHandlerTable(&masm);
// Create the code object.
CodeDesc desc;
masm.GetCode(isolate(), &desc);
// Copy the generated code into a heap object.
Handle<Code> new_object = factory->NewCode(
desc, Code::STUB, masm.CodeObject(), Builtins::kNoBuiltinId,
MaybeHandle<ByteArray>(), DeoptimizationData::Empty(isolate()),
NeedsImmovableCode(), GetKey(), false, 0, 0, handler_table_offset);
return new_object;
}
Handle<Code> CodeStub::GetCode() {
Heap* heap = isolate()->heap();
Code code;
if (FindCodeInCache(&code)) {
DCHECK(code->is_stub());
return handle(code, isolate_);
}
{
HandleScope scope(isolate());
// Canonicalize handles, so that we can share constant pool entries pointing
// to code targets without dereferencing their handles.
CanonicalHandleScope canonical(isolate());
Handle<Code> new_object = GenerateCode();
DCHECK_EQ(GetKey(), new_object->stub_key());
RecordCodeGeneration(new_object);
#ifdef ENABLE_DISASSEMBLER
if (FLAG_print_code_stubs) {
CodeTracer::Scope trace_scope(isolate()->GetCodeTracer());
OFStream os(trace_scope.file());
std::ostringstream name;
name << *this;
new_object->Disassemble(name.str().c_str(), os);
os << "\n";
}
#endif
// Update the dictionary and the root in Heap.
Handle<SimpleNumberDictionary> dict = SimpleNumberDictionary::Set(
isolate(), handle(heap->code_stubs(), isolate_), GetKey(), new_object);
heap->SetRootCodeStubs(*dict);
code = *new_object;
}
DCHECK(!NeedsImmovableCode() || heap->IsImmovable(code));
return Handle<Code>(code, isolate());
}
CodeStub::Major CodeStub::GetMajorKey(const Code code_stub) {
return MajorKeyFromKey(code_stub->stub_key());
}
const char* CodeStub::MajorName(CodeStub::Major major_key) {
switch (major_key) {
#define DEF_CASE(name) case name: return #name "Stub";
CODE_STUB_LIST(DEF_CASE)
#undef DEF_CASE
case NoCache:
return "<NoCache>Stub";
case NUMBER_OF_IDS:
UNREACHABLE();
}
return nullptr;
}
void CodeStub::PrintBaseName(std::ostream& os) const { // NOLINT
os << MajorName(MajorKey());
}
void CodeStub::PrintName(std::ostream& os) const { // NOLINT
PrintBaseName(os);
PrintState(os);
}
void CodeStub::Dispatch(Isolate* isolate, uint32_t key, void** value_out,
DispatchedCall call) {
switch (MajorKeyFromKey(key)) {
#define DEF_CASE(NAME) \
case NAME: { \
NAME##Stub stub(key, isolate); \
CodeStub* pstub = &stub; \
call(pstub, value_out); \
break; \
}
CODE_STUB_LIST(DEF_CASE)
#undef DEF_CASE
case NUMBER_OF_IDS:
case NoCache:
UNREACHABLE();
break;
}
}
int PlatformCodeStub::GenerateHandlerTable(MacroAssembler* masm) { return 0; }
static void InitializeDescriptorDispatchedCall(CodeStub* stub,
void** value_out) {
CodeStubDescriptor* descriptor_out =
reinterpret_cast<CodeStubDescriptor*>(value_out);
descriptor_out->set_call_descriptor(stub->GetCallInterfaceDescriptor());
}
void CodeStub::InitializeDescriptor(Isolate* isolate, uint32_t key,
CodeStubDescriptor* desc) {
void** value_out = reinterpret_cast<void**>(desc);
Dispatch(isolate, key, value_out, &InitializeDescriptorDispatchedCall);
}
void CodeStub::GetCodeDispatchCall(CodeStub* stub, void** value_out) {
Handle<Code>* code_out = reinterpret_cast<Handle<Code>*>(value_out);
*code_out = stub->GetCode();
}
MaybeHandle<Code> CodeStub::GetCode(Isolate* isolate, uint32_t key) {
HandleScope scope(isolate);
Handle<Code> code;
void** value_out = reinterpret_cast<void**>(&code);
Dispatch(isolate, key, value_out, &GetCodeDispatchCall);
return scope.CloseAndEscape(code);
}
} // namespace internal
} // namespace v8
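
To illustrate the caching behavior implemented by GetCode() above (ExampleStub is a hypothetical stub type): the first call generated and recorded code and stored it in the heap's code_stubs dictionary under GetKey(); later calls hit FindCodeInCache() and returned the same Code object.

  ExampleStub stub(isolate);
  Handle<Code> first = stub.GetCode();   // generates, records, caches
  Handle<Code> again = stub.GetCode();   // cache hit via FindCodeInCache()
  DCHECK(first.is_identical_to(again));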

View File

@ -1,318 +0,0 @@
// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_CODE_STUBS_H_
#define V8_CODE_STUBS_H_
#include "src/frames.h"
#include "src/interface-descriptors.h"
#include "src/type-hints.h"
namespace v8 {
namespace internal {
// Forward declarations.
class CodeStubDescriptor;
class Isolate;
class MacroAssembler;
class TurboAssembler;
namespace compiler {
class CodeAssemblerState;
}
// List of code stubs used on all platforms.
#define CODE_STUB_LIST(V)
static const int kHasReturnedMinusZeroSentinel = 1;
class CodeStub : public ZoneObject {
public:
enum Major {
// TODO(mvstanton): eliminate the NoCache key by getting rid
// of the non-monomorphic-cache.
NoCache = 0, // marker for stubs that do custom caching.
#define DEF_ENUM(name) name,
CODE_STUB_LIST(DEF_ENUM)
#undef DEF_ENUM
NUMBER_OF_IDS
};
// Retrieve the code for the stub. Generate the code if needed.
Handle<Code> GetCode();
static Major MajorKeyFromKey(uint32_t key) {
return static_cast<Major>(MajorKeyBits::decode(key));
}
static uint32_t MinorKeyFromKey(uint32_t key) {
return MinorKeyBits::decode(key);
}
// Gets the major key from a code object that is a code stub or binary op IC.
static Major GetMajorKey(const Code code_stub);
static uint32_t NoCacheKey() { return MajorKeyBits::encode(NoCache); }
static const char* MajorName(Major major_key);
explicit CodeStub(Isolate* isolate) : minor_key_(0), isolate_(isolate) {}
virtual ~CodeStub() = default;
// Some stubs put untagged junk on the stack that cannot be scanned by the
// GC. This means that we must be statically sure that no GC can occur while
// they are running. If that is the case they should override this to return
// true, which will cause an assertion if we try to call something that can
// GC or if we try to put a stack frame on top of the junk, which would not
// result in a traversable stack.
virtual bool SometimesSetsUpAFrame() { return true; }
// Lookup the code in the (possibly custom) cache.
bool FindCodeInCache(Code* code_out);
virtual CallInterfaceDescriptor GetCallInterfaceDescriptor() const = 0;
virtual int GetStackParameterCount() const {
return GetCallInterfaceDescriptor().GetStackParameterCount();
}
static void InitializeDescriptor(Isolate* isolate, uint32_t key,
CodeStubDescriptor* desc);
static MaybeHandle<Code> GetCode(Isolate* isolate, uint32_t key);
// Returns information for computing the number key.
virtual Major MajorKey() const = 0;
uint32_t MinorKey() const { return minor_key_; }
friend std::ostream& operator<<(std::ostream& os, const CodeStub& s) {
s.PrintName(os);
return os;
}
Isolate* isolate() const { return isolate_; }
void set_isolate(Isolate* isolate) {
DCHECK_NOT_NULL(isolate);
DCHECK(isolate_ == nullptr || isolate_ == isolate);
isolate_ = isolate;
}
void DeleteStubFromCacheForTesting();
protected:
CodeStub(uint32_t key, Isolate* isolate)
: minor_key_(MinorKeyFromKey(key)), isolate_(isolate) {}
// Generates the assembler code for the stub.
virtual Handle<Code> GenerateCode() = 0;
// Returns whether the code generated for this stub needs to be allocated as
// a fixed (non-moveable) code object.
// TODO(jgruber): Only required by DirectCEntryStub. Can be removed when/if
// that is ported to a builtin.
virtual Movability NeedsImmovableCode() { return kMovable; }
virtual void PrintName(std::ostream& os) const; // NOLINT
virtual void PrintBaseName(std::ostream& os) const; // NOLINT
virtual void PrintState(std::ostream& os) const { ; } // NOLINT
// Computes the key based on major and minor.
uint32_t GetKey() {
DCHECK(static_cast<int>(MajorKey()) < NUMBER_OF_IDS);
return MinorKeyBits::encode(MinorKey()) | MajorKeyBits::encode(MajorKey());
}
uint32_t minor_key_;
private:
// Perform bookkeeping required after code generation when stub code is
// initially generated.
void RecordCodeGeneration(Handle<Code> code);
// We use this dispatch to statically instantiate the correct code stub for
// the given stub key and call the passed function with that code stub.
typedef void (*DispatchedCall)(CodeStub* stub, void** value_out);
static void Dispatch(Isolate* isolate, uint32_t key, void** value_out,
DispatchedCall call);
static void GetCodeDispatchCall(CodeStub* stub, void** value_out);
STATIC_ASSERT(NUMBER_OF_IDS < (1 << kStubMajorKeyBits));
class MajorKeyBits: public BitField<uint32_t, 0, kStubMajorKeyBits> {};
class MinorKeyBits: public BitField<uint32_t,
kStubMajorKeyBits, kStubMinorKeyBits> {}; // NOLINT
friend class BreakPointIterator;
Isolate* isolate_;
};
#define DEFINE_CODE_STUB_BASE(NAME, SUPER) \
public: \
NAME(uint32_t key, Isolate* isolate) : SUPER(key, isolate) {} \
\
private: \
DISALLOW_COPY_AND_ASSIGN(NAME)
#define DEFINE_CODE_STUB(NAME, SUPER) \
public: \
inline Major MajorKey() const override { return NAME; }; \
\
DEFINE_CODE_STUB_BASE(NAME##Stub, SUPER)
#define DEFINE_PLATFORM_CODE_STUB(NAME, SUPER) \
private: \
void Generate(MacroAssembler* masm) override; \
DEFINE_CODE_STUB(NAME, SUPER)
#define DEFINE_TURBOFAN_CODE_STUB(NAME, SUPER) \
public: \
void GenerateAssembly(compiler::CodeAssemblerState* state) const override; \
DEFINE_CODE_STUB(NAME, SUPER)
#define DEFINE_CALL_INTERFACE_DESCRIPTOR(NAME) \
public: \
typedef NAME##Descriptor Descriptor; \
CallInterfaceDescriptor GetCallInterfaceDescriptor() const override { \
return Descriptor(); \
}
// There are some code stubs we just can't describe right now with a
// CallInterfaceDescriptor. Isolate behavior for those cases with this macro.
// An attempt to retrieve a descriptor will fail.
#define DEFINE_NULL_CALL_INTERFACE_DESCRIPTOR() \
public: \
CallInterfaceDescriptor GetCallInterfaceDescriptor() const override { \
UNREACHABLE(); \
return CallInterfaceDescriptor(); \
}
class PlatformCodeStub : public CodeStub {
public:
// Retrieve the code for the stub. Generate the code if needed.
Handle<Code> GenerateCode() override;
protected:
explicit PlatformCodeStub(Isolate* isolate) : CodeStub(isolate) {}
// Generates the assembler code for the stub.
virtual void Generate(MacroAssembler* masm) = 0;
// Generates the exception handler table for the stub.
virtual int GenerateHandlerTable(MacroAssembler* masm);
DEFINE_CODE_STUB_BASE(PlatformCodeStub, CodeStub);
};
enum StubFunctionMode { NOT_JS_FUNCTION_STUB_MODE, JS_FUNCTION_STUB_MODE };
class CodeStubDescriptor {
public:
explicit CodeStubDescriptor(CodeStub* stub);
CodeStubDescriptor(Isolate* isolate, uint32_t stub_key);
void Initialize(Address deoptimization_handler = kNullAddress,
int hint_stack_parameter_count = -1,
StubFunctionMode function_mode = NOT_JS_FUNCTION_STUB_MODE);
void Initialize(Register stack_parameter_count,
Address deoptimization_handler = kNullAddress,
int hint_stack_parameter_count = -1,
StubFunctionMode function_mode = NOT_JS_FUNCTION_STUB_MODE);
void SetMissHandler(Runtime::FunctionId id) {
miss_handler_id_ = id;
miss_handler_ = ExternalReference::Create(Runtime::FunctionForId(id));
has_miss_handler_ = true;
// Our miss handler infrastructure doesn't currently support
// variable stack parameter counts.
DCHECK(!stack_parameter_count_.is_valid());
}
void set_call_descriptor(CallInterfaceDescriptor d) {
call_descriptor_ = std::move(d);
}
CallInterfaceDescriptor call_descriptor() const { return call_descriptor_; }
int GetRegisterParameterCount() const {
return call_descriptor().GetRegisterParameterCount();
}
int GetStackParameterCount() const {
return call_descriptor().GetStackParameterCount();
}
int GetParameterCount() const {
return call_descriptor().GetParameterCount();
}
Register GetRegisterParameter(int index) const {
return call_descriptor().GetRegisterParameter(index);
}
MachineType GetParameterType(int index) const {
return call_descriptor().GetParameterType(index);
}
ExternalReference miss_handler() const {
DCHECK(has_miss_handler_);
return miss_handler_;
}
Runtime::FunctionId miss_handler_id() const {
DCHECK(has_miss_handler_);
return miss_handler_id_;
}
bool has_miss_handler() const {
return has_miss_handler_;
}
int GetHandlerParameterCount() const {
int params = GetParameterCount();
if (PassesArgumentsToDeoptimizationHandler()) {
params += 1;
}
return params;
}
int hint_stack_parameter_count() const { return hint_stack_parameter_count_; }
Register stack_parameter_count() const { return stack_parameter_count_; }
StubFunctionMode function_mode() const { return function_mode_; }
Address deoptimization_handler() const { return deoptimization_handler_; }
private:
bool PassesArgumentsToDeoptimizationHandler() const {
return stack_parameter_count_.is_valid();
}
Isolate* isolate_;
CallInterfaceDescriptor call_descriptor_;
Register stack_parameter_count_;
// If hint_stack_parameter_count_ > 0, the code stub can optimize the
// return sequence. Default value is -1, which means it is ignored.
int hint_stack_parameter_count_;
StubFunctionMode function_mode_;
Address deoptimization_handler_;
ExternalReference miss_handler_;
Runtime::FunctionId miss_handler_id_;
bool has_miss_handler_;
};
} // namespace internal
} // namespace v8
#undef DEFINE_CALL_INTERFACE_DESCRIPTOR
#undef DEFINE_PLATFORM_CODE_STUB
#undef DEFINE_CODE_STUB
#undef DEFINE_CODE_STUB_BASE
#endif // V8_CODE_STUBS_H_
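
For completeness, a sketch of how a platform stub was declared with the macros above (ExampleStub is hypothetical; it would also need an Example entry in CODE_STUB_LIST to obtain a Major key, Generate() would be implemented per architecture, and VoidDescriptor is assumed to be a suitable descriptor):

  class ExampleStub : public PlatformCodeStub {
   public:
    explicit ExampleStub(Isolate* isolate) : PlatformCodeStub(isolate) {}

    DEFINE_CALL_INTERFACE_DESCRIPTOR(Void);
    DEFINE_PLATFORM_CODE_STUB(Example, PlatformCodeStub);
  };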

View File

@ -5,7 +5,6 @@
#ifndef V8_COMPILER_BACKEND_CODE_GENERATOR_IMPL_H_
#define V8_COMPILER_BACKEND_CODE_GENERATOR_IMPL_H_
#include "src/code-stubs.h"
#include "src/compiler/backend/code-generator.h"
#include "src/compiler/backend/instruction.h"
#include "src/compiler/linkage.h"

View File

@ -399,7 +399,7 @@ MaybeHandle<Code> CodeGenerator::FinalizeCode() {
MaybeHandle<Code> maybe_code = isolate()->factory()->TryNewCode(
desc, info()->code_kind(), Handle<Object>(), info()->builtin_index(),
source_positions, deopt_data, kMovable, info()->stub_key(), true,
source_positions, deopt_data, kMovable, true,
frame()->GetTotalFrameSlotCount(), safepoints()->GetCodeOffset(),
handler_table_offset_);
@ -719,7 +719,7 @@ void CodeGenerator::AssembleSourcePosition(SourcePosition source_position) {
source_position, false);
if (FLAG_code_comments) {
OptimizedCompilationInfo* info = this->info();
if (info->IsStub()) return;
if (info->IsNotOptimizedFunctionOrWasmFunction()) return;
std::ostringstream buffer;
buffer << "-- ";
// Turbolizer only needs the source position, as it can reconstruct

View File

@ -45,7 +45,7 @@ static_assert(
CodeAssemblerState::CodeAssemblerState(
Isolate* isolate, Zone* zone, const CallInterfaceDescriptor& descriptor,
Code::Kind kind, const char* name, PoisoningMitigationLevel poisoning_level,
uint32_t stub_key, int32_t builtin_index)
int32_t builtin_index)
// TODO(rmcilroy): Should we use Linkage::GetBytecodeDispatchDescriptor for
// bytecode handlers?
: CodeAssemblerState(
@ -53,7 +53,7 @@ CodeAssemblerState::CodeAssemblerState(
Linkage::GetStubCallDescriptor(
zone, descriptor, descriptor.GetStackParameterCount(),
CallDescriptor::kNoFlags, Operator::kNoProperties),
kind, name, poisoning_level, stub_key, builtin_index) {}
kind, name, poisoning_level, builtin_index) {}
CodeAssemblerState::CodeAssemblerState(Isolate* isolate, Zone* zone,
int parameter_count, Code::Kind kind,
@ -67,13 +67,13 @@ CodeAssemblerState::CodeAssemblerState(Isolate* isolate, Zone* zone,
(kind == Code::BUILTIN ? CallDescriptor::kPushArgumentCount
: CallDescriptor::kNoFlags) |
CallDescriptor::kCanUseRoots),
kind, name, poisoning_level, 0, builtin_index) {}
kind, name, poisoning_level, builtin_index) {}
CodeAssemblerState::CodeAssemblerState(Isolate* isolate, Zone* zone,
CallDescriptor* call_descriptor,
Code::Kind kind, const char* name,
PoisoningMitigationLevel poisoning_level,
uint32_t stub_key, int32_t builtin_index)
int32_t builtin_index)
: raw_assembler_(new RawMachineAssembler(
isolate, new (zone) Graph(zone), call_descriptor,
MachineType::PointerRepresentation(),
@ -81,7 +81,6 @@ CodeAssemblerState::CodeAssemblerState(Isolate* isolate, Zone* zone,
InstructionSelector::AlignmentRequirements(), poisoning_level)),
kind_(kind),
name_(name),
stub_key_(stub_key),
builtin_index_(builtin_index),
code_generated_(false),
variables_(zone) {}
@ -183,8 +182,8 @@ Handle<Code> CodeAssembler::GenerateCode(CodeAssemblerState* state,
code = Pipeline::GenerateCodeForCodeStub(
rasm->isolate(), rasm->call_descriptor(), graph, nullptr,
state->kind_, state->name_, state->stub_key_,
state->builtin_index_, nullptr, rasm->poisoning_level(), options)
state->kind_, state->name_, state->builtin_index_, nullptr,
rasm->poisoning_level(), options)
.ToHandleChecked();
} else {
Schedule* schedule = rasm->Export();
@ -193,13 +192,12 @@ Handle<Code> CodeAssembler::GenerateCode(CodeAssemblerState* state,
bool should_optimize_jumps =
rasm->isolate()->serializer_enabled() && FLAG_turbo_rewrite_far_jumps;
code =
Pipeline::GenerateCodeForCodeStub(
rasm->isolate(), rasm->call_descriptor(), rasm->graph(), schedule,
state->kind_, state->name_, state->stub_key_, state->builtin_index_,
should_optimize_jumps ? &jump_opt : nullptr,
rasm->poisoning_level(), options)
.ToHandleChecked();
code = Pipeline::GenerateCodeForCodeStub(
rasm->isolate(), rasm->call_descriptor(), rasm->graph(),
schedule, state->kind_, state->name_, state->builtin_index_,
should_optimize_jumps ? &jump_opt : nullptr,
rasm->poisoning_level(), options)
.ToHandleChecked();
if (jump_opt.is_optimizable()) {
jump_opt.set_optimizing();
@ -207,9 +205,8 @@ Handle<Code> CodeAssembler::GenerateCode(CodeAssemblerState* state,
// Regenerate machine code
code = Pipeline::GenerateCodeForCodeStub(
rasm->isolate(), rasm->call_descriptor(), rasm->graph(),
schedule, state->kind_, state->name_, state->stub_key_,
state->builtin_index_, &jump_opt, rasm->poisoning_level(),
options)
schedule, state->kind_, state->name_, state->builtin_index_,
&jump_opt, rasm->poisoning_level(), options)
.ToHandleChecked();
}
}

View File

@ -64,6 +64,7 @@ class PromiseFulfillReactionJobTask;
class PromiseReaction;
class PromiseReactionJobTask;
class PromiseRejectReactionJobTask;
class WasmDebugInfo;
class WeakFactoryCleanupJobTask;
class Zone;
@ -1605,7 +1606,6 @@ class V8_EXPORT_PRIVATE CodeAssemblerState {
CodeAssemblerState(Isolate* isolate, Zone* zone,
const CallInterfaceDescriptor& descriptor, Code::Kind kind,
const char* name, PoisoningMitigationLevel poisoning_level,
uint32_t stub_key = 0,
int32_t builtin_index = Builtins::kNoBuiltinId);
// Create with JSCall linkage.
@ -1636,7 +1636,7 @@ class V8_EXPORT_PRIVATE CodeAssemblerState {
CodeAssemblerState(Isolate* isolate, Zone* zone,
CallDescriptor* call_descriptor, Code::Kind kind,
const char* name, PoisoningMitigationLevel poisoning_level,
uint32_t stub_key, int32_t builtin_index);
int32_t builtin_index);
void PushExceptionHandler(CodeAssemblerExceptionHandlerLabel* label);
void PopExceptionHandler();
@ -1644,7 +1644,6 @@ class V8_EXPORT_PRIVATE CodeAssemblerState {
std::unique_ptr<RawMachineAssembler> raw_assembler_;
Code::Kind kind_;
const char* name_;
uint32_t stub_key_;
int32_t builtin_index_;
bool code_generated_;
ZoneSet<CodeAssemblerVariable::Impl*, CodeAssemblerVariable::ImplComparator>

View File

@ -8,7 +8,6 @@
#include <sstream>
#include <string>
#include "src/code-stubs.h"
#include "src/compiler/all-nodes.h"
#include "src/compiler/backend/register-allocator.h"
#include "src/compiler/compiler-source-position-table.h"

View File

@ -8,7 +8,6 @@
#include "src/builtins/builtins-promise-gen.h"
#include "src/builtins/builtins-utils.h"
#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/compiler/access-builder.h"
#include "src/compiler/access-info.h"
#include "src/compiler/allocation-builder.h"

View File

@ -7,7 +7,6 @@
#include "src/ast/ast.h"
#include "src/builtins/builtins-constructor.h"
#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/compiler/common-operator.h"
#include "src/compiler/js-graph.h"
#include "src/compiler/machine-operator.h"

View File

@ -5,7 +5,6 @@
#include "src/compiler/linkage.h"
#include "src/assembler-inl.h"
#include "src/code-stubs.h"
#include "src/compiler/common-operator.h"
#include "src/compiler/frame.h"
#include "src/compiler/node.h"
@ -141,7 +140,7 @@ int CallDescriptor::CalculateFixedFrameSize() const {
CallDescriptor* Linkage::ComputeIncoming(Zone* zone,
OptimizedCompilationInfo* info) {
DCHECK(!info->IsStub());
DCHECK(!info->IsNotOptimizedFunctionOrWasmFunction());
if (!info->closure().is_null()) {
// If we are compiling a JS function, use a JS call descriptor,
// plus the receiver.

View File

@ -622,7 +622,7 @@ void PrintCode(Isolate* isolate, Handle<Code> code,
#ifdef ENABLE_DISASSEMBLER
AllowDeferredHandleDereference allow_deference_for_print_code;
bool print_code =
FLAG_print_code || (info->IsStub() && FLAG_print_code_stubs) ||
FLAG_print_code ||
(info->IsOptimizing() && FLAG_print_opt_code &&
info->shared_info()->PassesFilter(FLAG_print_opt_code_filter));
if (print_code) {
@ -2024,11 +2024,10 @@ bool PipelineImpl::OptimizeGraph(Linkage* linkage) {
MaybeHandle<Code> Pipeline::GenerateCodeForCodeStub(
Isolate* isolate, CallDescriptor* call_descriptor, Graph* graph,
Schedule* schedule, Code::Kind kind, const char* debug_name,
uint32_t stub_key, int32_t builtin_index, JumpOptimizationInfo* jump_opt,
int32_t builtin_index, JumpOptimizationInfo* jump_opt,
PoisoningMitigationLevel poisoning_level, const AssemblerOptions& options) {
OptimizedCompilationInfo info(CStrVector(debug_name), graph->zone(), kind);
info.set_builtin_index(builtin_index);
info.set_stub_key(stub_key);
if (poisoning_level != PoisoningMitigationLevel::kDontPoison) {
info.SetPoisoningMitigationLevel(poisoning_level);
@ -2469,9 +2468,10 @@ bool PipelineImpl::SelectInstructions(Linkage* linkage) {
<< "--------------------------------------------------\n";
}
Zone temp_zone(data->allocator(), ZONE_NAME);
MachineGraphVerifier::Run(data->graph(), data->schedule(), linkage,
data->info()->IsStub(), data->debug_name(),
&temp_zone);
MachineGraphVerifier::Run(
data->graph(), data->schedule(), linkage,
data->info()->IsNotOptimizedFunctionOrWasmFunction(),
data->debug_name(), &temp_zone);
}
data->InitializeInstructionSequence(call_descriptor);

View File

@ -72,7 +72,7 @@ class Pipeline : public AllStatic {
static MaybeHandle<Code> GenerateCodeForCodeStub(
Isolate* isolate, CallDescriptor* call_descriptor, Graph* graph,
Schedule* schedule, Code::Kind kind, const char* debug_name,
uint32_t stub_key, int32_t builtin_index, JumpOptimizationInfo* jump_opt,
int32_t builtin_index, JumpOptimizationInfo* jump_opt,
PoisoningMitigationLevel poisoning_level,
const AssemblerOptions& options);

View File

@ -1399,10 +1399,6 @@ class RuntimeCallTimerScope {
SC(store_buffer_overflows, V8.StoreBufferOverflows)
#define STATS_COUNTER_LIST_2(SC) \
/* Number of code stubs. */ \
SC(code_stubs, V8.CodeStubs) \
/* Amount of stub code. */ \
SC(total_stubs_code_size, V8.TotalStubsCodeSize) \
/* Amount of (JS) compiled code. */ \
SC(total_compiled_code_size, V8.TotalCompiledCodeSize) \
SC(gc_compactor_caused_by_request, V8.GCCompactorCausedByRequest) \

View File

@ -13,7 +13,6 @@
#include "src/base/platform/mutex.h"
#include "src/bootstrapper.h"
#include "src/builtins/builtins.h"
#include "src/code-stubs.h"
#include "src/compilation-cache.h"
#include "src/compiler.h"
#include "src/counters.h"

View File

@ -10,7 +10,6 @@
#include "src/assembler-inl.h"
#include "src/code-reference.h"
#include "src/code-stubs.h"
#include "src/debug/debug.h"
#include "src/deoptimizer.h"
#include "src/disasm.h"
@ -234,16 +233,7 @@ static void PrintRelocInfo(StringBuilder* out, Isolate* isolate,
Code code = isolate->heap()->GcSafeFindCodeForInnerPointer(
relocinfo->target_address());
Code::Kind kind = code->kind();
if (kind == Code::STUB) {
// Get the STUB key and extract major and minor key.
uint32_t key = code->stub_key();
uint32_t minor_key = CodeStub::MinorKeyFromKey(key);
CodeStub::Major major_key = CodeStub::GetMajorKey(code);
DCHECK(major_key == CodeStub::MajorKeyFromKey(key));
out->AddFormatted(" %s, %s, ", Code::Kind2String(kind),
CodeStub::MajorName(major_key));
out->AddFormatted("minor: %d", minor_key);
} else if (code->is_builtin()) {
if (code->is_builtin()) {
out->AddFormatted(" Builtin::%s", Builtins::name(code->builtin_index()));
} else {
out->AddFormatted(" %s", Code::Kind2String(kind));

View File

@ -3,7 +3,6 @@
// found in the LICENSE file.
#include "src/feedback-vector.h"
#include "src/code-stubs.h"
#include "src/feedback-vector-inl.h"
#include "src/ic/ic-inl.h"
#include "src/objects.h"
@ -1175,9 +1174,6 @@ KeyedAccessStoreMode FeedbackNexus::GetKeyedAccessStoreMode() const {
mode = KeyedAccessStoreModeForBuiltin(builtin_index);
break;
} else {
CHECK(CodeStub::MajorKeyFromKey(handler->stub_key()) ==
CodeStub::NoCache);
}
}

View File

@ -875,6 +875,9 @@ DEFINE_BOOL(expose_async_hooks, false, "expose async_hooks object")
DEFINE_BOOL(allow_unsafe_function_constructor, false,
"allow invoking the function constructor without security checks")
DEFINE_BOOL(force_slow_path, false, "always take the slow path for builtins")
DEFINE_BOOL(test_small_max_function_context_stub_size, false,
"enable testing the function context size overflow path "
"by making the maximum size smaller")
// builtins-ia32.cc
DEFINE_BOOL(inline_new, true, "use fast inline allocation")
@ -1347,18 +1350,6 @@ DEFINE_BOOL(trace_elements_transitions, false, "trace elements transitions")
DEFINE_BOOL(trace_creation_allocation_sites, false,
"trace the creation of allocation sites")
// code-stubs.cc
DEFINE_BOOL(print_code_stubs, false, "print code stubs")
DEFINE_BOOL(test_secondary_stub_cache, false,
"test secondary stub cache by disabling the primary one")
DEFINE_BOOL(test_primary_stub_cache, false,
"test primary stub cache by disabling the secondary one")
DEFINE_BOOL(test_small_max_function_context_stub_size, false,
"enable testing the function context size overflow path "
"by making the maximum size smaller")
// codegen-ia32.cc / codegen-arm.cc
DEFINE_BOOL(print_code, false, "print generated code")
DEFINE_BOOL(print_opt_code, false, "print optimized code")
@ -1374,7 +1365,6 @@ DEFINE_BOOL(sodium, false,
"print generated code output suitable for use with "
"the Sodium code viewer")
DEFINE_IMPLICATION(sodium, print_code_stubs)
DEFINE_IMPLICATION(sodium, print_code)
DEFINE_IMPLICATION(sodium, print_opt_code)
DEFINE_IMPLICATION(sodium, code_comments)
@ -1384,7 +1374,6 @@ DEFINE_IMPLICATION(print_all_code, print_code)
DEFINE_IMPLICATION(print_all_code, print_opt_code)
DEFINE_IMPLICATION(print_all_code, print_code_verbose)
DEFINE_IMPLICATION(print_all_code, print_builtin_code)
DEFINE_IMPLICATION(print_all_code, print_code_stubs)
DEFINE_IMPLICATION(print_all_code, code_comments)
#endif

View File

@ -531,7 +531,6 @@ class Arguments;
class Assembler;
class Code;
class CodeSpace;
class CodeStub;
class Context;
class DeclarationScope;
class Debug;

View File

@ -66,7 +66,7 @@ void InitializeCode(Heap* heap, Handle<Code> code, int object_size,
Handle<ByteArray> source_position_table,
Handle<DeoptimizationData> deopt_data,
Handle<ByteArray> reloc_info,
Handle<CodeDataContainer> data_container, uint32_t stub_key,
Handle<CodeDataContainer> data_container,
bool is_turbofanned, int stack_slots,
int safepoint_table_offset, int handler_table_offset) {
DCHECK(IsAligned(code->address(), kCodeAlignment));
@ -85,7 +85,6 @@ void InitializeCode(Heap* heap, Handle<Code> code, int object_size,
code->set_handler_table_offset(handler_table_offset);
code->set_code_data_container(*data_container);
code->set_deoptimization_data(*deopt_data);
code->set_stub_key(stub_key);
code->set_source_position_table(*source_position_table);
code->set_constant_pool_offset(desc.instr_size - desc.constant_pool_size);
code->set_builtin_index(builtin_index);
@ -2695,8 +2694,8 @@ MaybeHandle<Code> Factory::TryNewCode(
const CodeDesc& desc, Code::Kind kind, Handle<Object> self_ref,
int32_t builtin_index, MaybeHandle<ByteArray> maybe_source_position_table,
MaybeHandle<DeoptimizationData> maybe_deopt_data, Movability movability,
uint32_t stub_key, bool is_turbofanned, int stack_slots,
int safepoint_table_offset, int handler_table_offset) {
bool is_turbofanned, int stack_slots, int safepoint_table_offset,
int handler_table_offset) {
// Allocate objects needed for code initialization.
Handle<ByteArray> reloc_info = NewByteArray(
desc.reloc_size,
@ -2734,7 +2733,7 @@ MaybeHandle<Code> Factory::TryNewCode(
InitializeCode(heap, code, object_size, desc, kind, self_ref, builtin_index,
source_position_table, deopt_data, reloc_info,
data_container, stub_key, is_turbofanned, stack_slots,
data_container, is_turbofanned, stack_slots,
safepoint_table_offset, handler_table_offset);
// Flush the instruction cache before changing the permissions.
@ -2752,8 +2751,8 @@ Handle<Code> Factory::NewCode(
const CodeDesc& desc, Code::Kind kind, Handle<Object> self_ref,
int32_t builtin_index, MaybeHandle<ByteArray> maybe_source_position_table,
MaybeHandle<DeoptimizationData> maybe_deopt_data, Movability movability,
uint32_t stub_key, bool is_turbofanned, int stack_slots,
int safepoint_table_offset, int handler_table_offset) {
bool is_turbofanned, int stack_slots, int safepoint_table_offset,
int handler_table_offset) {
// Allocate objects needed for code initialization.
Handle<ByteArray> reloc_info = NewByteArray(
desc.reloc_size,
@ -2788,7 +2787,7 @@ Handle<Code> Factory::NewCode(
InitializeCode(heap, code, object_size, desc, kind, self_ref, builtin_index,
source_position_table, deopt_data, reloc_info,
data_container, stub_key, is_turbofanned, stack_slots,
data_container, is_turbofanned, stack_slots,
safepoint_table_offset, handler_table_offset);
// Flush the instruction cache before changing the permissions.

View File

@ -779,7 +779,7 @@ class V8_EXPORT_PRIVATE Factory {
MaybeHandle<ByteArray>(),
MaybeHandle<DeoptimizationData> maybe_deopt_data =
MaybeHandle<DeoptimizationData>(),
Movability movability = kMovable, uint32_t stub_key = 0,
Movability movability = kMovable,
bool is_turbofanned = false, int stack_slots = 0,
int safepoint_table_offset = 0,
int handler_table_offset = 0);
@ -794,9 +794,9 @@ class V8_EXPORT_PRIVATE Factory {
MaybeHandle<ByteArray>(),
MaybeHandle<DeoptimizationData> maybe_deopt_data =
MaybeHandle<DeoptimizationData>(),
Movability movability = kMovable, uint32_t stub_key = 0,
bool is_turbofanned = false, int stack_slots = 0,
int safepoint_table_offset = 0, int handler_table_offset = 0);
Movability movability = kMovable, bool is_turbofanned = false,
int stack_slots = 0, int safepoint_table_offset = 0,
int handler_table_offset = 0);
// Allocates a new code object and initializes it as the trampoline to the
// given off-heap entry point.

View File

@ -103,10 +103,6 @@ MUTABLE_ROOT_LIST(ROOT_ACCESSOR)
ROOT_LIST(ROOT_ACCESSOR)
#undef ROOT_ACCESSOR
void Heap::SetRootCodeStubs(SimpleNumberDictionary value) {
roots_table()[RootIndex::kCodeStubs] = value;
}
void Heap::SetRootMaterializedObjects(FixedArray objects) {
roots_table()[RootIndex::kMaterializedObjects] = objects;
}

View File

@ -14,7 +14,6 @@
#include "src/base/once.h"
#include "src/base/utils/random-number-generator.h"
#include "src/bootstrapper.h"
#include "src/code-stubs.h"
#include "src/compilation-cache.h"
#include "src/conversions.h"
#include "src/debug/debug.h"

View File

@ -669,8 +669,6 @@ class Heap {
MUTABLE_ROOT_LIST(ROOT_ACCESSOR)
#undef ROOT_ACCESSOR
// Sets the stub_cache_ (only used when expanding the dictionary).
V8_INLINE void SetRootCodeStubs(SimpleNumberDictionary value);
V8_INLINE void SetRootMaterializedObjects(FixedArray objects);
V8_INLINE void SetRootScriptList(Object* value);
V8_INLINE void SetRootStringTable(StringTable value);

View File

@ -4,7 +4,6 @@
#include "src/heap/incremental-marking.h"
#include "src/code-stubs.h"
#include "src/compilation-cache.h"
#include "src/conversions.h"
#include "src/heap/concurrent-marking.h"

View File

@ -8,7 +8,6 @@
#include "src/base/utils/random-number-generator.h"
#include "src/cancelable-task.h"
#include "src/code-stubs.h"
#include "src/compilation-cache.h"
#include "src/deoptimizer.h"
#include "src/execution.h"

View File

@ -734,10 +734,6 @@ void ObjectStatsCollectorImpl::CollectGlobalStatistics() {
RecordSimpleVirtualObjectStats(nullptr,
WeakArrayList::cast(heap_->script_list()),
ObjectStats::SCRIPT_LIST_TYPE);
// HashTable.
RecordHashTableVirtualObjectStats(nullptr, heap_->code_stubs(),
ObjectStats::CODE_STUBS_TABLE_TYPE);
}
void ObjectStatsCollectorImpl::RecordObjectStats(HeapObject* obj,

View File

@ -22,7 +22,6 @@
V(BOILERPLATE_PROPERTY_DICTIONARY_TYPE) \
V(BYTECODE_ARRAY_CONSTANT_POOL_TYPE) \
V(BYTECODE_ARRAY_HANDLER_TABLE_TYPE) \
V(CODE_STUBS_TABLE_TYPE) \
V(COW_ARRAY_TYPE) \
V(DEOPTIMIZATION_DATA_TYPE) \
V(DEPENDENT_CODE_TYPE) \

View File

@ -705,10 +705,6 @@ void Heap::CreateInitialObjects() {
set_interpreter_entry_trampoline_for_profiling(roots.undefined_value());
// Create the code_stubs dictionary. The initial size is set to avoid
// expanding the dictionary during bootstrapping.
set_code_stubs(*SimpleNumberDictionary::New(isolate(), 128));
{
HandleScope scope(isolate());
#define SYMBOL_INIT(_, name) \

View File

@ -50,7 +50,6 @@
#include "src/assembler-inl.h"
#include "src/base/bits.h"
#include "src/base/cpu.h"
#include "src/code-stubs.h"
#include "src/conversions-inl.h"
#include "src/deoptimizer.h"
#include "src/disassembler.h"
@ -70,13 +69,6 @@ Immediate Immediate::EmbeddedNumber(double value) {
return result;
}
Immediate Immediate::EmbeddedCode(CodeStub* stub) {
Immediate result(0, RelocInfo::CODE_TARGET);
result.is_heap_object_request_ = true;
result.value_.heap_object_request = HeapObjectRequest(stub);
return result;
}
Immediate Immediate::EmbeddedStringConstant(const StringConstantBase* str) {
Immediate result(0, RelocInfo::EMBEDDED_OBJECT);
result.is_heap_object_request_ = true;
@ -303,10 +295,6 @@ void Assembler::AllocateAndInstallRequestedHeapObjects(Isolate* isolate) {
object =
isolate->factory()->NewHeapNumber(request.heap_number(), TENURED);
break;
case HeapObjectRequest::kCodeStub:
request.code_stub()->set_isolate(isolate);
object = request.code_stub()->GetCode();
break;
case HeapObjectRequest::kStringConstant: {
const StringConstantBase* str = request.string();
CHECK_NOT_NULL(str);
@ -1643,12 +1631,6 @@ void Assembler::call(Handle<Code> code, RelocInfo::Mode rmode) {
emit(code, rmode);
}
void Assembler::call(CodeStub* stub) {
EnsureSpace ensure_space(this);
EMIT(0xE8);
emit(Immediate::EmbeddedCode(stub));
}
void Assembler::jmp_rel(int offset) {
EnsureSpace ensure_space(this);
const int short_size = 2;

View File

@ -223,7 +223,6 @@ class Immediate {
: Immediate(static_cast<intptr_t>(value.ptr())) {}
static Immediate EmbeddedNumber(double number); // Smi or HeapNumber.
static Immediate EmbeddedCode(CodeStub* code);
static Immediate EmbeddedStringConstant(const StringConstantBase* str);
static Immediate CodeRelativeOffset(Label* label) {
@ -840,7 +839,6 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase {
void call(Register reg) { call(Operand(reg)); }
void call(Operand adr);
void call(Handle<Code> code, RelocInfo::Mode rmode);
void call(CodeStub* stub);
void wasm_call(Address address, RelocInfo::Mode rmode);
// Jumps

View File

@ -10,7 +10,6 @@
#include "src/bootstrapper.h"
#include "src/callable.h"
#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/counters.h"
#include "src/debug/debug.h"
#include "src/external-reference-table.h"
@ -960,20 +959,6 @@ void MacroAssembler::PopStackHandler(Register scratch) {
add(esp, Immediate(StackHandlerConstants::kSize - kPointerSize));
}
void MacroAssembler::CallStub(CodeStub* stub) {
DCHECK(AllowThisStubCall(stub)); // Calls are not allowed in some stubs.
Call(stub->GetCode(), RelocInfo::CODE_TARGET);
}
void MacroAssembler::TailCallStub(CodeStub* stub) {
Jump(stub->GetCode(), RelocInfo::CODE_TARGET);
}
bool TurboAssembler::AllowThisStubCall(CodeStub* stub) {
return has_frame() || !stub->SometimesSetsUpAFrame();
}
void MacroAssembler::CallRuntime(const Runtime::Function* f,
int num_arguments,
SaveFPRegsMode save_doubles) {

View File

@ -144,8 +144,6 @@ class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase {
call(target, rmode);
}
inline bool AllowThisStubCall(CodeStub* stub);
// Call a runtime routine. This expects {centry} to contain a fitting CEntry
// builtin for the target runtime function and uses an indirect call.
void CallRuntimeWithCEntry(Runtime::FunctionId fid, Register centry);
@ -668,12 +666,6 @@ class V8_EXPORT_PRIVATE MacroAssembler : public TurboAssembler {
// ---------------------------------------------------------------------------
// Runtime calls
// Call a code stub. Generate the code if necessary.
void CallStub(CodeStub* stub);
// Tail call a code stub (jump). Generate the code if necessary.
void TailCallStub(CodeStub* stub);
// Call a runtime routine.
void CallRuntime(const Runtime::Function* f, int num_arguments,
SaveFPRegsMode save_doubles = kDontSaveFPRegs);

View File

@ -6,7 +6,6 @@
#include "src/ast/ast.h"
#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/counters.h"
#include "src/ic/handler-configuration.h"
#include "src/ic/ic.h"
@ -2305,15 +2304,6 @@ void AccessorAssembler::TryProbeStubCacheTable(
Node* name, Node* map, Label* if_handler,
TVariable<MaybeObject>* var_handler, Label* if_miss) {
StubCache::Table table = static_cast<StubCache::Table>(table_id);
#ifdef DEBUG
if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
Goto(if_miss);
return;
} else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
Goto(if_miss);
return;
}
#endif
// The {table_offset} holds the entry offset times four (due to masking
// and shifting optimizations).
const int kMultiplier = sizeof(StubCache::Entry) >> Name::kHashShift;
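
The debug-only flag checks removed here sat in front of the regular stub-cache probe, which hashes a (name, map) pair into a fixed-size table entry and compares the stored key against the lookup key; the kMultiplier constant above describes how the real code scales that hash by the entry size. A simplified model of the probe, with an illustrative entry layout and hash function rather than the real StubCache arithmetic:

    // Two-level (primary/secondary) cache probe, heavily simplified.
    #include <cstddef>
    #include <cstdint>

    struct Entry { uintptr_t name; uintptr_t map; uintptr_t handler; };

    constexpr size_t kTableSize = 1024;  // assumed power-of-two table size

    const Entry* Probe(const Entry* table, uintptr_t name, uintptr_t map) {
      size_t index = (name ^ map) & (kTableSize - 1);  // masked hash
      const Entry& e = table[index];
      return (e.name == name && e.map == map) ? &e : nullptr;  // hit or miss
    }

    // A full lookup probes the primary table first, then a secondary table
    // with a different hash, and finally jumps to the runtime miss handler.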

View File

@ -5,7 +5,7 @@
#ifndef V8_IC_CALL_OPTIMIZATION_H_
#define V8_IC_CALL_OPTIMIZATION_H_
#include "src/code-stubs.h"
#include "src/api-arguments.h"
#include "src/macro-assembler.h"
#include "src/objects.h"

View File

@ -7,7 +7,6 @@
#include "src/ast/ast-value-factory.h"
#include "src/ast/ast.h"
#include "src/builtins/builtins-constructor.h"
#include "src/code-stubs.h"
#include "src/objects-inl.h"
namespace v8 {

View File

@ -8,7 +8,6 @@
#include "src/ast/ast-source-ranges.h"
#include "src/ast/scopes.h"
#include "src/builtins/builtins-constructor.h"
#include "src/code-stubs.h"
#include "src/compiler.h"
#include "src/interpreter/bytecode-flags.h"
#include "src/interpreter/bytecode-jump-table.h"

View File

@ -3219,7 +3219,7 @@ Handle<Code> GenerateBytecodeHandler(Isolate* isolate, Bytecode bytecode,
FLAG_untrusted_code_mitigations
? PoisoningMitigationLevel::kPoisonCriticalOnly
: PoisoningMitigationLevel::kDontPoison,
0, builtin_index);
builtin_index);
switch (bytecode) {
#define CALL_GENERATOR(Name, ...) \

View File

@ -25,7 +25,6 @@
#include "src/builtins/builtins-promise-gen.h"
#include "src/builtins/constants-table-builder.h"
#include "src/cancelable-task.h"
#include "src/code-stubs.h"
#include "src/compilation-cache.h"
#include "src/compilation-statistics.h"
#include "src/compiler-dispatcher/compiler-dispatcher.h"
@ -534,13 +533,7 @@ class FrameArrayBuilder {
Handle<AbstractCode> abstract_code = summary.abstract_code();
const int offset = summary.code_offset();
bool is_constructor = summary.is_constructor();
// Help CallSite::IsConstructor correctly detect hand-written
// construct stubs.
if (abstract_code->IsCode() &&
Code::cast(*abstract_code)->is_construct_stub()) {
is_constructor = true;
}
const bool is_constructor = summary.is_constructor();
int flags = 0;
Handle<JSFunction> function = summary.function();

View File

@ -12,7 +12,6 @@
#include "src/bailout-reason.h"
#include "src/base/platform/platform.h"
#include "src/bootstrapper.h"
#include "src/code-stubs.h"
#include "src/counters.h"
#include "src/deoptimizer.h"
#include "src/global-handles.h"
@ -2047,9 +2046,7 @@ void ExistingCodeLogger::LogCodeObject(Object* object) {
case AbstractCode::BYTECODE_HANDLER:
return; // We log it later by walking the dispatch table.
case AbstractCode::STUB:
description =
CodeStub::MajorName(CodeStub::GetMajorKey(abstract_code->GetCode()));
if (description == nullptr) description = "A stub from before profiling";
description = "STUB code";
tag = CodeEventListener::STUB_TAG;
break;
case AbstractCode::REGEXP:

View File

@ -175,23 +175,6 @@ class AllowExternalCallThatCantCauseGC: public FrameScope {
: FrameScope(masm, StackFrame::NONE) { }
};
class NoCurrentFrameScope {
public:
explicit NoCurrentFrameScope(MacroAssembler* masm)
: masm_(masm), saved_(masm->has_frame()) {
masm->set_has_frame(false);
}
~NoCurrentFrameScope() {
masm_->set_has_frame(saved_);
}
private:
MacroAssembler* masm_;
bool saved_;
};
// Prevent the use of the RootArray during the lifetime of this
// scope object.
class NoRootArrayScope {

View File

@ -38,7 +38,6 @@
#include "src/base/bits.h"
#include "src/base/cpu.h"
#include "src/code-stubs.h"
#include "src/deoptimizer.h"
#include "src/mips/assembler-mips-inl.h"
#include "src/string-constants.h"
@ -231,13 +230,6 @@ Operand Operand::EmbeddedNumber(double value) {
return result;
}
Operand Operand::EmbeddedCode(CodeStub* stub) {
Operand result(0, RelocInfo::CODE_TARGET);
result.is_heap_object_request_ = true;
result.value_.heap_object_request = HeapObjectRequest(stub);
return result;
}
Operand Operand::EmbeddedStringConstant(const StringConstantBase* str) {
Operand result(0, RelocInfo::EMBEDDED_OBJECT);
result.is_heap_object_request_ = true;
@ -264,10 +256,6 @@ void Assembler::AllocateAndInstallRequestedHeapObjects(Isolate* isolate) {
object =
isolate->factory()->NewHeapNumber(request.heap_number(), TENURED);
break;
case HeapObjectRequest::kCodeStub:
request.code_stub()->set_isolate(isolate);
object = request.code_stub()->GetCode();
break;
case HeapObjectRequest::kStringConstant:
const StringConstantBase* str = request.string();
CHECK_NOT_NULL(str);

View File

@ -409,7 +409,6 @@ class Operand {
}
static Operand EmbeddedNumber(double number); // Smi or HeapNumber.
static Operand EmbeddedCode(CodeStub* stub);
static Operand EmbeddedStringConstant(const StringConstantBase* str);
// Register.

View File

@ -12,7 +12,6 @@
#include "src/bootstrapper.h"
#include "src/callable.h"
#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/counters.h"
#include "src/debug/debug.h"
#include "src/external-reference-table.h"
@ -4504,27 +4503,6 @@ void MacroAssembler::GetObjectType(Register object,
// -----------------------------------------------------------------------------
// Runtime calls.
void MacroAssembler::CallStub(CodeStub* stub,
Condition cond,
Register r1,
const Operand& r2,
BranchDelaySlot bd) {
DCHECK(AllowThisStubCall(stub)); // Stub calls are not allowed in some stubs.
Call(stub->GetCode(), RelocInfo::CODE_TARGET, cond, r1, r2, bd);
}
void MacroAssembler::TailCallStub(CodeStub* stub,
Condition cond,
Register r1,
const Operand& r2,
BranchDelaySlot bd) {
Jump(stub->GetCode(), RelocInfo::CODE_TARGET, cond, r1, r2, bd);
}
bool TurboAssembler::AllowThisStubCall(CodeStub* stub) {
return has_frame() || !stub->SometimesSetsUpAFrame();
}
void TurboAssembler::AddOverflow(Register dst, Register left,
const Operand& right, Register overflow) {
BlockTrampolinePoolScope block_trampoline_pool(this);

View File

@ -163,8 +163,6 @@ class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase {
// Print a message to stdout and abort execution.
void Abort(AbortReason msg);
inline bool AllowThisStubCall(CodeStub* stub);
// Arguments macros.
#define COND_TYPED_ARGS Condition cond, Register r1, const Operand& r2
#define COND_ARGS cond, r1, r2
@ -543,15 +541,6 @@ class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase {
// builtin for the target runtime function and uses an indirect call.
void CallRuntimeWithCEntry(Runtime::FunctionId fid, Register centry);
// Performs a truncating conversion of a floating point number as used by
// the JS bitwise operations. See ECMA-262 9.5: ToInt32. Goes to 'done' if it
// succeeds, otherwise falls through if result is saturated. On return
// 'result' either holds answer, or is clobbered on fall through.
//
// Only public for the test code in test-code-stubs-arm.cc.
void TryInlineTruncateDoubleToI(Register result, DoubleRegister input,
Label* done);
// Performs a truncating conversion of a floating point number as used by
// the JS bitwise operations. See ECMA-262 9.5: ToInt32.
// Exits with 'result' holding the answer.
@ -863,6 +852,13 @@ class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase {
private:
bool has_double_zero_reg_set_ = false;
// Performs a truncating conversion of a floating point number as used by
// the JS bitwise operations. See ECMA-262 9.5: ToInt32. Goes to 'done' if it
// succeeds, otherwise falls through if result is saturated. On return
// 'result' either holds answer, or is clobbered on fall through.
void TryInlineTruncateDoubleToI(Register result, DoubleRegister input,
Label* done);
void CallCFunctionHelper(Register function_base, int16_t function_offset,
int num_reg_arguments, int num_double_arguments);
@ -1067,18 +1063,6 @@ class V8_EXPORT_PRIVATE MacroAssembler : public TurboAssembler {
// -------------------------------------------------------------------------
// Runtime calls.
#define COND_ARGS Condition cond = al, Register rs = zero_reg, \
const Operand& rt = Operand(zero_reg), BranchDelaySlot bd = PROTECT
// Call a code stub.
void CallStub(CodeStub* stub,
COND_ARGS);
// Tail call a code stub (jump).
void TailCallStub(CodeStub* stub, COND_ARGS);
#undef COND_ARGS
// Call a runtime routine.
void CallRuntime(const Runtime::Function* f, int num_arguments,
SaveFPRegsMode save_doubles = kDontSaveFPRegs);
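
TryInlineTruncateDoubleToI, now private, is the fast path for the ECMA-262 ToInt32 truncation described in these comments: non-finite inputs become 0, everything else is truncated towards zero and wrapped modulo 2^32. A portable sketch of that semantics, assuming the final cast wraps as two's complement:

    #include <cmath>
    #include <cstdint>

    int32_t ToInt32(double value) {
      // NaN, +/-Infinity and +/-0 all map to 0.
      if (!std::isfinite(value) || value == 0.0) return 0;
      double truncated = std::trunc(value);                  // round towards zero
      double wrapped = std::fmod(truncated, 4294967296.0);   // modulo 2^32
      if (wrapped < 0) wrapped += 4294967296.0;               // into [0, 2^32)
      uint32_t bits = static_cast<uint32_t>(wrapped);
      return static_cast<int32_t>(bits);  // reinterpret low 32 bits as signed
    }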

View File

@ -37,7 +37,6 @@
#if V8_TARGET_ARCH_MIPS64
#include "src/base/cpu.h"
#include "src/code-stubs.h"
#include "src/deoptimizer.h"
#include "src/mips64/assembler-mips64-inl.h"
#include "src/string-constants.h"
@ -208,13 +207,6 @@ Operand Operand::EmbeddedNumber(double value) {
return result;
}
Operand Operand::EmbeddedCode(CodeStub* stub) {
Operand result(0, RelocInfo::CODE_TARGET);
result.is_heap_object_request_ = true;
result.value_.heap_object_request = HeapObjectRequest(stub);
return result;
}
Operand Operand::EmbeddedStringConstant(const StringConstantBase* str) {
Operand result(0, RelocInfo::EMBEDDED_OBJECT);
result.is_heap_object_request_ = true;
@ -242,10 +234,6 @@ void Assembler::AllocateAndInstallRequestedHeapObjects(Isolate* isolate) {
object =
isolate->factory()->NewHeapNumber(request.heap_number(), TENURED);
break;
case HeapObjectRequest::kCodeStub:
request.code_stub()->set_isolate(isolate);
object = request.code_stub()->GetCode();
break;
case HeapObjectRequest::kStringConstant:
const StringConstantBase* str = request.string();
CHECK_NOT_NULL(str);

View File

@ -414,7 +414,6 @@ class Operand {
}
static Operand EmbeddedNumber(double number); // Smi or HeapNumber.
static Operand EmbeddedCode(CodeStub* stub);
static Operand EmbeddedStringConstant(const StringConstantBase* str);
// Register.

View File

@ -12,7 +12,6 @@
#include "src/bootstrapper.h"
#include "src/callable.h"
#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/counters.h"
#include "src/debug/debug.h"
#include "src/external-reference-table.h"
@ -4855,27 +4854,6 @@ void MacroAssembler::GetObjectType(Register object,
// -----------------------------------------------------------------------------
// Runtime calls.
void MacroAssembler::CallStub(CodeStub* stub,
Condition cond,
Register r1,
const Operand& r2,
BranchDelaySlot bd) {
DCHECK(AllowThisStubCall(stub)); // Stub calls are not allowed in some stubs.
Call(stub->GetCode(), RelocInfo::CODE_TARGET, cond, r1, r2, bd);
}
void MacroAssembler::TailCallStub(CodeStub* stub,
Condition cond,
Register r1,
const Operand& r2,
BranchDelaySlot bd) {
Jump(stub->GetCode(), RelocInfo::CODE_TARGET, cond, r1, r2, bd);
}
bool TurboAssembler::AllowThisStubCall(CodeStub* stub) {
return has_frame() || !stub->SometimesSetsUpAFrame();
}
void TurboAssembler::DaddOverflow(Register dst, Register left,
const Operand& right, Register overflow) {
BlockTrampolinePoolScope block_trampoline_pool(this);

View File

@ -185,8 +185,6 @@ class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase {
// Print a message to stdout and abort execution.
void Abort(AbortReason msg);
inline bool AllowThisStubCall(CodeStub* stub);
// Arguments macros.
#define COND_TYPED_ARGS Condition cond, Register r1, const Operand& r2
#define COND_ARGS cond, r1, r2
@ -571,15 +569,6 @@ class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase {
// builtin for the target runtime function and uses an indirect call.
void CallRuntimeWithCEntry(Runtime::FunctionId fid, Register centry);
// Performs a truncating conversion of a floating point number as used by
// the JS bitwise operations. See ECMA-262 9.5: ToInt32. Goes to 'done' if it
// succeeds, otherwise falls through if result is saturated. On return
// 'result' either holds answer, or is clobbered on fall through.
//
// Only public for the test code in test-code-stubs-arm.cc.
void TryInlineTruncateDoubleToI(Register result, DoubleRegister input,
Label* done);
// Performs a truncating conversion of a floating point number as used by
// the JS bitwise operations. See ECMA-262 9.5: ToInt32.
// Exits with 'result' holding the answer.
@ -874,6 +863,13 @@ class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase {
private:
bool has_double_zero_reg_set_ = false;
// Performs a truncating conversion of a floating point number as used by
// the JS bitwise operations. See ECMA-262 9.5: ToInt32. Goes to 'done' if it
// succeeds, otherwise falls through if result is saturated. On return
// 'result' either holds answer, or is clobbered on fall through.
void TryInlineTruncateDoubleToI(Register result, DoubleRegister input,
Label* done);
void CompareF(SecondaryField sizeField, FPUCondition cc, FPURegister cmp1,
FPURegister cmp2);
@ -1118,17 +1114,6 @@ class V8_EXPORT_PRIVATE MacroAssembler : public TurboAssembler {
// -------------------------------------------------------------------------
// Runtime calls.
#define COND_ARGS Condition cond = al, Register rs = zero_reg, \
const Operand& rt = Operand(zero_reg), BranchDelaySlot bd = PROTECT
// Call a code stub.
void CallStub(CodeStub* stub, COND_ARGS);
// Tail call a code stub (jump).
void TailCallStub(CodeStub* stub, COND_ARGS);
#undef COND_ARGS
// Call a runtime routine.
void CallRuntime(const Runtime::Function* f, int num_arguments,
SaveFPRegsMode save_doubles = kDontSaveFPRegs);

View File

@ -2311,38 +2311,6 @@ bool TransitionsAccessor::IsConsistentWithBackPointers() {
return true;
}
// Estimates if there is a path from the object to a context.
// This function is not precise, and can return false even if
// there is a path to a context.
bool CanLeak(Object* obj, Isolate* isolate) {
if (!obj->IsHeapObject()) return false;
if (obj->IsCell()) {
return CanLeak(Cell::cast(obj)->value(), isolate);
}
if (obj->IsPropertyCell()) {
return CanLeak(PropertyCell::cast(obj)->value(), isolate);
}
if (obj->IsContext()) return true;
if (obj->IsMap()) {
Map map = Map::cast(obj);
for (RootIndex root_index = RootIndex::kFirstStrongOrReadOnlyRoot;
root_index <= RootIndex::kLastStrongOrReadOnlyRoot; ++root_index) {
if (map == isolate->root(root_index)) return false;
}
return true;
}
return CanLeak(HeapObject::cast(obj)->map(), isolate);
}
void Code::VerifyEmbeddedObjects(Isolate* isolate, VerifyMode mode) {
if (kind() == OPTIMIZED_FUNCTION) return;
int mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
for (RelocIterator it(*this, mask); !it.done(); it.next()) {
Object* target = it.rinfo()->target_object();
DCHECK(!CanLeak(target, isolate));
}
}
#endif // DEBUG
} // namespace internal

View File

@ -24,7 +24,6 @@
#include "src/base/utils/random-number-generator.h"
#include "src/bootstrapper.h"
#include "src/builtins/builtins.h"
#include "src/code-stubs.h"
#include "src/compiler.h"
#include "src/counters-inl.h"
#include "src/counters.h"
@ -3656,9 +3655,7 @@ void HeapObject::HeapObjectShortPrint(std::ostream& os) { // NOLINT
case CODE_TYPE: {
Code code = Code::cast(this);
os << "<Code " << Code::Kind2String(code->kind());
if (code->is_stub()) {
os << " " << CodeStub::MajorName(CodeStub::GetMajorKey(code));
} else if (code->is_builtin()) {
if (code->is_builtin()) {
os << " " << Builtins::name(code->builtin_index());
}
os << ">";
@ -15084,9 +15081,7 @@ void DeoptimizationData::DeoptimizationDataPrint(std::ostream& os) { // NOLINT
}
const char* Code::GetName(Isolate* isolate) const {
if (is_stub()) {
return CodeStub::MajorName(CodeStub::GetMajorKey(*this));
} else if (kind() == BYTECODE_HANDLER) {
if (kind() == BYTECODE_HANDLER) {
return isolate->interpreter()->LookupNameOfBytecodeHandler(*this);
} else {
// There are some handlers and ICs that we can also find names for with
@ -15116,11 +15111,6 @@ inline void DisassembleCodeRange(Isolate* isolate, std::ostream& os, Code code,
void Code::Disassemble(const char* name, std::ostream& os, Address current_pc) {
Isolate* isolate = GetIsolate();
os << "kind = " << Kind2String(kind()) << "\n";
if (is_stub()) {
const char* n = CodeStub::MajorName(CodeStub::GetMajorKey(*this));
os << "major_key = " << (n == nullptr ? "null" : n) << "\n";
os << "minor_key = " << CodeStub::MinorKeyFromKey(this->stub_key()) << "\n";
}
if (name == nullptr) {
name = GetName(isolate);
}

View File

@ -236,16 +236,6 @@ ByteArray Code::SourcePositionTable() const {
->source_position_table();
}
uint32_t Code::stub_key() const {
DCHECK(is_stub());
return READ_UINT32_FIELD(this, kStubKeyOffset);
}
void Code::set_stub_key(uint32_t key) {
DCHECK(is_stub() || key == 0); // Allow zero initialization.
WRITE_UINT32_FIELD(this, kStubKeyOffset, key);
}
Object* Code::next_code_link() const {
return code_data_container()->next_code_link();
}
@ -433,19 +423,6 @@ inline void Code::set_can_have_weak_objects(bool value) {
code_data_container()->set_kind_specific_flags(updated);
}
inline bool Code::is_construct_stub() const {
DCHECK(kind() == BUILTIN);
int32_t flags = code_data_container()->kind_specific_flags();
return IsConstructStubField::decode(flags);
}
inline void Code::set_is_construct_stub(bool value) {
DCHECK(kind() == BUILTIN);
int32_t previous = code_data_container()->kind_specific_flags();
int32_t updated = IsConstructStubField::update(previous, value);
code_data_container()->set_kind_specific_flags(updated);
}
inline bool Code::is_promise_rejection() const {
DCHECK(kind() == BUILTIN);
int32_t flags = code_data_container()->kind_specific_flags();
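
is_construct_stub was one of several one-bit flags packed into kind_specific_flags through the IsConstructStubField::decode / update pattern; removing the accessor simply frees its bit. A minimal stand-in for that BitField pattern, with illustrative shift positions rather than V8's real field layout:

    #include <cstdint>

    // Each field owns a (shift, size) slice of a single 32-bit flags word.
    template <typename T, int kShift, int kSize>
    struct BitFieldSketch {
      static constexpr uint32_t kMask = ((1u << kSize) - 1u) << kShift;
      static T decode(uint32_t flags) {
        return static_cast<T>((flags & kMask) >> kShift);
      }
      static uint32_t update(uint32_t flags, T value) {
        return (flags & ~kMask) |
               ((static_cast<uint32_t>(value) << kShift) & kMask);
      }
    };

    // Example: two of the remaining one-bit flags at assumed positions.
    using IsPromiseRejectionField = BitFieldSketch<bool, 0, 1>;
    using IsExceptionCaughtField = BitFieldSketch<bool, 1, 1>;

    // set: flags = IsPromiseRejectionField::update(flags, true);
    // get: bool b = IsPromiseRejectionField::decode(flags);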

View File

@ -91,10 +91,6 @@ class Code : public HeapObjectPtr {
// [code_data_container]: A container indirection for all mutable fields.
DECL_ACCESSORS2(code_data_container, CodeDataContainer)
// [stub_key]: The major/minor key of a code stub.
inline uint32_t stub_key() const;
inline void set_stub_key(uint32_t key);
// [next_code_link]: Link for lists of optimized or deoptimized code.
// Note that this field is stored in the {CodeDataContainer} to be mutable.
inline Object* next_code_link() const;
@ -136,12 +132,6 @@ class Code : public HeapObjectPtr {
inline bool can_have_weak_objects() const;
inline void set_can_have_weak_objects(bool value);
// [is_construct_stub]: For kind BUILTIN, tells whether the code object
// represents a hand-written construct stub
// (e.g., NumberConstructor_ConstructStub).
inline bool is_construct_stub() const;
inline void set_is_construct_stub(bool value);
// [builtin_index]: For builtins, tells which builtin index the code object
// has. The builtin index is a non-negative integer for builtins, and -1
// otherwise.
@ -352,12 +342,6 @@ class Code : public HeapObjectPtr {
inline HandlerTable::CatchPrediction GetBuiltinCatchPrediction();
#ifdef DEBUG
enum VerifyMode { kNoContextSpecificPointers, kNoContextRetainingPointers };
void VerifyEmbeddedObjects(Isolate* isolate,
VerifyMode mode = kNoContextRetainingPointers);
#endif // DEBUG
bool IsIsolateIndependent(Isolate* isolate);
inline bool CanContainWeakObjects();
@ -385,7 +369,6 @@ class Code : public HeapObjectPtr {
V(kFlagsOffset, kIntSize) \
V(kSafepointTableOffsetOffset, kIntSize) \
V(kHandlerTableOffsetOffset, kIntSize) \
V(kStubKeyOffset, kIntSize) \
V(kConstantPoolOffset, FLAG_enable_embedded_constant_pool ? kIntSize : 0) \
V(kBuiltinIndexOffset, kIntSize) \
/* Add padding to align the instruction start following right after */ \
@ -419,7 +402,6 @@ class Code : public HeapObjectPtr {
V(EmbeddedObjectsClearedField, bool, 1, _) \
V(DeoptAlreadyCountedField, bool, 1, _) \
V(CanHaveWeakObjectsField, bool, 1, _) \
V(IsConstructStubField, bool, 1, _) \
V(IsPromiseRejectionField, bool, 1, _) \
V(IsExceptionCaughtField, bool, 1, _)
DEFINE_BIT_FIELDS(CODE_KIND_SPECIFIC_FLAGS_BIT_FIELDS)

View File

@ -73,8 +73,6 @@ class V8_EXPORT_PRIVATE OptimizedCompilationInfo final {
Handle<JSFunction> closure() const { return closure_; }
Handle<Code> code() const { return code_; }
Code::Kind code_kind() const { return code_kind_; }
uint32_t stub_key() const { return stub_key_; }
void set_stub_key(uint32_t stub_key) { stub_key_ = stub_key; }
int32_t builtin_index() const { return builtin_index_; }
void set_builtin_index(int32_t index) { builtin_index_ = index; }
BailoutId osr_offset() const { return osr_offset_; }
@ -188,7 +186,7 @@ class V8_EXPORT_PRIVATE OptimizedCompilationInfo final {
// Accessors for the different compilation modes.
bool IsOptimizing() const { return code_kind() == Code::OPTIMIZED_FUNCTION; }
bool IsWasm() const { return code_kind() == Code::WASM_FUNCTION; }
bool IsStub() const {
bool IsNotOptimizedFunctionOrWasmFunction() const {
return code_kind() != Code::OPTIMIZED_FUNCTION &&
code_kind() != Code::WASM_FUNCTION;
}
@ -280,7 +278,6 @@ class V8_EXPORT_PRIVATE OptimizedCompilationInfo final {
PoisoningMitigationLevel::kDontPoison;
Code::Kind code_kind_;
uint32_t stub_key_ = 0;
int32_t builtin_index_ = -1;
// We retain a reference to the bytecode array specifically to ensure it doesn't

View File

@ -7,7 +7,6 @@
#include <utility>
#include "src/api-inl.h"
#include "src/code-stubs.h"
#include "src/conversions.h"
#include "src/debug/debug.h"
#include "src/global-handles.h"
@ -830,7 +829,6 @@ void V8HeapExplorer::ExtractJSObjectReferences(HeapEntry* entry,
TagObject(js_fun->context(), "(context)");
SetInternalReference(entry, "context", js_fun->context(),
JSFunction::kContextOffset);
TagCodeObject(js_fun->code());
SetInternalReference(entry, "code", js_fun->code(),
JSFunction::kCodeOffset);
} else if (obj->IsJSGlobalObject()) {
@ -1114,16 +1112,7 @@ void V8HeapExplorer::TagBuiltinCodeObject(Code code, const char* name) {
TagObject(code, names_->GetFormatted("(%s builtin)", name));
}
void V8HeapExplorer::TagCodeObject(Code code) {
if (code->kind() == Code::STUB) {
TagObject(code, names_->GetFormatted(
"(%s code)",
CodeStub::MajorName(CodeStub::GetMajorKey(code))));
}
}
void V8HeapExplorer::ExtractCodeReferences(HeapEntry* entry, Code code) {
TagCodeObject(code);
TagObject(code->relocation_info(), "(code relocation info)");
SetInternalReference(entry, "relocation_info", code->relocation_info(),
Code::kRelocationInfoOffset);

View File

@ -322,7 +322,6 @@ class V8HeapExplorer : public HeapEntriesAllocator {
int EstimateObjectsCount();
bool IterateAndExtractReferences(HeapSnapshotGenerator* generator);
void TagGlobalObjects();
void TagCodeObject(Code code);
void TagBuiltinCodeObject(Code code, const char* name);
HeapEntry* AddEntry(Address address,
HeapEntry::Type type,

View File

@ -7,7 +7,6 @@
#include "src/regexp/arm/regexp-macro-assembler-arm.h"
#include "src/assembler-inl.h"
#include "src/code-stubs.h"
#include "src/heap/factory.h"
#include "src/log.h"
#include "src/macro-assembler.h"

View File

@ -7,7 +7,6 @@
#include "src/regexp/arm64/regexp-macro-assembler-arm64.h"
#include "src/arm64/macro-assembler-arm64-inl.h"
#include "src/code-stubs.h"
#include "src/log.h"
#include "src/macro-assembler.h"
#include "src/objects-inl.h"

View File

@ -7,7 +7,6 @@
#include "src/regexp/mips/regexp-macro-assembler-mips.h"
#include "src/assembler-inl.h"
#include "src/code-stubs.h"
#include "src/log.h"
#include "src/macro-assembler.h"
#include "src/objects-inl.h"

View File

@ -7,7 +7,6 @@
#include "src/regexp/mips64/regexp-macro-assembler-mips64.h"
#include "src/assembler-inl.h"
#include "src/code-stubs.h"
#include "src/log.h"
#include "src/macro-assembler.h"
#include "src/objects-inl.h"

View File

@ -6,7 +6,6 @@
#include "src/assembler-inl.h"
#include "src/code-reference.h"
#include "src/code-stubs.h"
#include "src/deoptimize-reason.h"
#include "src/deoptimizer.h"
#include "src/heap/heap-write-barrier-inl.h"
@ -464,8 +463,6 @@ void RelocInfo::Print(Isolate* isolate, std::ostream& os) { // NOLINT
os << " (" << Code::Kind2String(code->kind());
if (Builtins::IsBuiltin(code)) {
os << " " << Builtins::name(code->builtin_index());
} else if (code->kind() == Code::STUB) {
os << " " << CodeStub::MajorName(CodeStub::GetMajorKey(code));
}
os << ") (" << reinterpret_cast<const void*>(target_address()) << ")";
} else if (IsRuntimeEntry(rmode_) && isolate->deoptimizer_data() != nullptr) {

View File

@ -269,7 +269,6 @@ class RootVisitor;
V(NameDictionary, api_symbol_table, ApiSymbolTable) \
V(NameDictionary, api_private_symbol_table, ApiPrivateSymbolTable) \
V(WeakArrayList, script_list, ScriptList) \
V(SimpleNumberDictionary, code_stubs, CodeStubs) \
V(FixedArray, materialized_objects, MaterializedObjects) \
V(WeakArrayList, detached_contexts, DetachedContexts) \
V(WeakArrayList, retaining_path_targets, RetainingPathTargets) \

View File

@ -7,7 +7,6 @@
#include "src/assembler.h"
#include "src/base/platform/platform.h"
#include "src/bootstrapper.h"
#include "src/code-stubs.h"
#include "src/compilation-cache.h"
#include "src/compiler.h"
#include "src/execution.h"

View File

@ -3,7 +3,6 @@
// found in the LICENSE file.
#include "src/arguments-inl.h"
#include "src/code-stubs.h"
#include "src/conversions-inl.h"
#include "src/counters.h"
#include "src/debug/debug.h"

View File

@ -4,7 +4,6 @@
#include "src/snapshot/code-serializer.h"
#include "src/code-stubs.h"
#include "src/counters.h"
#include "src/debug/debug.h"
#include "src/log.h"
@ -142,11 +141,6 @@ void CodeSerializer::SerializeObject(HeapObject* obj, HowToCode how_to_code,
case Code::BYTECODE_HANDLER: // No direct references to handlers.
break; // hit UNREACHABLE below.
case Code::STUB:
if (code_object->builtin_index() == -1) {
return SerializeCodeStub(code_object, how_to_code, where_to_point);
} else {
return SerializeCodeObject(code_object, how_to_code, where_to_point);
}
case Code::BUILTIN:
default:
return SerializeCodeObject(code_object, how_to_code, where_to_point);
@ -242,25 +236,6 @@ void CodeSerializer::SerializeGeneric(HeapObject* heap_object,
serializer.Serialize();
}
void CodeSerializer::SerializeCodeStub(Code code_stub, HowToCode how_to_code,
WhereToPoint where_to_point) {
// We only arrive here if we have not encountered this code stub before.
DCHECK(!reference_map()->LookupReference(code_stub).is_valid());
uint32_t stub_key = code_stub->stub_key();
DCHECK(CodeStub::MajorKeyFromKey(stub_key) != CodeStub::NoCache);
DCHECK(!CodeStub::GetCode(isolate(), stub_key).is_null());
stub_keys_.push_back(stub_key);
SerializerReference reference =
reference_map()->AddAttachedReference(code_stub);
if (FLAG_trace_serializer) {
PrintF(" Encoding code stub %s as attached reference %d\n",
CodeStub::MajorName(CodeStub::MajorKeyFromKey(stub_key)),
reference.attached_reference_index());
}
PutAttachedReference(reference, how_to_code, where_to_point);
}
MaybeHandle<SharedFunctionInfo> CodeSerializer::Deserialize(
Isolate* isolate, ScriptData* cached_data, Handle<String> source,
ScriptOriginOptions origin_options) {
@ -331,13 +306,12 @@ MaybeHandle<SharedFunctionInfo> CodeSerializer::Deserialize(
SerializedCodeData::SerializedCodeData(const std::vector<byte>* payload,
const CodeSerializer* cs) {
DisallowHeapAllocation no_gc;
const std::vector<uint32_t>* stub_keys = cs->stub_keys();
std::vector<Reservation> reservations = cs->EncodeReservations();
// Calculate sizes.
uint32_t reservation_size =
static_cast<uint32_t>(reservations.size()) * kUInt32Size;
uint32_t num_stub_keys = static_cast<uint32_t>(stub_keys->size());
uint32_t num_stub_keys = 0; // TODO(jgruber): Remove.
uint32_t stub_keys_size = num_stub_keys * kUInt32Size;
uint32_t payload_offset = kHeaderSize + reservation_size + stub_keys_size;
uint32_t padded_payload_offset = POINTER_SIZE_ALIGN(payload_offset);
@ -371,10 +345,6 @@ SerializedCodeData::SerializedCodeData(const std::vector<byte>* payload,
reinterpret_cast<const byte*>(reservations.data()),
reservation_size);
// Copy code stub keys.
CopyBytes(data_ + kHeaderSize + reservation_size,
reinterpret_cast<const byte*>(stub_keys->data()), stub_keys_size);
// Copy serialized data.
CopyBytes(data_ + padded_payload_offset, payload->data(),
static_cast<size_t>(payload->size()));
@ -455,6 +425,7 @@ Vector<const byte> SerializedCodeData::Payload() const {
}
Vector<const uint32_t> SerializedCodeData::CodeStubKeys() const {
// TODO(jgruber): Remove.
int reservations_size = GetHeaderValue(kNumReservationsOffset) * kInt32Size;
const byte* start = data_ + kHeaderSize + reservations_size;
return Vector<const uint32_t>(reinterpret_cast<const uint32_t*>(start),
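
SerializedCodeData still reserves a (now always empty) stub-key area between the reservations and the payload, which is why num_stub_keys is forced to 0 here instead of reworking the layout, pending the TODO above. A rough sketch of the offset arithmetic, using assumed header-size and alignment constants rather than the real serializer format:

    #include <cstdint>

    constexpr uint32_t kHeaderSize = 32;   // assumed fixed header size
    constexpr uint32_t kUInt32Size = 4;
    constexpr uint32_t kPointerSize = 8;   // assumed pointer alignment

    uint32_t PaddedPayloadOffset(uint32_t num_reservations,
                                 uint32_t num_stub_keys) {
      uint32_t reservation_size = num_reservations * kUInt32Size;
      uint32_t stub_keys_size = num_stub_keys * kUInt32Size;  // always 0 now
      uint32_t payload_offset = kHeaderSize + reservation_size + stub_keys_size;
      // Mirrors POINTER_SIZE_ALIGN: round up to the next pointer boundary.
      return (payload_offset + kPointerSize - 1) & ~(kPointerSize - 1);
    }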

View File

@ -52,8 +52,6 @@ class CodeSerializer : public Serializer {
Isolate* isolate, ScriptData* cached_data, Handle<String> source,
ScriptOriginOptions origin_options);
const std::vector<uint32_t>* stub_keys() const { return &stub_keys_; }
uint32_t source_hash() const { return source_hash_; }
protected:
@ -73,15 +71,11 @@ class CodeSerializer : public Serializer {
void SerializeObject(HeapObject* o, HowToCode how_to_code,
WhereToPoint where_to_point, int skip) override;
void SerializeCodeStub(Code code_stub, HowToCode how_to_code,
WhereToPoint where_to_point);
bool SerializeReadOnlyObject(HeapObject* obj, HowToCode how_to_code,
WhereToPoint where_to_point, int skip);
DISALLOW_HEAP_ALLOCATION(no_gc_);
uint32_t source_hash_;
std::vector<uint32_t> stub_keys_;
DISALLOW_COPY_AND_ASSIGN(CodeSerializer);
};

View File

@ -5,7 +5,6 @@
#include "src/snapshot/object-deserializer.h"
#include "src/assembler-inl.h"
#include "src/code-stubs.h"
#include "src/isolate.h"
#include "src/objects.h"
#include "src/objects/slots.h"
@ -24,12 +23,6 @@ ObjectDeserializer::DeserializeSharedFunctionInfo(
d.AddAttachedObject(source);
Vector<const uint32_t> code_stub_keys = data->CodeStubKeys();
for (int i = 0; i < code_stub_keys.length(); i++) {
d.AddAttachedObject(
CodeStub::GetCode(isolate, code_stub_keys[i]).ToHandleChecked());
}
Handle<HeapObject> result;
return d.Deserialize(isolate).ToHandle(&result)
? Handle<SharedFunctionInfo>::cast(result)

View File

@ -6,7 +6,6 @@
#include "src/api.h"
#include "src/assembler-inl.h"
#include "src/code-stubs.h"
#include "src/heap/heap-inl.h"
#include "src/snapshot/read-only-deserializer.h"
#include "src/snapshot/snapshot.h"

View File

@ -18,7 +18,6 @@
#include "src/assembler-inl.h"
#include "src/base/bits.h"
#include "src/base/cpu.h"
#include "src/code-stubs.h"
#include "src/deoptimizer.h"
#include "src/macro-assembler.h"
#include "src/string-constants.h"
@ -335,11 +334,6 @@ void Assembler::AllocateAndInstallRequestedHeapObjects(Isolate* isolate) {
Memory<Handle<Object>>(pc) = object;
break;
}
case HeapObjectRequest::kCodeStub: {
request.code_stub()->set_isolate(isolate);
UpdateCodeTarget(Memory<int32_t>(pc), request.code_stub()->GetCode());
break;
}
case HeapObjectRequest::kStringConstant: {
const StringConstantBase* str = request.string();
CHECK_NOT_NULL(str);
@ -1111,16 +1105,6 @@ void Assembler::call(Address entry, RelocInfo::Mode rmode) {
emit_runtime_entry(entry, rmode);
}
void Assembler::call(CodeStub* stub) {
EnsureSpace ensure_space(this);
// 1110 1000 #32-bit disp.
emit(0xE8);
RequestHeapObject(HeapObjectRequest(stub));
RecordRelocInfo(RelocInfo::CODE_TARGET);
int code_target_index = AddCodeTarget(Handle<Code>());
emitl(code_target_index);
}
void Assembler::call(Handle<Code> target, RelocInfo::Mode rmode) {
DCHECK(RelocInfo::IsCodeTarget(rmode));
EnsureSpace ensure_space(this);

View File

@ -911,7 +911,6 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase {
void call(Address entry, RelocInfo::Mode rmode);
void near_call(Address entry, RelocInfo::Mode rmode);
void near_jmp(Address entry, RelocInfo::Mode rmode);
void call(CodeStub* stub);
void call(Handle<Code> target,
RelocInfo::Mode rmode = RelocInfo::CODE_TARGET);

View File

@ -10,7 +10,6 @@
#include "src/bootstrapper.h"
#include "src/callable.h"
#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/counters.h"
#include "src/debug/debug.h"
#include "src/external-reference-table.h"
@ -80,6 +79,7 @@ MacroAssembler::MacroAssembler(Isolate* isolate,
// marker in order to disambiguate between self-references during nested
// code generation (e.g.: codegen of the current object triggers stub
// compilation through CodeStub::GetCode()).
// TODO(jgruber): We can likely remove this now that code stubs are gone.
code_object_ = Handle<HeapObject>::New(
*isolate->factory()->NewSelfReferenceMarker(), isolate);
}
@ -553,20 +553,6 @@ void TurboAssembler::Abort(AbortReason reason) {
int3();
}
void MacroAssembler::CallStub(CodeStub* stub) {
DCHECK(AllowThisStubCall(stub)); // Calls are not allowed in some stubs
Call(stub->GetCode(), RelocInfo::CODE_TARGET);
}
void MacroAssembler::TailCallStub(CodeStub* stub) {
Jump(stub->GetCode(), RelocInfo::CODE_TARGET);
}
bool TurboAssembler::AllowThisStubCall(CodeStub* stub) {
return has_frame() || !stub->SometimesSetsUpAFrame();
}
void TurboAssembler::CallRuntimeWithCEntry(Runtime::FunctionId fid,
Register centry) {
const Runtime::Function* f = Runtime::FunctionForId(fid);

View File

@ -457,8 +457,6 @@ class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase {
Register caller_args_count_reg, Register scratch0,
Register scratch1);
inline bool AllowThisStubCall(CodeStub* stub);
// Call a runtime routine. This expects {centry} to contain a fitting CEntry
// builtin for the target runtime function and uses an indirect call.
void CallRuntimeWithCEntry(Runtime::FunctionId fid, Register centry);
@ -833,14 +831,6 @@ class V8_EXPORT_PRIVATE MacroAssembler : public TurboAssembler {
// ---------------------------------------------------------------------------
// Runtime calls
// Call a code stub.
// The code object is generated immediately, in contrast to
// TurboAssembler::CallStubDelayed.
void CallStub(CodeStub* stub);
// Tail call a code stub (jump).
void TailCallStub(CodeStub* stub);
// Call a runtime routine.
void CallRuntime(const Runtime::Function* f,
int num_arguments,

View File

@ -97,7 +97,6 @@ v8_source_set("cctest_sources") {
"compiler/test-run-native-calls.cc",
"compiler/test-run-retpoline.cc",
"compiler/test-run-stackcheck.cc",
"compiler/test-run-stubs.cc",
"compiler/test-run-tail-calls.cc",
"compiler/test-run-unwinding-info.cc",
"compiler/test-run-variables.cc",
@ -284,9 +283,6 @@ v8_source_set("cctest_sources") {
"assembler-helper-arm.cc",
"assembler-helper-arm.h",
"test-assembler-arm.cc",
"test-code-stubs-arm.cc",
"test-code-stubs.cc",
"test-code-stubs.h",
"test-disasm-arm.cc",
"test-macro-assembler-arm.cc",
"test-poison-disasm-arm.cc",
@ -295,9 +291,6 @@ v8_source_set("cctest_sources") {
} else if (v8_current_cpu == "arm64") {
sources += [ ### gcmole(arch:arm64) ###
"test-assembler-arm64.cc",
"test-code-stubs-arm64.cc",
"test-code-stubs.cc",
"test-code-stubs.h",
"test-disasm-arm64.cc",
"test-fuzz-arm64.cc",
"test-javascript-arm64.cc",
@ -309,54 +302,36 @@ v8_source_set("cctest_sources") {
} else if (v8_current_cpu == "x86") {
sources += [ ### gcmole(arch:ia32) ###
"test-assembler-ia32.cc",
"test-code-stubs-ia32.cc",
"test-code-stubs.cc",
"test-code-stubs.h",
"test-disasm-ia32.cc",
"test-log-stack-tracer.cc",
]
} else if (v8_current_cpu == "mips") {
sources += [ ### gcmole(arch:mips) ###
"test-assembler-mips.cc",
"test-code-stubs-mips.cc",
"test-code-stubs.cc",
"test-code-stubs.h",
"test-disasm-mips.cc",
"test-macro-assembler-mips.cc",
]
} else if (v8_current_cpu == "mipsel") {
sources += [ ### gcmole(arch:mipsel) ###
"test-assembler-mips.cc",
"test-code-stubs-mips.cc",
"test-code-stubs.cc",
"test-code-stubs.h",
"test-disasm-mips.cc",
"test-macro-assembler-mips.cc",
]
} else if (v8_current_cpu == "mips64") {
sources += [ ### gcmole(arch:mips64) ###
"test-assembler-mips64.cc",
"test-code-stubs-mips64.cc",
"test-code-stubs.cc",
"test-code-stubs.h",
"test-disasm-mips64.cc",
"test-macro-assembler-mips64.cc",
]
} else if (v8_current_cpu == "mips64el") {
sources += [ ### gcmole(arch:mips64el) ###
"test-assembler-mips64.cc",
"test-code-stubs-mips64.cc",
"test-code-stubs.cc",
"test-code-stubs.h",
"test-disasm-mips64.cc",
"test-macro-assembler-mips64.cc",
]
} else if (v8_current_cpu == "x64") {
sources += [ ### gcmole(arch:x64) ###
"test-assembler-x64.cc",
"test-code-stubs-x64.cc",
"test-code-stubs.cc",
"test-code-stubs.h",
"test-disasm-x64.cc",
"test-log-stack-tracer.cc",
"test-macro-assembler-x64.cc",

View File

@ -106,13 +106,6 @@
'test-func-name-inference/UpperCaseClass': [FAIL],
'test-func-name-inference/LowerCaseClass': [FAIL],
# Bug(5784). StubCache tests need to be redesigned, as a) they don't work
# in the new (ignition + turbofan) pipeline environment, and b) they are
# stymied by a move of code stubs into builtins.
'test-api/PrimaryStubCache': [SKIP],
'test-api/SecondaryStubCache': [SKIP],
'test-api/AccessCheckInIC': [SKIP],
# BUG(3742).
'test-mark-compact/MarkCompactCollector': [PASS, ['arch==arm', NO_VARIANTS]],

View File

@ -46,7 +46,7 @@ class CodeAssemblerTester {
: zone_(isolate->allocator(), ZONE_NAME),
scope_(isolate),
state_(isolate, &zone_, call_descriptor, Code::STUB, name,
PoisoningMitigationLevel::kDontPoison, 0, -1) {}
PoisoningMitigationLevel::kDontPoison, Builtins::kNoBuiltinId) {}
CodeAssemblerState* state() { return &state_; }

View File

@ -4,7 +4,6 @@
#include "src/api-inl.h"
#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/compiler.h"
#include "src/compiler/common-operator.h"
#include "src/compiler/graph.h"

View File

@ -1,238 +0,0 @@
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/bootstrapper.h"
#include "src/callable.h"
#include "src/code-stubs.h"
#include "src/compiler/common-operator.h"
#include "src/compiler/graph.h"
#include "src/compiler/js-graph.h"
#include "src/compiler/js-operator.h"
#include "src/compiler/linkage.h"
#include "src/compiler/machine-operator.h"
#include "src/compiler/pipeline.h"
#include "src/objects-inl.h"
#include "src/objects/js-array-inl.h"
#include "src/optimized-compilation-info.h"
#include "test/cctest/compiler/function-tester.h"
namespace v8 {
namespace internal {
namespace compiler {
class StubTester {
public:
StubTester(Zone* zone, CodeStub* stub)
: zone_(zone),
info_(ArrayVector("test"), zone, Code::STUB),
interface_descriptor_(stub->GetCallInterfaceDescriptor()),
descriptor_(Linkage::GetStubCallDescriptor(
zone, interface_descriptor_, stub->GetStackParameterCount(),
CallDescriptor::kNoFlags, Operator::kNoProperties)),
graph_(zone_),
common_(zone_),
tester_(InitializeFunctionTester(stub->GetCode()),
GetParameterCountWithContext()) {}
StubTester(Isolate* isolate, Zone* zone, Builtins::Name name)
: zone_(zone),
info_(ArrayVector("test"), zone, Code::STUB),
interface_descriptor_(
Builtins::CallableFor(isolate, name).descriptor()),
descriptor_(Linkage::GetStubCallDescriptor(
zone, interface_descriptor_,
interface_descriptor_.GetStackParameterCount(),
CallDescriptor::kNoFlags, Operator::kNoProperties)),
graph_(zone_),
common_(zone_),
tester_(InitializeFunctionTester(
Handle<Code>(isolate->builtins()->builtin(name), isolate)),
GetParameterCountWithContext()) {}
template <typename... Args>
Handle<Object> Call(Args... args) {
DCHECK_EQ(interface_descriptor_.GetParameterCount(), sizeof...(args));
MaybeHandle<Object> result =
tester_
.Call(args...,
Handle<HeapObject>(tester_.function->context(), ft().isolate))
.ToHandleChecked();
return result.ToHandleChecked();
}
FunctionTester& ft() { return tester_; }
private:
Graph* InitializeFunctionTester(Handle<Code> stub) {
// Add target, effect and control.
int node_count = GetParameterCountWithContext() + 3;
// Add extra inputs for the JSFunction parameter and the receiver (which for
// the tester is always undefined) to the start node.
Node* start =
graph_.NewNode(common_.Start(GetParameterCountWithContext() + 2));
Node** node_array = zone_->NewArray<Node*>(node_count);
node_array[0] = graph_.NewNode(common_.HeapConstant(stub));
for (int i = 0; i < GetParameterCountWithContext(); ++i) {
CHECK(IsAnyTagged(descriptor_->GetParameterType(i).representation()));
node_array[i + 1] = graph_.NewNode(common_.Parameter(i + 1), start);
}
node_array[node_count - 2] = start;
node_array[node_count - 1] = start;
Node* call =
graph_.NewNode(common_.Call(descriptor_), node_count, &node_array[0]);
Node* zero = graph_.NewNode(common_.Int32Constant(0));
Node* ret = graph_.NewNode(common_.Return(), zero, call, call, start);
Node* end = graph_.NewNode(common_.End(1), ret);
graph_.SetStart(start);
graph_.SetEnd(end);
return &graph_;
}
int GetParameterCountWithContext() {
return interface_descriptor_.GetParameterCount() + 1;
}
Zone* zone_;
OptimizedCompilationInfo info_;
CallInterfaceDescriptor interface_descriptor_;
CallDescriptor* descriptor_;
Graph graph_;
CommonOperatorBuilder common_;
FunctionTester tester_;
};
TEST(RunStringWrapperLengthStub) {
HandleAndZoneScope scope;
Isolate* isolate = scope.main_isolate();
Zone* zone = scope.main_zone();
StubTester tester(isolate, zone, Builtins::kLoadIC_StringWrapperLength);
// Actually call through to the stub, verifying its result.
const char* testString = "Und das Lamm schrie HURZ!";
Handle<Object> receiverArg =
Object::ToObject(isolate, tester.ft().Val(testString)).ToHandleChecked();
Handle<Object> nameArg = tester.ft().Val("length");
Handle<Object> slot = tester.ft().Val(0.0);
Handle<Object> vector = tester.ft().Val(0.0);
Handle<Object> result = tester.Call(receiverArg, nameArg, slot, vector);
CHECK_EQ(static_cast<int>(strlen(testString)), Smi::ToInt(*result));
}
TEST(RunArrayExtractStubSimple) {
HandleAndZoneScope scope;
Isolate* isolate = scope.main_isolate();
Zone* zone = scope.main_zone();
StubTester tester(isolate, zone, Builtins::kExtractFastJSArray);
// Actually call through to the stub, verifying its result.
Handle<JSArray> source_array = isolate->factory()->NewJSArray(
PACKED_ELEMENTS, 5, 10, INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE);
FixedArray source_elements = FixedArray::cast(source_array->elements());
source_elements->set(0, Smi::FromInt(5));
source_elements->set(1, Smi::FromInt(4));
source_elements->set(2, Smi::FromInt(3));
source_elements->set(3, Smi::FromInt(2));
source_elements->set(4, Smi::FromInt(1));
Handle<JSArray> result = Handle<JSArray>::cast(
tester.Call(source_array, Handle<Smi>(Smi::FromInt(0), isolate),
Handle<Smi>(Smi::FromInt(5), isolate)));
CHECK_NE(*source_array, *result);
CHECK_EQ(result->GetElementsKind(), PACKED_ELEMENTS);
FixedArray result_elements = FixedArray::cast(result->elements());
CHECK_EQ(result_elements->get(0), Smi::FromInt(5));
CHECK_EQ(result_elements->get(1), Smi::FromInt(4));
CHECK_EQ(result_elements->get(2), Smi::FromInt(3));
CHECK_EQ(result_elements->get(3), Smi::FromInt(2));
CHECK_EQ(result_elements->get(4), Smi::FromInt(1));
}
TEST(RunArrayExtractDoubleStubSimple) {
HandleAndZoneScope scope;
Isolate* isolate = scope.main_isolate();
Zone* zone = scope.main_zone();
StubTester tester(isolate, zone, Builtins::kExtractFastJSArray);
// Actually call through to the stub, verifying its result.
Handle<JSArray> source_array = isolate->factory()->NewJSArray(
PACKED_DOUBLE_ELEMENTS, 5, 10, INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE);
FixedDoubleArray source_elements =
FixedDoubleArray::cast(source_array->elements());
source_elements->set(0, 5);
source_elements->set(1, 4);
source_elements->set(2, 3);
source_elements->set(3, 2);
source_elements->set(4, 1);
Handle<JSArray> result = Handle<JSArray>::cast(
tester.Call(source_array, Handle<Smi>(Smi::FromInt(0), isolate),
Handle<Smi>(Smi::FromInt(5), isolate)));
CHECK_NE(*source_array, *result);
CHECK_EQ(result->GetElementsKind(), PACKED_DOUBLE_ELEMENTS);
FixedDoubleArray result_elements = FixedDoubleArray::cast(result->elements());
CHECK_EQ(result_elements->get_scalar(0), 5);
CHECK_EQ(result_elements->get_scalar(1), 4);
CHECK_EQ(result_elements->get_scalar(2), 3);
CHECK_EQ(result_elements->get_scalar(3), 2);
CHECK_EQ(result_elements->get_scalar(4), 1);
}
TEST(RunArrayExtractStubTooBigForNewSpace) {
HandleAndZoneScope scope;
Isolate* isolate = scope.main_isolate();
Zone* zone = scope.main_zone();
StubTester tester(isolate, zone, Builtins::kExtractFastJSArray);
// Actually call through to the stub, verifying its result.
Handle<JSArray> source_array = isolate->factory()->NewJSArray(
PACKED_ELEMENTS, 500000, 500000, INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE);
Handle<FixedArray> source_elements(FixedArray::cast(source_array->elements()),
isolate);
for (int i = 0; i < 500000; ++i) {
source_elements->set(i, Smi::FromInt(i));
}
Handle<JSArray> result = Handle<JSArray>::cast(
tester.Call(source_array, Handle<Smi>(Smi::FromInt(0), isolate),
Handle<Smi>(Smi::FromInt(500000), isolate)));
CHECK_NE(*source_array, *result);
CHECK_EQ(result->GetElementsKind(), PACKED_ELEMENTS);
FixedArray result_elements = FixedArray::cast(result->elements());
for (int i = 0; i < 500000; ++i) {
CHECK_EQ(source_elements->get(i), result_elements->get(i));
}
}
TEST(RunArrayExtractDoubleStubTooBigForNewSpace) {
HandleAndZoneScope scope;
Isolate* isolate = scope.main_isolate();
Zone* zone = scope.main_zone();
StubTester tester(isolate, zone, Builtins::kExtractFastJSArray);
// Actually call through to the stub, verifying its result.
Handle<JSArray> source_array = isolate->factory()->NewJSArray(
PACKED_DOUBLE_ELEMENTS, 500000, 500000,
INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE, TENURED);
Handle<FixedDoubleArray> source_elements(
FixedDoubleArray::cast(source_array->elements()), isolate);
for (int i = 0; i < 500000; ++i) {
source_elements->set(i, i);
}
Handle<JSArray> result = Handle<JSArray>::cast(
tester.Call(source_array, Handle<Smi>(Smi::FromInt(0), isolate),
Handle<Smi>(Smi::FromInt(500000), isolate)));
CHECK_NE(*source_array, *result);
CHECK_EQ(result->GetElementsKind(), PACKED_DOUBLE_ELEMENTS);
FixedDoubleArray result_elements = FixedDoubleArray::cast(result->elements());
for (int i = 0; i < 500000; ++i) {
CHECK_EQ(source_elements->get_scalar(i), result_elements->get_scalar(i));
}
}
} // namespace compiler
} // namespace internal
} // namespace v8

Some files were not shown because too many files have changed in this diff.