// File: v8/src/code-stubs.h (2506 lines, 74 KiB, C++)
// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#ifndef V8_CODE_STUBS_H_
#define V8_CODE_STUBS_H_
#include "allocation.h"
#include "assembler.h"
#include "codegen.h"
#include "globals.h"
#include "macro-assembler.h"
namespace v8 {
namespace internal {
// List of code stubs used on all platforms.
// Each V(Name) entry becomes a CodeStub::Major enum value via DEF_ENUM in
// CodeStub below, and is expected to correspond to a NameStub class.
#define CODE_STUB_LIST_ALL_PLATFORMS(V) \
V(CallFunction) \
V(CallConstruct) \
V(BinaryOp) \
V(StringAdd) \
V(NewStringAdd) \
V(SubString) \
V(StringCompare) \
V(Compare) \
V(CompareIC) \
V(CompareNilIC) \
V(MathPow) \
V(StringLength) \
V(FunctionPrototype) \
V(StoreArrayLength) \
V(RecordWrite) \
V(StoreBufferOverflow) \
V(RegExpExec) \
V(TranscendentalCache) \
V(Instanceof) \
V(ConvertToDouble) \
V(WriteInt32ToHeapNumber) \
V(StackCheck) \
V(Interrupt) \
V(FastNewClosure) \
V(FastNewContext) \
V(FastNewBlockContext) \
V(FastCloneShallowArray) \
V(FastCloneShallowObject) \
V(CreateAllocationSite) \
V(ToBoolean) \
V(ToNumber) \
V(ArgumentsAccess) \
V(RegExpConstructResult) \
V(NumberToString) \
V(DoubleToI) \
V(CEntry) \
V(JSEntry) \
V(KeyedLoadElement) \
V(ArrayNoArgumentConstructor) \
V(ArraySingleArgumentConstructor) \
V(ArrayNArgumentsConstructor) \
V(InternalArrayNoArgumentConstructor) \
V(InternalArraySingleArgumentConstructor) \
V(InternalArrayNArgumentsConstructor) \
V(KeyedStoreElement) \
V(DebuggerStatement) \
V(NameDictionaryLookup) \
V(ElementsTransitionAndStore) \
V(TransitionElementsKind) \
V(StoreArrayLiteralElement) \
V(StubFailureTrampoline) \
V(StubFailureTailCallTrampoline) \
V(ArrayConstructor) \
V(InternalArrayConstructor) \
V(ProfileEntryHook) \
V(StoreGlobal) \
/* IC Handler stubs */ \
V(LoadField) \
V(KeyedLoadField) \
V(KeyedArrayCall)
// List of code stubs only used on ARM platforms.
#if V8_TARGET_ARCH_ARM
#define CODE_STUB_LIST_ARM(V) \
V(GetProperty) \
V(SetProperty) \
V(InvokeBuiltin) \
V(DirectCEntry)
#else
// Expands to nothing on non-ARM targets.
#define CODE_STUB_LIST_ARM(V)
#endif
// List of code stubs only used on MIPS platforms.
#if V8_TARGET_ARCH_MIPS
#define CODE_STUB_LIST_MIPS(V) \
V(RegExpCEntry) \
V(DirectCEntry)
#else
// Expands to nothing on non-MIPS targets.
#define CODE_STUB_LIST_MIPS(V)
#endif
// Combined list of code stubs: all-platform entries plus the entries for
// the current target architecture.
#define CODE_STUB_LIST(V) \
CODE_STUB_LIST_ALL_PLATFORMS(V) \
CODE_STUB_LIST_ARM(V) \
CODE_STUB_LIST_MIPS(V)
// Mode to overwrite BinaryExpression values: indicates which operand (if
// any) may be overwritten in place with the operation's result.
enum OverwriteMode { NO_OVERWRITE, OVERWRITE_LEFT, OVERWRITE_RIGHT };
// CodeStub is the abstract base class of all code stubs: small pieces of
// generated code that are cached per isolate and looked up by a 32-bit key
// composed of a Major (stub kind) and a stub-specific minor key.
class CodeStub BASE_EMBEDDED {
public:
// One enum value per entry of CODE_STUB_LIST; forms the major part of the
// stub cache key (see GetKey()).
enum Major {
#define DEF_ENUM(name) name,
CODE_STUB_LIST(DEF_ENUM)
#undef DEF_ENUM
NoCache, // marker for stubs that do custom caching
NUMBER_OF_IDS
};
// Retrieve the code for the stub. Generate the code if needed.
Handle<Code> GetCode(Isolate* isolate);
// Retrieve the code for the stub, make and return a copy of the code.
Handle<Code> GetCodeCopyFromTemplate(Isolate* isolate);
// Decodes the Major portion of a 32-bit stub key.
static Major MajorKeyFromKey(uint32_t key) {
return static_cast<Major>(MajorKeyBits::decode(key));
}
// Decodes the minor-key portion of a 32-bit stub key.
static int MinorKeyFromKey(uint32_t key) {
return MinorKeyBits::decode(key);
}
// Gets the major key from a code object that is a code stub or binary op IC.
static Major GetMajorKey(Code* code_stub) {
return static_cast<Major>(code_stub->major_key());
}
static const char* MajorName(Major major_key, bool allow_unknown_keys);
virtual ~CodeStub() {}
// True iff compiling a call to this stub cannot trigger GC. That is the
// case exactly when the stub is pregenerated; a pregenerated stub is
// CHECKed to already be present in the cache.
bool CompilingCallsToThisStubIsGCSafe(Isolate* isolate) {
bool is_pregenerated = IsPregenerated(isolate);
Code* code = NULL;
CHECK(!is_pregenerated || FindCodeInCache(&code, isolate));
return is_pregenerated;
}
// See comment above, where Instanceof is defined.
virtual bool IsPregenerated(Isolate* isolate) { return false; }
static void GenerateStubsAheadOfTime(Isolate* isolate);
static void GenerateStubsRequiringBuiltinsAheadOfTime(Isolate* isolate);
static void GenerateFPStubs(Isolate* isolate);
// Some stubs put untagged junk on the stack that cannot be scanned by the
// GC. This means that we must be statically sure that no GC can occur while
// they are running. If that is the case they should override this to return
// true, which will cause an assertion if we try to call something that can
// GC or if we try to put a stack frame on top of the junk, which would not
// result in a traversable stack.
virtual bool SometimesSetsUpAFrame() { return true; }
// Lookup the code in the (possibly custom) cache.
bool FindCodeInCache(Code** code_out, Isolate* isolate);
// Returns information for computing the number key.
virtual Major MajorKey() = 0;
virtual int MinorKey() = 0;
virtual InlineCacheState GetICState() {
return UNINITIALIZED;
}
virtual Code::ExtraICState GetExtraICState() {
return Code::kNoExtraICState;
}
virtual Code::StubType GetStubType() {
return Code::NORMAL;
}
// Stub flags for IC handler stubs; -1 means "not a handler stub".
virtual int GetStubFlags() {
return -1;
}
virtual void PrintName(StringStream* stream);
// Returns a name for logging/debugging purposes.
SmartArrayPointer<const char> GetName();
protected:
static bool CanUseFPRegisters();
// Generates the assembler code for the stub.
virtual Handle<Code> GenerateCode(Isolate* isolate) = 0;
// Verifies that required CPU features are available for this stub.
virtual void VerifyPlatformFeatures(Isolate* isolate);
// Returns whether the code generated for this stub needs to be allocated as
// a fixed (non-moveable) code object.
virtual bool NeedsImmovableCode() { return false; }
virtual void PrintBaseName(StringStream* stream);
virtual void PrintState(StringStream* stream) { }
private:
// Perform bookkeeping required after code generation when stub code is
// initially generated.
void RecordCodeGeneration(Code* code, Isolate* isolate);
// Finish the code object after it has been generated.
virtual void FinishCode(Handle<Code> code) { }
// Activate newly generated stub. Is called after
// registering stub in the stub cache.
virtual void Activate(Code* code) { }
// BinaryOpStub needs to override this.
virtual Code::Kind GetCodeKind() const;
// Add the code to a specialized cache, specific to an individual
// stub type. Please note, this method must add the code object to a
// roots object, otherwise we will remove the code during GC.
virtual void AddToSpecialCache(Handle<Code> new_object) { }
// Find code in a specialized cache, work is delegated to the specific stub.
virtual bool FindCodeInSpecialCache(Code** code_out, Isolate* isolate) {
return false;
}
// If a stub uses a special cache override this.
virtual bool UseSpecialCache() { return false; }
// Computes the key based on major and minor.
uint32_t GetKey() {
ASSERT(static_cast<int>(MajorKey()) < NUMBER_OF_IDS);
return MinorKeyBits::encode(MinorKey()) |
MajorKeyBits::encode(MajorKey());
}
// Key layout: major key in the low kStubMajorKeyBits bits, minor key in
// the kStubMinorKeyBits bits above it.
class MajorKeyBits: public BitField<uint32_t, 0, kStubMajorKeyBits> {};
class MinorKeyBits: public BitField<uint32_t,
kStubMajorKeyBits, kStubMinorKeyBits> {}; // NOLINT
friend class BreakPointIterator;
};
// A code stub whose code is emitted directly through a platform-specific
// MacroAssembler (as opposed to HydrogenCodeStub, which builds a graph).
class PlatformCodeStub : public CodeStub {
public:
// Retrieve the code for the stub. Generate the code if needed.
virtual Handle<Code> GenerateCode(Isolate* isolate);
virtual Code::Kind GetCodeKind() const { return Code::STUB; }
protected:
// Generates the assembler code for the stub.
virtual void Generate(MacroAssembler* masm) = 0;
};
// Calling convention modes used by CodeStubInterfaceDescriptor below.
enum StubFunctionMode { NOT_JS_FUNCTION_STUB_MODE, JS_FUNCTION_STUB_MODE };
// PASS_ARGUMENTS adds one extra handler parameter (see
// CodeStubInterfaceDescriptor::GetHandlerParameterCount()).
enum HandlerArgumentsMode { DONT_PASS_ARGUMENTS, PASS_ARGUMENTS };
// Whether the stub returns normally or continues via a tail call.
enum ContinuationType { NORMAL_CONTINUATION, TAIL_CALL_CONTINUATION };
// Describes the register-based calling convention of a (hydrogen) code
// stub: which registers carry parameters, how many stack parameters there
// are, and the deoptimization/miss handlers associated with the stub.
struct CodeStubInterfaceDescriptor {
CodeStubInterfaceDescriptor();
// Negative until initialized; see initialized() below.
int register_param_count_;
// Register holding a variable stack parameter count; may be invalid when
// the count is fixed.
Register stack_parameter_count_;
// if hint_stack_parameter_count_ > 0, the code stub can optimize the
// return sequence. Default value is -1, which means it is ignored.
int hint_stack_parameter_count_;
ContinuationType continuation_type_;
StubFunctionMode function_mode_;
// Points to an array of register_param_count_ registers.
Register* register_params_;
Address deoptimization_handler_;
HandlerArgumentsMode handler_arguments_mode_;
bool initialized() const { return register_param_count_ >= 0; }
bool HasTailCallContinuation() const {
return continuation_type_ == TAIL_CALL_CONTINUATION;
}
int environment_length() const {
return register_param_count_;
}
// Registers a runtime function to call when the stub misses. Only valid
// for fixed stack parameter counts (see ASSERT below).
void SetMissHandler(ExternalReference handler) {
miss_handler_ = handler;
has_miss_handler_ = true;
// Our miss handler infrastructure doesn't currently support
// variable stack parameter counts.
ASSERT(!stack_parameter_count_.is_valid());
}
// Only callable after SetMissHandler() has been invoked.
ExternalReference miss_handler() {
ASSERT(has_miss_handler_);
return miss_handler_;
}
bool has_miss_handler() {
return has_miss_handler_;
}
Register GetParameterRegister(int index) {
return register_params_[index];
}
bool IsParameterCountRegister(int index) {
return GetParameterRegister(index).is(stack_parameter_count_);
}
// Register parameter count, plus one if the arguments object is passed.
int GetHandlerParameterCount() {
int params = environment_length();
if (handler_arguments_mode_ == PASS_ARGUMENTS) {
params += 1;
}
return params;
}
private:
ExternalReference miss_handler_;
bool has_miss_handler_;
};
// A code stub compiled through the Hydrogen pipeline. An UNINITIALIZED
// stub generates a lightweight miss path instead of the full code; the
// "is miss" flag is folded into the top bit of the minor key.
class HydrogenCodeStub : public CodeStub {
public:
enum InitializationState {
UNINITIALIZED,
INITIALIZED
};
explicit HydrogenCodeStub(InitializationState state = INITIALIZED) {
is_uninitialized_ = (state == UNINITIALIZED);
}
virtual Code::Kind GetCodeKind() const { return Code::STUB; }
CodeStubInterfaceDescriptor* GetInterfaceDescriptor(Isolate* isolate) {
return isolate->code_stub_interface_descriptor(MajorKey());
}
bool IsUninitialized() { return is_uninitialized_; }
// Generates SubClass ahead of time and returns its (initialized) code.
template<class SubClass>
static Handle<Code> GetUninitialized(Isolate* isolate) {
SubClass::GenerateAheadOfTime(isolate);
return SubClass().GetCode(isolate);
}
virtual void InitializeInterfaceDescriptor(
Isolate* isolate,
CodeStubInterfaceDescriptor* descriptor) = 0;
// Retrieve the code for the stub. Generate the code if needed.
virtual Handle<Code> GenerateCode(Isolate* isolate) = 0;
// Subclass-specific portion of the minor key (everything but the miss bit).
virtual int NotMissMinorKey() = 0;
Handle<Code> GenerateLightweightMissCode(Isolate* isolate);
template<class StateType>
void TraceTransition(StateType from, StateType to);
private:
// Minor key layout: NotMissMinorKey() in the low bits, miss flag on top.
class MinorKeyBits: public BitField<int, 0, kStubMinorKeyBits - 1> {};
class IsMissBits: public BitField<bool, kStubMinorKeyBits - 1, 1> {};
void GenerateLightweightMiss(MacroAssembler* masm);
virtual int MinorKey() {
return IsMissBits::encode(is_uninitialized_) |
MinorKeyBits::encode(NotMissMinorKey());
}
bool is_uninitialized_;
};
// Helper interface to prepare to/restore after making runtime calls.
// Subclasses hook in code before and after the call (see
// StubRuntimeCallHelper / NopRuntimeCallHelper below).
class RuntimeCallHelper {
public:
virtual ~RuntimeCallHelper() {}
virtual void BeforeCall(MacroAssembler* masm) const = 0;
virtual void AfterCall(MacroAssembler* masm) const = 0;
protected:
RuntimeCallHelper() {}
private:
DISALLOW_COPY_AND_ASSIGN(RuntimeCallHelper);
};
// TODO(bmeurer): Move to the StringAddStub declaration once we're
// done with the translation to a hydrogen code stub.
// Bit flags controlling which parameters of a string add are type-checked.
enum StringAddFlags {
// Omit both parameter checks.
STRING_ADD_CHECK_NONE = 0,
// Check left parameter.
STRING_ADD_CHECK_LEFT = 1 << 0,
// Check right parameter.
STRING_ADD_CHECK_RIGHT = 1 << 1,
// Check both parameters.
STRING_ADD_CHECK_BOTH = STRING_ADD_CHECK_LEFT | STRING_ADD_CHECK_RIGHT
};
} } // namespace v8::internal
#if V8_TARGET_ARCH_IA32
#include "ia32/code-stubs-ia32.h"
#elif V8_TARGET_ARCH_X64
#include "x64/code-stubs-x64.h"
#elif V8_TARGET_ARCH_ARM
#include "arm/code-stubs-arm.h"
#elif V8_TARGET_ARCH_MIPS
#include "mips/code-stubs-mips.h"
#else
#error Unsupported target architecture.
#endif
namespace v8 {
namespace internal {
// RuntimeCallHelper implementation used in stubs: enters/leaves a
// newly created internal frame before/after the runtime call.
class StubRuntimeCallHelper : public RuntimeCallHelper {
public:
StubRuntimeCallHelper() {}
virtual void BeforeCall(MacroAssembler* masm) const;
virtual void AfterCall(MacroAssembler* masm) const;
};
// Trivial RuntimeCallHelper implementation: performs no setup or teardown
// around the runtime call.
class NopRuntimeCallHelper : public RuntimeCallHelper {
public:
NopRuntimeCallHelper() {}
virtual void BeforeCall(MacroAssembler* masm) const {}
virtual void AfterCall(MacroAssembler* masm) const {}
};
// Hydrogen stub for the ToNumber conversion. Stateless: its minor key is
// always zero.
class ToNumberStub: public HydrogenCodeStub {
public:
ToNumberStub() { }
virtual Handle<Code> GenerateCode(Isolate* isolate);
virtual void InitializeInterfaceDescriptor(
Isolate* isolate,
CodeStubInterfaceDescriptor* descriptor);
private:
Major MajorKey() { return ToNumber; }
int NotMissMinorKey() { return 0; }
};
// Hydrogen stub converting a number to a string. Stateless (minor key 0).
class NumberToStringStub V8_FINAL : public HydrogenCodeStub {
public:
NumberToStringStub() {}
virtual Handle<Code> GenerateCode(Isolate* isolate) V8_OVERRIDE;
virtual void InitializeInterfaceDescriptor(
Isolate* isolate,
CodeStubInterfaceDescriptor* descriptor) V8_OVERRIDE;
static void InstallDescriptors(Isolate* isolate);
// Parameters accessed via CodeStubGraphBuilder::GetParameter()
static const int kNumber = 0;
private:
virtual Major MajorKey() V8_OVERRIDE { return NumberToString; }
virtual int NotMissMinorKey() V8_OVERRIDE { return 0; }
};
// Hydrogen stub for fast closure allocation, specialized on the language
// mode and whether the function is a generator.
class FastNewClosureStub : public HydrogenCodeStub {
 public:
  // NOTE: the former |explicit| was dropped — it is meaningless on a
  // constructor with more than one parameter.
  FastNewClosureStub(LanguageMode language_mode, bool is_generator)
      : language_mode_(language_mode),
        is_generator_(is_generator) { }

  virtual Handle<Code> GenerateCode(Isolate* isolate);

  virtual void InitializeInterfaceDescriptor(
      Isolate* isolate,
      CodeStubInterfaceDescriptor* descriptor);

  static void InstallDescriptors(Isolate* isolate);

  LanguageMode language_mode() const { return language_mode_; }
  bool is_generator() const { return is_generator_; }

 private:
  // Minor key layout: bit 0 = "not classic mode", bit 1 = generator flag.
  class StrictModeBits: public BitField<bool, 0, 1> {};
  class IsGeneratorBits: public BitField<bool, 1, 1> {};

  Major MajorKey() { return FastNewClosure; }
  int NotMissMinorKey() {
    // The key only records whether the mode differs from CLASSIC_MODE;
    // finer language-mode distinctions are not encoded.
    return StrictModeBits::encode(language_mode_ != CLASSIC_MODE) |
        IsGeneratorBits::encode(is_generator_);
  }

  LanguageMode language_mode_;
  bool is_generator_;
};
// Platform stub allocating a new function context with a fixed number of
// slots; the slot count doubles as the minor key.
class FastNewContextStub : public PlatformCodeStub {
public:
static const int kMaximumSlots = 64;
explicit FastNewContextStub(int slots) : slots_(slots) {
ASSERT(slots_ > 0 && slots_ <= kMaximumSlots);
}
void Generate(MacroAssembler* masm);
private:
int slots_;
Major MajorKey() { return FastNewContext; }
int MinorKey() { return slots_; }
};
// Platform stub allocating a new block context with a fixed number of
// slots; mirrors FastNewContextStub, slot count is the minor key.
class FastNewBlockContextStub : public PlatformCodeStub {
public:
static const int kMaximumSlots = 64;
explicit FastNewBlockContextStub(int slots) : slots_(slots) {
ASSERT(slots_ > 0 && slots_ <= kMaximumSlots);
}
void Generate(MacroAssembler* masm);
private:
int slots_;
Major MajorKey() { return FastNewBlockContext; }
int MinorKey() { return slots_; }
};
// Hydrogen stub performing a shallow clone of a small array literal,
// specialized on clone mode, allocation-site tracking, and element count.
class FastCloneShallowArrayStub : public HydrogenCodeStub {
public:
// Maximum length of copied elements array.
static const int kMaximumClonedLength = 8;
enum Mode {
CLONE_ELEMENTS,
CLONE_DOUBLE_ELEMENTS,
COPY_ON_WRITE_ELEMENTS,
CLONE_ANY_ELEMENTS,
LAST_CLONE_MODE = CLONE_ANY_ELEMENTS
};
static const int kFastCloneModeCount = LAST_CLONE_MODE + 1;
// COPY_ON_WRITE_ELEMENTS forces length_ to 0, since no elements are
// actually copied in that mode.
FastCloneShallowArrayStub(Mode mode,
AllocationSiteMode allocation_site_mode,
int length)
: mode_(mode),
allocation_site_mode_(allocation_site_mode),
length_((mode == COPY_ON_WRITE_ELEMENTS) ? 0 : length) {
ASSERT_GE(length_, 0);
ASSERT_LE(length_, kMaximumClonedLength);
}
Mode mode() const { return mode_; }
int length() const { return length_; }
AllocationSiteMode allocation_site_mode() const {
return allocation_site_mode_;
}
// Maps the clone mode to a concrete ElementsKind; CLONE_ANY_ELEMENTS has
// no single kind and falls through to UNREACHABLE().
ElementsKind ComputeElementsKind() const {
switch (mode()) {
case CLONE_ELEMENTS:
case COPY_ON_WRITE_ELEMENTS:
return FAST_ELEMENTS;
case CLONE_DOUBLE_ELEMENTS:
return FAST_DOUBLE_ELEMENTS;
case CLONE_ANY_ELEMENTS:
/*fall-through*/;
}
UNREACHABLE();
return LAST_ELEMENTS_KIND;
}
virtual Handle<Code> GenerateCode(Isolate* isolate);
virtual void InitializeInterfaceDescriptor(
Isolate* isolate,
CodeStubInterfaceDescriptor* descriptor);
private:
Mode mode_;
AllocationSiteMode allocation_site_mode_;
int length_;
// Minor key layout: bit 0 = allocation site mode, bits 1-4 = clone mode,
// bits 5-8 = length.
class AllocationSiteModeBits: public BitField<AllocationSiteMode, 0, 1> {};
class ModeBits: public BitField<Mode, 1, 4> {};
class LengthBits: public BitField<int, 5, 4> {};
// Ensure data fits within available bits.
STATIC_ASSERT(LAST_ALLOCATION_SITE_MODE == 1);
STATIC_ASSERT(kFastCloneModeCount < 16);
STATIC_ASSERT(kMaximumClonedLength < 16);
Major MajorKey() { return FastCloneShallowArray; }
int NotMissMinorKey() {
return AllocationSiteModeBits::encode(allocation_site_mode_)
| ModeBits::encode(mode_)
| LengthBits::encode(length_);
}
};
// Hydrogen stub performing a shallow clone of a small object literal,
// specialized on the property count (which is also the minor key).
class FastCloneShallowObjectStub : public HydrogenCodeStub {
public:
// Maximum number of properties in copied object.
static const int kMaximumClonedProperties = 6;
explicit FastCloneShallowObjectStub(int length)
: length_(length) {
ASSERT_GE(length_, 0);
ASSERT_LE(length_, kMaximumClonedProperties);
}
int length() const { return length_; }
virtual Handle<Code> GenerateCode(Isolate* isolate);
virtual void InitializeInterfaceDescriptor(
Isolate* isolate,
CodeStubInterfaceDescriptor* descriptor);
private:
int length_;
Major MajorKey() { return FastCloneShallowObject; }
int NotMissMinorKey() { return length_; }
DISALLOW_COPY_AND_ASSIGN(FastCloneShallowObjectStub);
};
// Hydrogen stub that creates an AllocationSite. Always pregenerated
// (IsPregenerated() returns true), so calls to it are GC-safe per
// CodeStub::CompilingCallsToThisStubIsGCSafe().
class CreateAllocationSiteStub : public HydrogenCodeStub {
 public:
  // NOTE: the former |explicit| was dropped — it has no effect on a
  // zero-argument constructor.
  CreateAllocationSiteStub() { }

  virtual Handle<Code> GenerateCode(Isolate* isolate);

  virtual bool IsPregenerated(Isolate* isolate) V8_OVERRIDE { return true; }

  static void GenerateAheadOfTime(Isolate* isolate);

  virtual void InitializeInterfaceDescriptor(
      Isolate* isolate,
      CodeStubInterfaceDescriptor* descriptor);

 private:
  // Stateless stub: major key alone identifies it.
  Major MajorKey() { return CreateAllocationSite; }
  int NotMissMinorKey() { return 0; }

  DISALLOW_COPY_AND_ASSIGN(CreateAllocationSiteStub);
};
// Platform stub implementing the instanceof operator; behavior is
// selected by bit flags which also form the minor key.
class InstanceofStub: public PlatformCodeStub {
public:
enum Flags {
kNoFlags = 0,
// Arguments are passed in registers rather than on the stack.
kArgsInRegisters = 1 << 0,
// The map check is inlined at the call site.
kCallSiteInlineCheck = 1 << 1,
// Return the true/false objects instead of raw outcomes.
kReturnTrueFalseObject = 1 << 2
};
explicit InstanceofStub(Flags flags) : flags_(flags) { }
static Register left();
static Register right();
void Generate(MacroAssembler* masm);
private:
Major MajorKey() { return Instanceof; }
int MinorKey() { return static_cast<int>(flags_); }
bool HasArgsInRegisters() const {
return (flags_ & kArgsInRegisters) != 0;
}
bool HasCallSiteInlineCheck() const {
return (flags_ & kCallSiteInlineCheck) != 0;
}
bool ReturnTrueFalseObject() const {
return (flags_ & kReturnTrueFalseObject) != 0;
}
virtual void PrintName(StringStream* stream);
Flags flags_;
};
// Controls whether array constructor stubs honor or disable allocation
// site feedback.
enum AllocationSiteOverrideMode {
DONT_OVERRIDE,
DISABLE_ALLOCATION_SITES,
LAST_ALLOCATION_SITE_OVERRIDE_MODE = DISABLE_ALLOCATION_SITES
};
// Platform stub dispatching Array constructor calls to the appropriate
// specialized array stub based on argument count.
class ArrayConstructorStub: public PlatformCodeStub {
public:
// ANY is used when the argument count is not known statically.
enum ArgumentCountKey { ANY, NONE, ONE, MORE_THAN_ONE };
ArrayConstructorStub(Isolate* isolate, int argument_count);
explicit ArrayConstructorStub(Isolate* isolate);
void Generate(MacroAssembler* masm);
private:
void GenerateDispatchToArrayStub(MacroAssembler* masm,
AllocationSiteOverrideMode mode);
virtual CodeStub::Major MajorKey() { return ArrayConstructor; }
virtual int MinorKey() { return argument_count_; }
ArgumentCountKey argument_count_;
};
// Platform stub for the internal (hidden) Array constructor. Stateless:
// minor key is always zero.
class InternalArrayConstructorStub: public PlatformCodeStub {
public:
explicit InternalArrayConstructorStub(Isolate* isolate);
void Generate(MacroAssembler* masm);
private:
virtual CodeStub::Major MajorKey() { return InternalArrayConstructor; }
virtual int MinorKey() { return 0; }
// Emits the constructor path for one elements kind.
void GenerateCase(MacroAssembler* masm, ElementsKind kind);
};
// Platform stub computing Math.pow, specialized on how the exponent is
// passed (the exponent type is the minor key).
class MathPowStub: public PlatformCodeStub {
public:
enum ExponentType { INTEGER, DOUBLE, TAGGED, ON_STACK };
explicit MathPowStub(ExponentType exponent_type)
: exponent_type_(exponent_type) { }
virtual void Generate(MacroAssembler* masm);
private:
virtual CodeStub::Major MajorKey() { return MathPow; }
virtual int MinorKey() { return exponent_type_; }
ExponentType exponent_type_;
};
// Base class for platform-generated monomorphic IC stubs; the IC kind is
// stored in the low 4 bits of the minor key and in the code's stub_info.
class ICStub: public PlatformCodeStub {
public:
explicit ICStub(Code::Kind kind) : kind_(kind) { }
virtual Code::Kind GetCodeKind() const { return kind_; }
virtual InlineCacheState GetICState() { return MONOMORPHIC; }
// True if |code| was generated from a stub with the same major/minor key.
bool Describes(Code* code) {
return GetMajorKey(code) == MajorKey() && code->stub_info() == MinorKey();
}
protected:
class KindBits: public BitField<Code::Kind, 0, 4> {};
// Record the minor key in the code object so Describes() can match it.
virtual void FinishCode(Handle<Code> code) {
code->set_stub_info(MinorKey());
}
Code::Kind kind() { return kind_; }
virtual int MinorKey() {
return KindBits::encode(kind_);
}
private:
Code::Kind kind_;
};
// IC stub loading a function's prototype property.
class FunctionPrototypeStub: public ICStub {
public:
explicit FunctionPrototypeStub(Code::Kind kind) : ICStub(kind) { }
virtual void Generate(MacroAssembler* masm);
private:
virtual CodeStub::Major MajorKey() { return FunctionPrototype; }
};
// IC stub loading a string's length property.
class StringLengthStub: public ICStub {
public:
explicit StringLengthStub(Code::Kind kind) : ICStub(kind) { }
virtual void Generate(MacroAssembler* masm);
private:
STATIC_ASSERT(KindBits::kSize == 4);
virtual CodeStub::Major MajorKey() { return StringLength; }
};
// IC stub for property stores; extends ICStub's minor key with a strict
// mode bit (bit 4, above the 4 kind bits).
class StoreICStub: public ICStub {
public:
StoreICStub(Code::Kind kind, StrictModeFlag strict_mode)
: ICStub(kind), strict_mode_(strict_mode) { }
protected:
virtual Code::ExtraICState GetExtraICState() {
return strict_mode_;
}
private:
STATIC_ASSERT(KindBits::kSize == 4);
class StrictModeBits: public BitField<bool, 4, 1> {};
virtual int MinorKey() {
return KindBits::encode(kind()) | StrictModeBits::encode(strict_mode_);
}
StrictModeFlag strict_mode_;
};
// IC stub storing to an array's length property.
class StoreArrayLengthStub: public StoreICStub {
 public:
  // NOTE: the former |explicit| was dropped — it is meaningless on a
  // constructor with more than one parameter.
  StoreArrayLengthStub(Code::Kind kind, StrictModeFlag strict_mode)
      : StoreICStub(kind, strict_mode) { }
  virtual void Generate(MacroAssembler* masm);

 private:
  virtual CodeStub::Major MajorKey() { return StoreArrayLength; }
};
// Base class for hydrogen-compiled monomorphic IC stubs; subclasses
// supply the concrete IC kind via kind().
class HICStub: public HydrogenCodeStub {
public:
virtual Code::Kind GetCodeKind() const { return kind(); }
virtual InlineCacheState GetICState() { return MONOMORPHIC; }
protected:
class KindBits: public BitField<Code::Kind, 0, 4> {};
virtual Code::Kind kind() const = 0;
};
// Base class for IC handler stubs (Code::HANDLER kind). bit_field_ is the
// shared minor-key storage that subclasses pack their state into.
class HandlerStub: public HICStub {
public:
virtual Code::Kind GetCodeKind() const { return Code::HANDLER; }
// For handlers, the stub flags encode the underlying IC kind.
virtual int GetStubFlags() { return kind(); }
protected:
HandlerStub() : HICStub() { }
virtual int NotMissMinorKey() { return bit_field_; }
int bit_field_;
};
// Handler stub loading an object field, parameterized on whether the
// field is in-object or in the properties backing store, its index, and
// whether it holds an unboxed double.
class LoadFieldStub: public HandlerStub {
public:
LoadFieldStub(bool inobject, int index, Representation representation) {
Initialize(Code::LOAD_IC, inobject, index, representation);
}
virtual Handle<Code> GenerateCode(Isolate* isolate);
virtual void InitializeInterfaceDescriptor(
Isolate* isolate,
CodeStubInterfaceDescriptor* descriptor);
// Double for unboxed-double fields, Tagged otherwise; finer
// representations are not preserved by the bit field encoding.
Representation representation() {
if (unboxed_double()) return Representation::Double();
return Representation::Tagged();
}
virtual Code::Kind kind() const {
return KindBits::decode(bit_field_);
}
bool is_inobject() {
return InobjectBits::decode(bit_field_);
}
// Byte offset of the field: relative to the object start for in-object
// fields, relative to the properties FixedArray otherwise.
int offset() {
int index = IndexBits::decode(bit_field_);
int offset = index * kPointerSize;
if (is_inobject()) return offset;
return FixedArray::kHeaderSize + offset;
}
bool unboxed_double() {
return UnboxedDoubleBits::decode(bit_field_);
}
virtual Code::StubType GetStubType() { return Code::FIELD; }
protected:
LoadFieldStub() : HandlerStub() { }
// Packs all state into bit_field_ (the HandlerStub minor-key storage).
void Initialize(Code::Kind kind,
bool inobject,
int index,
Representation representation) {
bool unboxed_double = FLAG_track_double_fields && representation.IsDouble();
bit_field_ = KindBits::encode(kind)
| InobjectBits::encode(inobject)
| IndexBits::encode(index)
| UnboxedDoubleBits::encode(unboxed_double);
}
private:
// bit_field_ layout: bits 0-3 kind, bit 4 in-object, bits 5-15 index,
// bit 16 unboxed double.
STATIC_ASSERT(KindBits::kSize == 4);
class InobjectBits: public BitField<bool, 4, 1> {};
class IndexBits: public BitField<int, 5, 11> {};
class UnboxedDoubleBits: public BitField<bool, 16, 1> {};
virtual CodeStub::Major MajorKey() { return LoadField; }
};
// Handler stub storing to a global property cell. The template code is
// generated against placeholder cell/map objects, which are patched with
// the real ones in GetCodeCopyFromTemplate().
class StoreGlobalStub : public HandlerStub {
public:
StoreGlobalStub(StrictModeFlag strict_mode, bool is_constant) {
bit_field_ = StrictModeBits::encode(strict_mode) |
IsConstantBits::encode(is_constant);
}
Handle<Code> GetCodeCopyFromTemplate(Isolate* isolate,
Map* receiver_map,
PropertyCell* cell) {
Handle<Code> code = CodeStub::GetCodeCopyFromTemplate(isolate);
// Replace the placeholder cell and global object map with the actual global
// cell and receiver map.
Map* cell_map = isolate->heap()->global_property_cell_map();
code->ReplaceNthObject(1, cell_map, cell);
code->ReplaceNthObject(1, isolate->heap()->meta_map(), receiver_map);
return code;
}
virtual Code::Kind kind() const { return Code::STORE_IC; }
virtual Handle<Code> GenerateCode(Isolate* isolate);
virtual void InitializeInterfaceDescriptor(
Isolate* isolate,
CodeStubInterfaceDescriptor* descriptor);
// The extra IC state is the raw bit field (strict mode, constness,
// representation).
virtual Code::ExtraICState GetExtraICState() { return bit_field_; }
bool is_constant() {
return IsConstantBits::decode(bit_field_);
}
void set_is_constant(bool value) {
bit_field_ = IsConstantBits::update(bit_field_, value);
}
Representation representation() {
return Representation::FromKind(RepresentationBits::decode(bit_field_));
}
void set_representation(Representation r) {
bit_field_ = RepresentationBits::update(bit_field_, r.kind());
}
private:
virtual int NotMissMinorKey() { return GetExtraICState(); }
Major MajorKey() { return StoreGlobal; }
// bit_field_ layout: bit 0 strict mode, bit 1 constness, bits 2-9
// representation kind.
class StrictModeBits: public BitField<StrictModeFlag, 0, 1> {};
class IsConstantBits: public BitField<bool, 1, 1> {};
class RepresentationBits: public BitField<Representation::Kind, 2, 8> {};
int bit_field_;
DISALLOW_COPY_AND_ASSIGN(StoreGlobalStub);
};
// Keyed variant of LoadFieldStub: identical state, but initialized with
// Code::KEYED_LOAD_IC and cached under its own major key.
class KeyedLoadFieldStub: public LoadFieldStub {
public:
KeyedLoadFieldStub(bool inobject, int index, Representation representation)
: LoadFieldStub() {
Initialize(Code::KEYED_LOAD_IC, inobject, index, representation);
}
virtual void InitializeInterfaceDescriptor(
Isolate* isolate,
CodeStubInterfaceDescriptor* descriptor);
virtual Handle<Code> GenerateCode(Isolate* isolate);
private:
virtual CodeStub::Major MajorKey() { return KeyedLoadField; }
};
// Hydrogen IC stub for keyed calls on arrays, specialized on element
// holeyness and argument count.
class KeyedArrayCallStub: public HICStub {
public:
KeyedArrayCallStub(bool holey, int argc) : HICStub(), argc_(argc) {
bit_field_ = ContextualBits::encode(false) | HoleyBits::encode(holey);
}
virtual Code::Kind kind() const { return Code::KEYED_CALL_IC; }
virtual Code::ExtraICState GetExtraICState() { return bit_field_; }
ElementsKind elements_kind() {
return HoleyBits::decode(bit_field_) ? FAST_HOLEY_ELEMENTS : FAST_ELEMENTS;
}
int argc() { return argc_; }
virtual int GetStubFlags() { return argc(); }
// Decodes the holeyness flag back out of a generated code object.
static bool IsHoley(Handle<Code> code) {
Code::ExtraICState state = code->extra_ic_state();
return HoleyBits::decode(state);
}
virtual void InitializeInterfaceDescriptor(
Isolate* isolate,
CodeStubInterfaceDescriptor* descriptor);
virtual Handle<Code> GenerateCode(Isolate* isolate);
private:
// Minor key = extra IC state (contextual + holey bits) plus argc above it.
virtual int NotMissMinorKey() {
return GetExtraICState() | ArgcBits::encode(argc_);
}
// Bit layout must stay in sync with CallICBase::Contextual (asserted).
class ContextualBits: public BitField<bool, 0, 1> {};
STATIC_ASSERT(CallICBase::Contextual::kShift == ContextualBits::kShift);
STATIC_ASSERT(CallICBase::Contextual::kSize == ContextualBits::kSize);
class HoleyBits: public BitField<bool, 1, 1> {};
STATIC_ASSERT(Code::kArgumentsBits <= kStubMinorKeyBits - 2);
class ArgcBits: public BitField<int, 2, Code::kArgumentsBits> {};
virtual CodeStub::Major MajorKey() { return KeyedArrayCall; }
int bit_field_;
int argc_;
};
// Hydrogen stub backing the binary-op IC. Its entire state (operation,
// overwrite mode, fixed right argument, and operand/result type states)
// round-trips through the extra IC state, which also serves as the minor
// key.
class BinaryOpStub: public HydrogenCodeStub {
public:
BinaryOpStub(Token::Value op, OverwriteMode mode)
: HydrogenCodeStub(UNINITIALIZED), op_(op), mode_(mode) {
ASSERT(op <= LAST_TOKEN && op >= FIRST_TOKEN);
Initialize();
}
// Reconstructs a stub from a previously encoded extra IC state.
explicit BinaryOpStub(Code::ExtraICState state)
: op_(decode_token(OpBits::decode(state))),
mode_(OverwriteModeField::decode(state)),
fixed_right_arg_(
Maybe<int>(HasFixedRightArgBits::decode(state),
decode_arg_value(FixedRightArgValueBits::decode(state)))),
left_state_(LeftStateField::decode(state)),
// The right state is not stored when a fixed right arg is present
// (the bit fields overlap); recompute it from the value instead.
right_state_(fixed_right_arg_.has_value
? ((fixed_right_arg_.value <= Smi::kMaxValue) ? SMI : INT32)
: RightStateField::decode(state)),
result_state_(ResultStateField::decode(state)) {
// We don't deserialize the SSE2 Field, since this is only used to be able
// to include SSE2 as well as non-SSE2 versions in the snapshot. For code
// generation we always want it to reflect the current state.
ASSERT(!fixed_right_arg_.has_value ||
can_encode_arg_value(fixed_right_arg_.value));
}
// Range of tokens this stub handles.
static const int FIRST_TOKEN = Token::BIT_OR;
static const int LAST_TOKEN = Token::MOD;
static void GenerateAheadOfTime(Isolate* isolate);
virtual void InitializeInterfaceDescriptor(
Isolate* isolate, CodeStubInterfaceDescriptor* descriptor);
static void InitializeForIsolate(Isolate* isolate) {
BinaryOpStub binopStub(UNINITIALIZED);
binopStub.InitializeInterfaceDescriptor(
isolate, isolate->code_stub_interface_descriptor(CodeStub::BinaryOp));
}
virtual Code::Kind GetCodeKind() const { return Code::BINARY_OP_IC; }
// IC state derived from the operand type lattice: NONE -> uninitialized,
// GENERIC -> megamorphic, anything else -> monomorphic.
virtual InlineCacheState GetICState() {
if (Max(left_state_, right_state_) == NONE) {
return ::v8::internal::UNINITIALIZED;
}
if (Max(left_state_, right_state_) == GENERIC) return MEGAMORPHIC;
return MONOMORPHIC;
}
virtual void VerifyPlatformFeatures(Isolate* isolate) V8_OVERRIDE {
ASSERT(CpuFeatures::VerifyCrossCompiling(SSE2));
}
// Packs the full stub state; inverse of the ExtraICState constructor.
virtual Code::ExtraICState GetExtraICState() {
bool sse_field = Max(result_state_, Max(left_state_, right_state_)) > SMI &&
CpuFeatures::IsSafeForSnapshot(SSE2);
return OpBits::encode(encode_token(op_))
| LeftStateField::encode(left_state_)
| RightStateField::encode(fixed_right_arg_.has_value
? NONE : right_state_)
| ResultStateField::encode(result_state_)
| HasFixedRightArgBits::encode(fixed_right_arg_.has_value)
| FixedRightArgValueBits::encode(fixed_right_arg_.has_value
? encode_arg_value(
fixed_right_arg_.value)
: 0)
| SSE2Field::encode(sse_field)
| OverwriteModeField::encode(mode_);
}
// True when the overwritable operand's heap number can hold the result.
bool CanReuseDoubleBox() {
return result_state_ <= NUMBER && result_state_ > SMI &&
((left_state_ > SMI && left_state_ <= NUMBER &&
mode_ == OVERWRITE_LEFT) ||
(right_state_ > SMI && right_state_ <= NUMBER &&
mode_ == OVERWRITE_RIGHT));
}
// An operand that may be a receiver can trigger user-visible conversion.
bool HasSideEffects(Isolate* isolate) const {
Handle<Type> left = GetLeftType(isolate);
Handle<Type> right = GetRightType(isolate);
return left->Maybe(Type::Receiver()) || right->Maybe(Type::Receiver());
}
virtual Handle<Code> GenerateCode(Isolate* isolate);
Maybe<Handle<Object> > Result(Handle<Object> left,
Handle<Object> right,
Isolate* isolate);
Token::Value operation() const { return op_; }
OverwriteMode mode() const { return mode_; }
Maybe<int> fixed_right_arg() const { return fixed_right_arg_; }
Handle<Type> GetLeftType(Isolate* isolate) const;
Handle<Type> GetRightType(Isolate* isolate) const;
Handle<Type> GetResultType(Isolate* isolate) const;
void UpdateStatus(Handle<Object> left,
Handle<Object> right,
Maybe<Handle<Object> > result);
void PrintState(StringStream* stream);
private:
explicit BinaryOpStub(InitializationState state) : HydrogenCodeStub(state),
op_(Token::ADD),
mode_(NO_OVERWRITE) {
Initialize();
}
void Initialize();
// Operand/result type lattice, ordered from least to most general.
enum State { NONE, SMI, INT32, NUMBER, STRING, GENERIC };
// We truncate the last bit of the token.
STATIC_ASSERT(LAST_TOKEN - FIRST_TOKEN < (1 << 5));
class LeftStateField: public BitField<State, 0, 3> {};
// When fixed right arg is set, we don't need to store the right state.
// Thus the two fields can overlap.
class HasFixedRightArgBits: public BitField<bool, 4, 1> {};
class FixedRightArgValueBits: public BitField<int, 5, 4> {};
class RightStateField: public BitField<State, 5, 3> {};
class ResultStateField: public BitField<State, 9, 3> {};
class SSE2Field: public BitField<bool, 12, 1> {};
class OverwriteModeField: public BitField<OverwriteMode, 13, 2> {};
class OpBits: public BitField<int, 15, 5> {};
virtual CodeStub::Major MajorKey() { return BinaryOp; }
virtual int NotMissMinorKey() { return GetExtraICState(); }
static Handle<Type> StateToType(State state,
Isolate* isolate);
static void Generate(Token::Value op,
State left,
int right,
State result,
OverwriteMode mode,
Isolate* isolate);
static void Generate(Token::Value op,
State left,
State right,
State result,
OverwriteMode mode,
Isolate* isolate);
void UpdateStatus(Handle<Object> object,
State* state);
bool can_encode_arg_value(int32_t value) const;
int encode_arg_value(int32_t value) const;
int32_t decode_arg_value(int value) const;
int encode_token(Token::Value op) const;
Token::Value decode_token(int op) const;
// True for bitwise/shift ops whose result is always an int32 (note: SHR
// is not listed, as its result can exceed the int32 range).
bool has_int_result() const {
return op_ == Token::BIT_XOR || op_ == Token::BIT_AND ||
op_ == Token::BIT_OR || op_ == Token::SAR || op_ == Token::SHL;
}
const char* StateToName(State state);
void PrintBaseName(StringStream* stream);
Token::Value op_;
OverwriteMode mode_;
// has_value is set when the right operand is a known fixed constant.
Maybe<int> fixed_right_arg_;
State left_state_;
State right_state_;
State result_state_;
};
// TODO(bmeurer): Rename to StringAddStub once we dropped the old StringAddStub.
// Hydrogen-based string concatenation stub; flags and pretenuring decision
// are packed into bit_field_, which also serves as the minor key.
class NewStringAddStub V8_FINAL : public HydrogenCodeStub {
 public:
  NewStringAddStub(StringAddFlags flags, PretenureFlag pretenure_flag)
      : bit_field_(StringAddFlagsBits::encode(flags) |
                   PretenureFlagBits::encode(pretenure_flag)) {}

  StringAddFlags flags() const {
    return StringAddFlagsBits::decode(bit_field_);
  }

  PretenureFlag pretenure_flag() const {
    return PretenureFlagBits::decode(bit_field_);
  }

  virtual Handle<Code> GenerateCode(Isolate* isolate) V8_OVERRIDE;

  virtual void InitializeInterfaceDescriptor(
      Isolate* isolate,
      CodeStubInterfaceDescriptor* descriptor) V8_OVERRIDE;

  static void InstallDescriptors(Isolate* isolate);

  // Parameters accessed via CodeStubGraphBuilder::GetParameter()
  static const int kLeft = 0;
  static const int kRight = 1;

 private:
  // Minor-key bit layout.
  class StringAddFlagsBits: public BitField<StringAddFlags, 0, 2> {};
  class PretenureFlagBits: public BitField<PretenureFlag, 2, 1> {};
  uint32_t bit_field_;

  virtual Major MajorKey() V8_OVERRIDE { return NewStringAdd; }
  virtual int NotMissMinorKey() V8_OVERRIDE { return bit_field_; }

  virtual void PrintBaseName(StringStream* stream) V8_OVERRIDE;

  DISALLOW_COPY_AND_ASSIGN(NewStringAddStub);
};
// Platform stub backing the compare IC.  The operand/handler states and the
// token are packed into the minor key (see the bit fields below) and stored
// on the generated Code object via FinishCode().
class ICCompareStub: public PlatformCodeStub {
 public:
  ICCompareStub(Token::Value op,
                CompareIC::State left,
                CompareIC::State right,
                CompareIC::State handler)
      : op_(op),
        left_(left),
        right_(right),
        state_(handler) {
    ASSERT(Token::IsCompareOp(op));
  }

  virtual void Generate(MacroAssembler* masm);

  void set_known_map(Handle<Map> map) { known_map_ = map; }

  // Unpacks a minor key produced by MinorKey() back into its components.
  static void DecodeMinorKey(int minor_key,
                             CompareIC::State* left_state,
                             CompareIC::State* right_state,
                             CompareIC::State* handler_state,
                             Token::Value* op);

  static CompareIC::State CompareState(int minor_key) {
    return static_cast<CompareIC::State>(HandlerStateField::decode(minor_key));
  }

  virtual InlineCacheState GetICState();

 private:
  // Minor-key bit layout.
  class OpField: public BitField<int, 0, 3> { };
  class LeftStateField: public BitField<int, 3, 4> { };
  class RightStateField: public BitField<int, 7, 4> { };
  class HandlerStateField: public BitField<int, 11, 4> { };

  virtual void FinishCode(Handle<Code> code) {
    // Record the minor key on the code object so the IC system can recover
    // the stub's configuration later (see DecodeMinorKey()).
    code->set_stub_info(MinorKey());
  }

  virtual CodeStub::Major MajorKey() { return CompareIC; }
  virtual int MinorKey();

  virtual Code::Kind GetCodeKind() const { return Code::COMPARE_IC; }

  // One code generator per handler state.
  void GenerateSmis(MacroAssembler* masm);
  void GenerateNumbers(MacroAssembler* masm);
  void GenerateInternalizedStrings(MacroAssembler* masm);
  void GenerateStrings(MacroAssembler* masm);
  void GenerateUniqueNames(MacroAssembler* masm);
  void GenerateObjects(MacroAssembler* masm);
  void GenerateMiss(MacroAssembler* masm);
  void GenerateKnownObjects(MacroAssembler* masm);
  void GenerateGeneric(MacroAssembler* masm);

  bool strict() const { return op_ == Token::EQ_STRICT; }
  Condition GetCondition() const { return CompareIC::ComputeCondition(op_); }

  // KNOWN_OBJECT stubs are cached by map, not by minor key.
  virtual void AddToSpecialCache(Handle<Code> new_object);
  virtual bool FindCodeInSpecialCache(Code** code_out, Isolate* isolate);
  virtual bool UseSpecialCache() { return state_ == CompareIC::KNOWN_OBJECT; }

  Token::Value op_;
  CompareIC::State left_;
  CompareIC::State right_;
  CompareIC::State state_;
  Handle<Map> known_map_;
};
// Hydrogen stub backing the compare-nil IC (x == null / x == undefined).
// Tracks the set of input types seen so far in |state_| and which nil value
// is being compared against in |nil_value_|.
class CompareNilICStub : public HydrogenCodeStub {
 public:
  Handle<Type> GetType(Isolate* isolate, Handle<Map> map = Handle<Map>());
  Handle<Type> GetInputType(Isolate* isolate, Handle<Map> map);

  explicit CompareNilICStub(NilValue nil) : nil_value_(nil) { }

  // Reconstructs a stub from an extra IC state word previously produced by
  // GetExtraICState().
  CompareNilICStub(Code::ExtraICState ic_state,
                   InitializationState init_state = INITIALIZED)
      : HydrogenCodeStub(init_state),
        nil_value_(NilValueField::decode(ic_state)),
        state_(State(TypesField::decode(ic_state))) {
  }

  static Handle<Code> GetUninitialized(Isolate* isolate,
                                       NilValue nil) {
    return CompareNilICStub(nil, UNINITIALIZED).GetCode(isolate);
  }

  virtual void InitializeInterfaceDescriptor(
      Isolate* isolate,
      CodeStubInterfaceDescriptor* descriptor);

  static void InitializeForIsolate(Isolate* isolate) {
    CompareNilICStub compare_stub(kNullValue, UNINITIALIZED);
    compare_stub.InitializeInterfaceDescriptor(
        isolate,
        isolate->code_stub_interface_descriptor(CodeStub::CompareNilIC));
  }

  virtual InlineCacheState GetICState() {
    if (state_.Contains(GENERIC)) {
      return MEGAMORPHIC;
    } else if (state_.Contains(MONOMORPHIC_MAP)) {
      return MONOMORPHIC;
    } else {
      return PREMONOMORPHIC;
    }
  }

  virtual Code::Kind GetCodeKind() const { return Code::COMPARE_NIL_IC; }

  virtual Handle<Code> GenerateCode(Isolate* isolate);

  virtual Code::ExtraICState GetExtraICState() {
    return NilValueField::encode(nil_value_) |
           TypesField::encode(state_.ToIntegral());
  }

  void UpdateStatus(Handle<Object> object);

  bool IsMonomorphic() const { return state_.Contains(MONOMORPHIC_MAP); }
  NilValue GetNilValue() const { return nil_value_; }
  void ClearState() { state_.RemoveAll(); }

  virtual void PrintState(StringStream* stream);
  virtual void PrintBaseName(StringStream* stream);

 private:
  friend class CompareNilIC;

  enum CompareNilType {
    UNDEFINED,
    NULL_TYPE,
    MONOMORPHIC_MAP,
    GENERIC,
    NUMBER_OF_TYPES
  };

  // At most 6 different types can be distinguished, because the Code object
  // only has room for a single byte to hold a set and there are two more
  // boolean flags we need to store. :-P
  STATIC_ASSERT(NUMBER_OF_TYPES <= 6);

  // Bit set of CompareNilTypes observed so far.
  class State : public EnumSet<CompareNilType, byte> {
   public:
    State() : EnumSet<CompareNilType, byte>(0) { }
    explicit State(byte bits) : EnumSet<CompareNilType, byte>(bits) { }

    void Print(StringStream* stream) const;
  };

  CompareNilICStub(NilValue nil, InitializationState init_state)
      : HydrogenCodeStub(init_state), nil_value_(nil) { }

  // Extra-IC-state / minor-key bit layout.
  class NilValueField : public BitField<NilValue, 0, 1> {};
  class TypesField : public BitField<byte, 1, NUMBER_OF_TYPES> {};

  virtual CodeStub::Major MajorKey() { return CompareNilIC; }
  virtual int NotMissMinorKey() { return GetExtraICState(); }

  NilValue nil_value_;
  State state_;

  DISALLOW_COPY_AND_ASSIGN(CompareNilICStub);
};
// Entry stub from generated code into the C++ runtime; see the
// platform-specific Generate() implementations for the calling convention.
class CEntryStub : public PlatformCodeStub {
 public:
  explicit CEntryStub(int result_size,
                      SaveFPRegsMode save_doubles = kDontSaveFPRegs)
      : result_size_(result_size), save_doubles_(save_doubles) { }

  void Generate(MacroAssembler* masm);

  // The version of this stub that doesn't save doubles is generated ahead of
  // time, so it's OK to call it from other stubs that can't cope with GC during
  // their code generation. On machines that always have gp registers (x64) we
  // can generate both variants ahead of time.
  virtual bool IsPregenerated(Isolate* isolate) V8_OVERRIDE;
  static void GenerateAheadOfTime(Isolate* isolate);

 protected:
  virtual void VerifyPlatformFeatures(Isolate* isolate) V8_OVERRIDE {
    ASSERT(CpuFeatures::VerifyCrossCompiling(SSE2));
  }

 private:
  void GenerateCore(MacroAssembler* masm,
                    Label* throw_normal_exception,
                    Label* throw_termination_exception,
                    Label* throw_out_of_memory_exception,
                    bool do_gc,
                    bool always_allocate_scope);

  // NOTE(review): isolate_ is never referenced in this header; confirm it is
  // used by the platform implementations before removing it.
  Isolate* isolate_;

  // Number of pointers/values returned.
  const int result_size_;
  SaveFPRegsMode save_doubles_;

  Major MajorKey() { return CEntry; }
  int MinorKey();

  bool NeedsImmovableCode();
};
// Entry stub from C++ into JavaScript (ordinary calls; see
// JSConstructEntryStub for construction).
class JSEntryStub : public PlatformCodeStub {
 public:
  JSEntryStub() { }

  void Generate(MacroAssembler* masm) { GenerateBody(masm, false); }

 protected:
  // Shared code generator; |is_construct| distinguishes the construct-entry
  // variant generated by the subclass.
  void GenerateBody(MacroAssembler* masm, bool is_construct);

 private:
  Major MajorKey() { return JSEntry; }
  int MinorKey() { return 0; }

  virtual void FinishCode(Handle<Code> code);

  // Set by FinishCode(); presumably the offset of the handler entry in the
  // generated code — TODO confirm against the .cc implementation.
  int handler_offset_;
};
// Construct-call variant of JSEntryStub (GenerateBody with
// is_construct == true).
class JSConstructEntryStub : public JSEntryStub {
 public:
  JSConstructEntryStub() { }

  void Generate(MacroAssembler* masm) { GenerateBody(masm, true); }

 private:
  // Minor key 1 distinguishes this stub from the plain JSEntryStub (0).
  int MinorKey() { return 1; }

  virtual void PrintName(StringStream* stream) {
    stream->Add("JSConstructEntryStub");
  }
};
// Stub for accessing or materializing the arguments object; the Type selects
// between reading an element and allocating the different arguments-object
// flavors.
class ArgumentsAccessStub: public PlatformCodeStub {
 public:
  enum Type {
    READ_ELEMENT,
    NEW_NON_STRICT_FAST,
    NEW_NON_STRICT_SLOW,
    NEW_STRICT
  };

  explicit ArgumentsAccessStub(Type type) : type_(type) { }

 private:
  Type type_;

  Major MajorKey() { return ArgumentsAccess; }
  // The type alone identifies the stub variant.
  int MinorKey() { return type_; }

  // Dispatched from Generate() according to type_.
  void Generate(MacroAssembler* masm);
  void GenerateReadElement(MacroAssembler* masm);
  void GenerateNewStrict(MacroAssembler* masm);
  void GenerateNewNonStrictFast(MacroAssembler* masm);
  void GenerateNewNonStrictSlow(MacroAssembler* masm);

  virtual void PrintName(StringStream* stream);
};
// Stateless platform stub for RegExp execution (single variant, minor key 0).
class RegExpExecStub: public PlatformCodeStub {
 public:
  RegExpExecStub() { }

 private:
  Major MajorKey() { return RegExpExec; }
  int MinorKey() { return 0; }

  void Generate(MacroAssembler* masm);
};
// Stateless platform stub that builds a RegExp result object
// (single variant, minor key 0).
class RegExpConstructResultStub: public PlatformCodeStub {
 public:
  RegExpConstructResultStub() { }

 private:
  Major MajorKey() { return RegExpConstructResult; }
  int MinorKey() { return 0; }

  void Generate(MacroAssembler* masm);
};
// Platform stub for calling a JS function with a fixed argument count;
// flags control implicit-receiver handling and call-target recording.
class CallFunctionStub: public PlatformCodeStub {
 public:
  CallFunctionStub(int argc, CallFunctionFlags flags)
      : argc_(argc), flags_(flags) { }

  void Generate(MacroAssembler* masm);

  virtual void FinishCode(Handle<Code> code) {
    // Mark the code object so the IC system knows a function cache exists.
    code->set_has_function_cache(RecordCallTarget());
  }

  // Recovers the argument count from a minor key built by MinorKey().
  static int ExtractArgcFromMinorKey(int minor_key) {
    return ArgcBits::decode(minor_key);
  }

 private:
  int argc_;
  CallFunctionFlags flags_;

  virtual void PrintName(StringStream* stream);

  // Minor key encoding in 32 bits with Bitfield <Type, shift, size>.
  class FlagBits: public BitField<CallFunctionFlags, 0, 2> {};
  class ArgcBits: public BitField<unsigned, 2, 32 - 2> {};

  Major MajorKey() { return CallFunction; }
  int MinorKey() {
    // Encode the parameters in a unique 32 bit value.
    return FlagBits::encode(flags_) | ArgcBits::encode(argc_);
  }

  bool ReceiverMightBeImplicit() {
    return (flags_ & RECEIVER_MIGHT_BE_IMPLICIT) != 0;
  }

  bool RecordCallTarget() {
    return (flags_ & RECORD_CALL_TARGET) != 0;
  }
};
// Platform stub for [[Construct]] calls; parallels CallFunctionStub but
// takes no argument count.
class CallConstructStub: public PlatformCodeStub {
 public:
  explicit CallConstructStub(CallFunctionFlags flags) : flags_(flags) {}

  void Generate(MacroAssembler* masm);

  virtual void FinishCode(Handle<Code> code) {
    // Mark the code object so the IC system knows a function cache exists.
    code->set_has_function_cache(RecordCallTarget());
  }

 private:
  CallFunctionFlags flags_;

  virtual void PrintName(StringStream* stream);

  Major MajorKey() { return CallConstruct; }
  // The raw flags value is sufficient as the minor key.
  int MinorKey() { return flags_; }

  bool RecordCallTarget() {
    return (flags_ & RECORD_CALL_TARGET) != 0;
  }
};
// Controls how the StringChar*Generator classes below validate the index
// argument.
enum StringIndexFlags {
  // Accepts smis or heap numbers.
  STRING_INDEX_IS_NUMBER,

  // Accepts smis or heap numbers that are valid array indices
  // (ECMA-262 15.4). Invalid indices are reported as being out of
  // range.
  STRING_INDEX_IS_ARRAY_INDEX
};
// Generates code implementing String.prototype.charCodeAt.
//
// Only supports the case when the receiver is a string and the index
// is a number (smi or heap number) that is a valid index into the
// string. Additional index constraints are specified by the
// flags. Otherwise, bails out to the provided labels.
//
// Register usage: |object| may be changed to another string in a way
// that doesn't affect charCodeAt/charAt semantics, |index| is
// preserved, |scratch| and |result| are clobbered.
class StringCharCodeAtGenerator {
 public:
  StringCharCodeAtGenerator(Register object,
                            Register index,
                            Register result,
                            Label* receiver_not_string,
                            Label* index_not_number,
                            Label* index_out_of_range,
                            StringIndexFlags index_flags)
      : object_(object),
        index_(index),
        result_(result),
        receiver_not_string_(receiver_not_string),
        index_not_number_(index_not_number),
        index_out_of_range_(index_out_of_range),
        index_flags_(index_flags) {
    // |result| must be distinct from the inputs so it can be written
    // without clobbering them.
    ASSERT(!result_.is(object_));
    ASSERT(!result_.is(index_));
  }

  // Generates the fast case code. On the fallthrough path |result|
  // register contains the result.
  void GenerateFast(MacroAssembler* masm);

  // Generates the slow case code. Must not be naturally
  // reachable. Expected to be put after a ret instruction (e.g., in
  // deferred code). Always jumps back to the fast case.
  void GenerateSlow(MacroAssembler* masm,
                    const RuntimeCallHelper& call_helper);

  // Skip handling slow case and directly jump to bailout.
  void SkipSlow(MacroAssembler* masm, Label* bailout) {
    // Bind the labels the fast case may jump to, then bail out instead of
    // emitting any slow-case handling.
    masm->bind(&index_not_smi_);
    masm->bind(&call_runtime_);
    masm->jmp(bailout);
  }

 private:
  Register object_;
  Register index_;
  Register result_;

  // Bailout targets supplied by the caller.
  Label* receiver_not_string_;
  Label* index_not_number_;
  Label* index_out_of_range_;

  StringIndexFlags index_flags_;

  // Internal labels connecting the fast and slow cases.
  Label call_runtime_;
  Label index_not_smi_;
  Label got_smi_index_;
  Label exit_;

  DISALLOW_COPY_AND_ASSIGN(StringCharCodeAtGenerator);
};
// Generates code for creating a one-char string from a char code.
// Generates code for creating a one-char string from a char code.
class StringCharFromCodeGenerator {
 public:
  StringCharFromCodeGenerator(Register code,
                              Register result)
      : code_(code),
        result_(result) {
    // |result| must be distinct from |code| so it can be written without
    // clobbering the input.
    ASSERT(!code_.is(result_));
  }

  // Generates the fast case code. On the fallthrough path |result|
  // register contains the result.
  void GenerateFast(MacroAssembler* masm);

  // Generates the slow case code. Must not be naturally
  // reachable. Expected to be put after a ret instruction (e.g., in
  // deferred code). Always jumps back to the fast case.
  void GenerateSlow(MacroAssembler* masm,
                    const RuntimeCallHelper& call_helper);

  // Skip handling slow case and directly jump to bailout.
  void SkipSlow(MacroAssembler* masm, Label* bailout) {
    masm->bind(&slow_case_);
    masm->jmp(bailout);
  }

 private:
  Register code_;
  Register result_;

  // Internal labels connecting the fast and slow cases.
  Label slow_case_;
  Label exit_;

  DISALLOW_COPY_AND_ASSIGN(StringCharFromCodeGenerator);
};
// Generates code implementing String.prototype.charAt.
//
// Only supports the case when the receiver is a string and the index
// is a number (smi or heap number) that is a valid index into the
// string. Additional index constraints are specified by the
// flags. Otherwise, bails out to the provided labels.
//
// Register usage: |object| may be changed to another string in a way
// that doesn't affect charCodeAt/charAt semantics, |index| is
// preserved, |scratch| and |result| are clobbered.
// Composes StringCharCodeAtGenerator and StringCharFromCodeGenerator,
// using |scratch| to pass the char code between the two stages.
class StringCharAtGenerator {
 public:
  StringCharAtGenerator(Register object,
                        Register index,
                        Register scratch,
                        Register result,
                        Label* receiver_not_string,
                        Label* index_not_number,
                        Label* index_out_of_range,
                        StringIndexFlags index_flags)
      : char_code_at_generator_(object,
                                index,
                                scratch,
                                receiver_not_string,
                                index_not_number,
                                index_out_of_range,
                                index_flags),
        char_from_code_generator_(scratch, result) {}

  // Generates the fast case code. On the fallthrough path |result|
  // register contains the result.
  void GenerateFast(MacroAssembler* masm) {
    char_code_at_generator_.GenerateFast(masm);
    char_from_code_generator_.GenerateFast(masm);
  }

  // Generates the slow case code. Must not be naturally
  // reachable. Expected to be put after a ret instruction (e.g., in
  // deferred code). Always jumps back to the fast case.
  void GenerateSlow(MacroAssembler* masm,
                    const RuntimeCallHelper& call_helper) {
    char_code_at_generator_.GenerateSlow(masm, call_helper);
    char_from_code_generator_.GenerateSlow(masm, call_helper);
  }

  // Skip handling slow case and directly jump to bailout.
  void SkipSlow(MacroAssembler* masm, Label* bailout) {
    char_code_at_generator_.SkipSlow(masm, bailout);
    char_from_code_generator_.SkipSlow(masm, bailout);
  }

 private:
  StringCharCodeAtGenerator char_code_at_generator_;
  StringCharFromCodeGenerator char_from_code_generator_;

  DISALLOW_COPY_AND_ASSIGN(StringCharAtGenerator);
};
// RAII scope that temporarily sets the assembler's allow-stub-calls flag
// and restores the previous value on destruction.
class AllowStubCallsScope {
 public:
  AllowStubCallsScope(MacroAssembler* masm, bool allow)
      : masm_(masm), previous_allow_(masm->allow_stub_calls()) {
    masm_->set_allow_stub_calls(allow);
  }

  ~AllowStubCallsScope() {
    masm_->set_allow_stub_calls(previous_allow_);
  }

 private:
  MacroAssembler* masm_;
  bool previous_allow_;

  DISALLOW_COPY_AND_ASSIGN(AllowStubCallsScope);
};
// Keyed load for dictionary-mode (slow) elements; the elements kind serves
// as the minor key under the shared KeyedLoadElement major key.
class KeyedLoadDictionaryElementStub : public PlatformCodeStub {
 public:
  KeyedLoadDictionaryElementStub() {}

  void Generate(MacroAssembler* masm);

 private:
  Major MajorKey() { return KeyedLoadElement; }
  int MinorKey() { return DICTIONARY_ELEMENTS; }

  DISALLOW_COPY_AND_ASSIGN(KeyedLoadDictionaryElementStub);
};
// Stub converting the double in |source| to an int32 in |destination|
// (see the platform Generate() implementations).  All configuration is
// packed into bit_field_, which doubles as the minor key.
class DoubleToIStub : public PlatformCodeStub {
 public:
  DoubleToIStub(Register source,
                Register destination,
                int offset,
                bool is_truncating,
                bool skip_fastpath = false) : bit_field_(0) {
    bit_field_ = SourceRegisterBits::encode(source.code_) |
        DestinationRegisterBits::encode(destination.code_) |
        OffsetBits::encode(offset) |
        IsTruncatingBits::encode(is_truncating) |
        SkipFastPathBits::encode(skip_fastpath) |
        SSEBits::encode(CpuFeatures::IsSafeForSnapshot(SSE2) ?
                        CpuFeatures::IsSafeForSnapshot(SSE3) ? 2 : 1 : 0);
  }

  Register source() {
    Register result = { SourceRegisterBits::decode(bit_field_) };
    return result;
  }

  Register destination() {
    Register result = { DestinationRegisterBits::decode(bit_field_) };
    return result;
  }

  bool is_truncating() {
    return IsTruncatingBits::decode(bit_field_);
  }

  bool skip_fastpath() {
    return SkipFastPathBits::decode(bit_field_);
  }

  int offset() {
    return OffsetBits::decode(bit_field_);
  }

  void Generate(MacroAssembler* masm);

  virtual bool SometimesSetsUpAFrame() { return false; }

 protected:
  virtual void VerifyPlatformFeatures(Isolate* isolate) V8_OVERRIDE {
    ASSERT(CpuFeatures::VerifyCrossCompiling(SSE2));
  }

 private:
  static const int kBitsPerRegisterNumber = 6;
  STATIC_ASSERT((1L << kBitsPerRegisterNumber) >= Register::kNumRegisters);
  class SourceRegisterBits:
      public BitField<int, 0, kBitsPerRegisterNumber> {};  // NOLINT
  class DestinationRegisterBits:
      public BitField<int, kBitsPerRegisterNumber,
        kBitsPerRegisterNumber> {};  // NOLINT
  class IsTruncatingBits:
      public BitField<bool, 2 * kBitsPerRegisterNumber, 1> {};  // NOLINT
  class OffsetBits:
      public BitField<int, 2 * kBitsPerRegisterNumber + 1, 3> {};  // NOLINT
  // Was BitField<int, ..., 1>; it stores the bool |skip_fastpath|, so use
  // BitField<bool, ...> for consistency with IsTruncatingBits (the encoded
  // bit pattern is unchanged).
  class SkipFastPathBits:
      public BitField<bool, 2 * kBitsPerRegisterNumber + 4, 1> {};  // NOLINT
  // 0 = no SSE, 1 = SSE2, 2 = SSE3 (snapshot-safe feature level).
  class SSEBits:
      public BitField<int, 2 * kBitsPerRegisterNumber + 5, 2> {};  // NOLINT

  Major MajorKey() { return DoubleToI; }
  int MinorKey() { return bit_field_; }

  int bit_field_;

  DISALLOW_COPY_AND_ASSIGN(DoubleToIStub);
};
// Hydrogen stub for keyed loads from fast elements; configuration is packed
// into bit_field_, which also serves as the minor key.
class KeyedLoadFastElementStub : public HydrogenCodeStub {
 public:
  KeyedLoadFastElementStub(bool is_js_array, ElementsKind elements_kind) {
    bit_field_ = ElementsKindBits::encode(elements_kind) |
        IsJSArrayBits::encode(is_js_array);
  }

  bool is_js_array() const {
    return IsJSArrayBits::decode(bit_field_);
  }

  ElementsKind elements_kind() const {
    return ElementsKindBits::decode(bit_field_);
  }

  virtual Handle<Code> GenerateCode(Isolate* isolate);

  virtual void InitializeInterfaceDescriptor(
      Isolate* isolate,
      CodeStubInterfaceDescriptor* descriptor);

 private:
  // Minor-key bit layout.
  class ElementsKindBits: public BitField<ElementsKind, 0, 8> {};
  class IsJSArrayBits: public BitField<bool, 8, 1> {};
  uint32_t bit_field_;

  Major MajorKey() { return KeyedLoadElement; }
  int NotMissMinorKey() { return bit_field_; }

  DISALLOW_COPY_AND_ASSIGN(KeyedLoadFastElementStub);
};
// Hydrogen stub for keyed stores to fast elements; configuration is packed
// into bit_field_, which also serves as the minor key.
class KeyedStoreFastElementStub : public HydrogenCodeStub {
 public:
  KeyedStoreFastElementStub(bool is_js_array,
                            ElementsKind elements_kind,
                            KeyedAccessStoreMode mode) {
    bit_field_ = ElementsKindBits::encode(elements_kind) |
        IsJSArrayBits::encode(is_js_array) |
        StoreModeBits::encode(mode);
  }

  bool is_js_array() const {
    return IsJSArrayBits::decode(bit_field_);
  }

  ElementsKind elements_kind() const {
    return ElementsKindBits::decode(bit_field_);
  }

  KeyedAccessStoreMode store_mode() const {
    return StoreModeBits::decode(bit_field_);
  }

  virtual Handle<Code> GenerateCode(Isolate* isolate);

  virtual void InitializeInterfaceDescriptor(
      Isolate* isolate,
      CodeStubInterfaceDescriptor* descriptor);

 private:
  // Minor-key bit layout.
  class ElementsKindBits: public BitField<ElementsKind, 0, 8> {};
  class StoreModeBits: public BitField<KeyedAccessStoreMode, 8, 4> {};
  class IsJSArrayBits: public BitField<bool, 12, 1> {};
  uint32_t bit_field_;

  Major MajorKey() { return KeyedStoreElement; }
  int NotMissMinorKey() { return bit_field_; }

  DISALLOW_COPY_AND_ASSIGN(KeyedStoreFastElementStub);
};
// Hydrogen stub transitioning an object's elements from one kind to another.
class TransitionElementsKindStub : public HydrogenCodeStub {
 public:
  TransitionElementsKindStub(ElementsKind from_kind,
                             ElementsKind to_kind) {
    bit_field_ = FromKindBits::encode(from_kind) |
        ToKindBits::encode(to_kind);
  }

  ElementsKind from_kind() const {
    return FromKindBits::decode(bit_field_);
  }

  ElementsKind to_kind() const {
    return ToKindBits::decode(bit_field_);
  }

  virtual Handle<Code> GenerateCode(Isolate* isolate);

  virtual void InitializeInterfaceDescriptor(
      Isolate* isolate,
      CodeStubInterfaceDescriptor* descriptor);

 private:
  // Minor-key bit layout (note: "from" occupies the upper byte).
  class FromKindBits: public BitField<ElementsKind, 8, 8> {};
  class ToKindBits: public BitField<ElementsKind, 0, 8> {};
  uint32_t bit_field_;

  Major MajorKey() { return TransitionElementsKind; }
  int NotMissMinorKey() { return bit_field_; }

  DISALLOW_COPY_AND_ASSIGN(TransitionElementsKindStub);
};
// Whether an array-constructor stub must verify it is running in the
// expected (native) context.
enum ContextCheckMode {
  CONTEXT_CHECK_REQUIRED,
  CONTEXT_CHECK_NOT_REQUIRED,
  LAST_CONTEXT_CHECK_MODE = CONTEXT_CHECK_NOT_REQUIRED
};
// Common base for the Array constructor stubs below; packs elements kind,
// allocation-site override mode and context-check mode into bit_field_.
class ArrayConstructorStubBase : public HydrogenCodeStub {
 public:
  ArrayConstructorStubBase(ElementsKind kind, ContextCheckMode context_mode,
                           AllocationSiteOverrideMode override_mode) {
    // It only makes sense to override local allocation site behavior
    // if there is a difference between the global allocation site policy
    // for an ElementsKind and the desired usage of the stub.
    ASSERT(!(FLAG_track_allocation_sites &&
             override_mode == DISABLE_ALLOCATION_SITES) ||
           AllocationSite::GetMode(kind) == TRACK_ALLOCATION_SITE);
    bit_field_ = ElementsKindBits::encode(kind) |
        AllocationSiteOverrideModeBits::encode(override_mode) |
        ContextCheckModeBits::encode(context_mode);
  }

  ElementsKind elements_kind() const {
    return ElementsKindBits::decode(bit_field_);
  }

  AllocationSiteOverrideMode override_mode() const {
    return AllocationSiteOverrideModeBits::decode(bit_field_);
  }

  ContextCheckMode context_mode() const {
    return ContextCheckModeBits::decode(bit_field_);
  }

  virtual bool IsPregenerated(Isolate* isolate) V8_OVERRIDE {
    // We only pre-generate stubs that verify correct context
    return context_mode() == CONTEXT_CHECK_REQUIRED;
  }

  static void GenerateStubsAheadOfTime(Isolate* isolate);
  static void InstallDescriptors(Isolate* isolate);

  // Parameters accessed via CodeStubGraphBuilder::GetParameter()
  static const int kConstructor = 0;
  static const int kPropertyCell = 1;

 private:
  int NotMissMinorKey() { return bit_field_; }

  // Ensure data fits within available bits.
  STATIC_ASSERT(LAST_ALLOCATION_SITE_OVERRIDE_MODE == 1);
  STATIC_ASSERT(LAST_CONTEXT_CHECK_MODE == 1);

  // Minor-key bit layout.
  class ElementsKindBits: public BitField<ElementsKind, 0, 8> {};
  class AllocationSiteOverrideModeBits: public
      BitField<AllocationSiteOverrideMode, 8, 1> {};  // NOLINT
  class ContextCheckModeBits: public BitField<ContextCheckMode, 9, 1> {};
  uint32_t bit_field_;

  DISALLOW_COPY_AND_ASSIGN(ArrayConstructorStubBase);
};
// Array constructor invoked with zero arguments.
class ArrayNoArgumentConstructorStub : public ArrayConstructorStubBase {
 public:
  ArrayNoArgumentConstructorStub(
      ElementsKind kind,
      ContextCheckMode context_mode = CONTEXT_CHECK_REQUIRED,
      AllocationSiteOverrideMode override_mode = DONT_OVERRIDE)
      : ArrayConstructorStubBase(kind, context_mode, override_mode) {
  }

  virtual Handle<Code> GenerateCode(Isolate* isolate);

  virtual void InitializeInterfaceDescriptor(
      Isolate* isolate,
      CodeStubInterfaceDescriptor* descriptor);

 private:
  Major MajorKey() { return ArrayNoArgumentConstructor; }

  DISALLOW_COPY_AND_ASSIGN(ArrayNoArgumentConstructorStub);
};
// Array constructor invoked with exactly one argument.
class ArraySingleArgumentConstructorStub : public ArrayConstructorStubBase {
 public:
  ArraySingleArgumentConstructorStub(
      ElementsKind kind,
      ContextCheckMode context_mode = CONTEXT_CHECK_REQUIRED,
      AllocationSiteOverrideMode override_mode = DONT_OVERRIDE)
      : ArrayConstructorStubBase(kind, context_mode, override_mode) {
  }

  virtual Handle<Code> GenerateCode(Isolate* isolate);

  virtual void InitializeInterfaceDescriptor(
      Isolate* isolate,
      CodeStubInterfaceDescriptor* descriptor);

 private:
  Major MajorKey() { return ArraySingleArgumentConstructor; }

  DISALLOW_COPY_AND_ASSIGN(ArraySingleArgumentConstructorStub);
};
// Array constructor invoked with two or more arguments.
class ArrayNArgumentsConstructorStub : public ArrayConstructorStubBase {
 public:
  ArrayNArgumentsConstructorStub(
      ElementsKind kind,
      ContextCheckMode context_mode = CONTEXT_CHECK_REQUIRED,
      AllocationSiteOverrideMode override_mode = DONT_OVERRIDE)
      : ArrayConstructorStubBase(kind, context_mode, override_mode) {
  }

  virtual Handle<Code> GenerateCode(Isolate* isolate);

  virtual void InitializeInterfaceDescriptor(
      Isolate* isolate,
      CodeStubInterfaceDescriptor* descriptor);

 private:
  Major MajorKey() { return ArrayNArgumentsConstructor; }

  DISALLOW_COPY_AND_ASSIGN(ArrayNArgumentsConstructorStub);
};
// Common base for the InternalArray constructor stubs; only the elements
// kind is configurable (and serves as the minor key).
class InternalArrayConstructorStubBase : public HydrogenCodeStub {
 public:
  explicit InternalArrayConstructorStubBase(ElementsKind kind) {
    kind_ = kind;
  }

  virtual bool IsPregenerated(Isolate* isolate) V8_OVERRIDE { return true; }
  static void GenerateStubsAheadOfTime(Isolate* isolate);
  static void InstallDescriptors(Isolate* isolate);

  // Parameters accessed via CodeStubGraphBuilder::GetParameter()
  static const int kConstructor = 0;

  ElementsKind elements_kind() const { return kind_; }

 private:
  int NotMissMinorKey() { return kind_; }

  ElementsKind kind_;

  DISALLOW_COPY_AND_ASSIGN(InternalArrayConstructorStubBase);
};
// InternalArray constructor invoked with zero arguments.
class InternalArrayNoArgumentConstructorStub : public
    InternalArrayConstructorStubBase {
 public:
  explicit InternalArrayNoArgumentConstructorStub(ElementsKind kind)
      : InternalArrayConstructorStubBase(kind) { }

  virtual Handle<Code> GenerateCode(Isolate* isolate);

  virtual void InitializeInterfaceDescriptor(
      Isolate* isolate,
      CodeStubInterfaceDescriptor* descriptor);

 private:
  Major MajorKey() { return InternalArrayNoArgumentConstructor; }

  DISALLOW_COPY_AND_ASSIGN(InternalArrayNoArgumentConstructorStub);
};
// InternalArray constructor invoked with exactly one argument.
class InternalArraySingleArgumentConstructorStub : public
    InternalArrayConstructorStubBase {
 public:
  explicit InternalArraySingleArgumentConstructorStub(ElementsKind kind)
      : InternalArrayConstructorStubBase(kind) { }

  virtual Handle<Code> GenerateCode(Isolate* isolate);

  virtual void InitializeInterfaceDescriptor(
      Isolate* isolate,
      CodeStubInterfaceDescriptor* descriptor);

 private:
  Major MajorKey() { return InternalArraySingleArgumentConstructor; }

  DISALLOW_COPY_AND_ASSIGN(InternalArraySingleArgumentConstructorStub);
};
// InternalArray constructor invoked with two or more arguments.
class InternalArrayNArgumentsConstructorStub : public
    InternalArrayConstructorStubBase {
 public:
  explicit InternalArrayNArgumentsConstructorStub(ElementsKind kind)
      : InternalArrayConstructorStubBase(kind) { }

  virtual Handle<Code> GenerateCode(Isolate* isolate);

  virtual void InitializeInterfaceDescriptor(
      Isolate* isolate,
      CodeStubInterfaceDescriptor* descriptor);

 private:
  Major MajorKey() { return InternalArrayNArgumentsConstructor; }

  DISALLOW_COPY_AND_ASSIGN(InternalArrayNArgumentsConstructorStub);
};
// Platform (non-Hydrogen) stub for keyed element stores.
// NOTE(review): MajorKey/MinorKey/Generate are public here, unlike in the
// other stubs in this file where they are private — confirm whether any
// caller relies on that before tightening access.
class KeyedStoreElementStub : public PlatformCodeStub {
 public:
  KeyedStoreElementStub(bool is_js_array,
                        ElementsKind elements_kind,
                        KeyedAccessStoreMode store_mode)
      : is_js_array_(is_js_array),
        elements_kind_(elements_kind),
        store_mode_(store_mode),
        fp_registers_(CanUseFPRegisters()) { }

  Major MajorKey() { return KeyedStoreElement; }
  int MinorKey() {
    return ElementsKindBits::encode(elements_kind_) |
        IsJSArrayBits::encode(is_js_array_) |
        StoreModeBits::encode(store_mode_) |
        FPRegisters::encode(fp_registers_);
  }

  void Generate(MacroAssembler* masm);

 private:
  // Minor-key bit layout.
  class ElementsKindBits: public BitField<ElementsKind, 0, 8> {};
  class StoreModeBits: public BitField<KeyedAccessStoreMode, 8, 4> {};
  class IsJSArrayBits: public BitField<bool, 12, 1> {};
  class FPRegisters: public BitField<bool, 13, 1> {};

  bool is_js_array_;
  ElementsKind elements_kind_;
  KeyedAccessStoreMode store_mode_;
  bool fp_registers_;

  DISALLOW_COPY_AND_ASSIGN(KeyedStoreElementStub);
};
// Hydrogen stub backing the ToBoolean IC; records the set of input types
// seen so far in |types_|, which also serves as the extra IC state.
class ToBooleanStub: public HydrogenCodeStub {
 public:
  enum Type {
    UNDEFINED,
    BOOLEAN,
    NULL_TYPE,
    SMI,
    SPEC_OBJECT,
    STRING,
    SYMBOL,
    HEAP_NUMBER,
    NUMBER_OF_TYPES
  };

  // At most 8 different types can be distinguished, because the Code object
  // only has room for a single byte to hold a set of these types. :-P
  STATIC_ASSERT(NUMBER_OF_TYPES <= 8);

  // Bit set of Types observed so far.
  class Types : public EnumSet<Type, byte> {
   public:
    Types() : EnumSet<Type, byte>(0) {}
    explicit Types(byte bits) : EnumSet<Type, byte>(bits) {}

    byte ToByte() const { return ToIntegral(); }
    void Print(StringStream* stream) const;
    bool UpdateStatus(Handle<Object> object);
    bool NeedsMap() const;
    bool CanBeUndetectable() const;
    bool IsGeneric() const { return ToIntegral() == Generic().ToIntegral(); }

    static Types Generic() { return Types((1 << NUMBER_OF_TYPES) - 1); }
  };

  explicit ToBooleanStub(Types types = Types())
      : types_(types) { }
  explicit ToBooleanStub(Code::ExtraICState state)
      : types_(static_cast<byte>(state)) { }

  bool UpdateStatus(Handle<Object> object);
  Types GetTypes() { return types_; }

  virtual Handle<Code> GenerateCode(Isolate* isolate);
  virtual void InitializeInterfaceDescriptor(
      Isolate* isolate,
      CodeStubInterfaceDescriptor* descriptor);

  virtual Code::Kind GetCodeKind() const { return Code::TO_BOOLEAN_IC; }
  virtual void PrintState(StringStream* stream);

  virtual bool SometimesSetsUpAFrame() { return false; }

  static void InitializeForIsolate(Isolate* isolate) {
    ToBooleanStub stub;
    stub.InitializeInterfaceDescriptor(
        isolate,
        isolate->code_stub_interface_descriptor(CodeStub::ToBoolean));
  }

  static Handle<Code> GetUninitialized(Isolate* isolate) {
    return ToBooleanStub(UNINITIALIZED).GetCode(isolate);
  }

  virtual Code::ExtraICState GetExtraICState() {
    return types_.ToIntegral();
  }

  virtual InlineCacheState GetICState() {
    if (types_.IsEmpty()) {
      return ::v8::internal::UNINITIALIZED;
    } else {
      return MONOMORPHIC;
    }
  }

 private:
  Major MajorKey() { return ToBoolean; }
  int NotMissMinorKey() { return GetExtraICState(); }

  // Used by GetUninitialized() above.
  explicit ToBooleanStub(InitializationState init_state) :
      HydrogenCodeStub(init_state) {}

  Types types_;
};
// Hydrogen stub that transitions the receiver's elements kind and then
// performs a keyed store (see the code-stub graph builder for details).
class ElementsTransitionAndStoreStub : public HydrogenCodeStub {
 public:
  ElementsTransitionAndStoreStub(ElementsKind from_kind,
                                 ElementsKind to_kind,
                                 bool is_jsarray,
                                 KeyedAccessStoreMode store_mode)
      : from_kind_(from_kind),
        to_kind_(to_kind),
        is_jsarray_(is_jsarray),
        store_mode_(store_mode) {}

  ElementsKind from_kind() const { return from_kind_; }
  ElementsKind to_kind() const { return to_kind_; }
  bool is_jsarray() const { return is_jsarray_; }
  KeyedAccessStoreMode store_mode() const { return store_mode_; }

  virtual Handle<Code> GenerateCode(Isolate* isolate);

  // Declared virtual explicitly for consistency with the other Hydrogen
  // stubs in this file (it overrides the same base-class method and is
  // implicitly virtual either way).
  virtual void InitializeInterfaceDescriptor(
      Isolate* isolate,
      CodeStubInterfaceDescriptor* descriptor);

 private:
  // Minor-key bit layout.
  class FromBits: public BitField<ElementsKind, 0, 8> {};
  class ToBits: public BitField<ElementsKind, 8, 8> {};
  class IsJSArrayBits: public BitField<bool, 16, 1> {};
  class StoreModeBits: public BitField<KeyedAccessStoreMode, 17, 4> {};

  Major MajorKey() { return ElementsTransitionAndStore; }
  int NotMissMinorKey() {
    return FromBits::encode(from_kind_) |
        ToBits::encode(to_kind_) |
        IsJSArrayBits::encode(is_jsarray_) |
        StoreModeBits::encode(store_mode_);
  }

  ElementsKind from_kind_;
  ElementsKind to_kind_;
  bool is_jsarray_;
  KeyedAccessStoreMode store_mode_;

  DISALLOW_COPY_AND_ASSIGN(ElementsTransitionAndStoreStub);
};
class StoreArrayLiteralElementStub : public PlatformCodeStub {
public:
StoreArrayLiteralElementStub()
: fp_registers_(CanUseFPRegisters()) { }
private:
class FPRegisters: public BitField<bool, 0, 1> {};
Major MajorKey() { return StoreArrayLiteralElement; }
int MinorKey() { return FPRegisters::encode(fp_registers_); }
void Generate(MacroAssembler* masm);
bool fp_registers_;
DISALLOW_COPY_AND_ASSIGN(StoreArrayLiteralElementStub);
};
class StubFailureTrampolineStub : public PlatformCodeStub {
public:
explicit StubFailureTrampolineStub(StubFunctionMode function_mode)
: fp_registers_(CanUseFPRegisters()), function_mode_(function_mode) {}
virtual bool IsPregenerated(Isolate* isolate) V8_OVERRIDE { return true; }
static void GenerateAheadOfTime(Isolate* isolate);
private:
class FPRegisters: public BitField<bool, 0, 1> {};
class FunctionModeField: public BitField<StubFunctionMode, 1, 1> {};
Major MajorKey() { return StubFailureTrampoline; }
int MinorKey() {
return FPRegisters::encode(fp_registers_) |
FunctionModeField::encode(function_mode_);
}
void Generate(MacroAssembler* masm);
bool fp_registers_;
StubFunctionMode function_mode_;
DISALLOW_COPY_AND_ASSIGN(StubFailureTrampolineStub);
};
class StubFailureTailCallTrampolineStub : public PlatformCodeStub {
public:
StubFailureTailCallTrampolineStub() : fp_registers_(CanUseFPRegisters()) {}
virtual bool IsPregenerated(Isolate* isolate) V8_OVERRIDE { return true; }
static void GenerateAheadOfTime(Isolate* isolate);
private:
class FPRegisters: public BitField<bool, 0, 1> {};
Major MajorKey() { return StubFailureTailCallTrampoline; }
int MinorKey() { return FPRegisters::encode(fp_registers_); }
void Generate(MacroAssembler* masm);
bool fp_registers_;
DISALLOW_COPY_AND_ASSIGN(StubFailureTailCallTrampolineStub);
};
class ProfileEntryHookStub : public PlatformCodeStub {
public:
explicit ProfileEntryHookStub() {}
// The profile entry hook function is not allowed to cause a GC.
virtual bool SometimesSetsUpAFrame() { return false; }
// Generates a call to the entry hook if it's enabled.
static void MaybeCallEntryHook(MacroAssembler* masm);
private:
static void EntryHookTrampoline(intptr_t function,
intptr_t stack_pointer,
Isolate* isolate);
Major MajorKey() { return ProfileEntryHook; }
int MinorKey() { return 0; }
void Generate(MacroAssembler* masm);
DISALLOW_COPY_AND_ASSIGN(ProfileEntryHookStub);
};
} } // namespace v8::internal
#endif // V8_CODE_STUBS_H_