Reland and fix "Add support for keyed-call on arrays of fast elements"
BUG=
R=danno@chromium.org

Review URL: https://chromiumcodereview.appspot.com/71783003

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@17782 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
Commit: 341d405301
Parent: d62337e127
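
For context, the pattern this change optimizes is a keyed call through an array with fast elements: a[i](...) where i is a Smi index and a is a JSArray still using one of the initial array maps. The new KeyedArrayCallStub checks the map, loads the element, and tail-calls it; other shapes keep using the generic keyed-call IC. An illustrative JavaScript sketch (not part of the patch; the names are made up, only the call shape matters):

var handlers = [
  function(x) { return x + 10; },
  function(x) { return x + 20; }
];

function dispatch(i, x) {
  // handlers[i](...) with a Smi index on a fast-elements JSArray is the
  // shape the new KeyedArrayCallStub specializes.
  return handlers[i](x);
}

dispatch(0, 1);  // 11 -- keyed call on packed fast elements
dispatch(1, 2);  // 22

// A non-Smi key, or an array that is no longer in its initial-map fast
// state, misses the stub and falls back to the generic keyed-call IC.
handlers.generic = function(x) { return x + 30; };
dispatch("generic", 3);  // 33

The new mjsunit tests at the end of this patch exercise exactly this shape, including the deopt paths for non-Smi keys and holey arrays.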
@@ -133,6 +133,19 @@ void KeyedLoadFieldStub::InitializeInterfaceDescriptor(
}


void KeyedArrayCallStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { r2 };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->continuation_type_ = TAIL_CALL_CONTINUATION;
  descriptor->handler_arguments_mode_ = PASS_ARGUMENTS;
  descriptor->deoptimization_handler_ =
      FUNCTION_ADDR(KeyedCallIC_MissFromStubFailure);
}


void KeyedStoreFastElementStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
@@ -5693,6 +5706,24 @@ void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
}


void StubFailureTailCallTrampolineStub::Generate(MacroAssembler* masm) {
  CEntryStub ces(1, fp_registers_ ? kSaveFPRegs : kDontSaveFPRegs);
  __ Call(ces.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);
  __ mov(r1, r0);
  int parameter_count_offset =
      StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset;
  __ ldr(r0, MemOperand(fp, parameter_count_offset));
  // The parameter count above includes the receiver for the arguments passed to
  // the deoptimization handler. Subtract the receiver for the parameter count
  // for the call.
  __ sub(r0, r0, Operand(1));
  masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
  ParameterCount argument_count(r0);
  __ InvokeFunction(
      r1, argument_count, JUMP_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
}


void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
  if (masm->isolate()->function_entry_hook() != NULL) {
    PredictableCodeSizeScope predictable(masm, 4 * Assembler::kInstrSize);

@@ -1328,8 +1328,10 @@ LInstruction* LChunkBuilder::DoCallNewArray(HCallNewArray* instr) {
LInstruction* LChunkBuilder::DoCallFunction(HCallFunction* instr) {
  LOperand* context = UseFixed(instr->context(), cp);
  LOperand* function = UseFixed(instr->function(), r1);
  return MarkAsCall(
      DefineFixed(new(zone()) LCallFunction(context, function), r0), instr);
  LCallFunction* call = new(zone()) LCallFunction(context, function);
  LInstruction* result = DefineFixed(call, r0);
  if (instr->IsTailCall()) return result;
  return MarkAsCall(result, instr);
}

@@ -509,17 +509,36 @@ Operand LCodeGen::ToOperand(LOperand* op) {
}


static int ArgumentsOffsetWithoutFrame(int index) {
  ASSERT(index < 0);
  return -(index + 1) * kPointerSize;
}


MemOperand LCodeGen::ToMemOperand(LOperand* op) const {
  ASSERT(!op->IsRegister());
  ASSERT(!op->IsDoubleRegister());
  ASSERT(op->IsStackSlot() || op->IsDoubleStackSlot());
  return MemOperand(fp, StackSlotOffset(op->index()));
  if (NeedsEagerFrame()) {
    return MemOperand(fp, StackSlotOffset(op->index()));
  } else {
    // Retrieve parameter without eager stack-frame relative to the
    // stack-pointer.
    return MemOperand(sp, ArgumentsOffsetWithoutFrame(op->index()));
  }
}


MemOperand LCodeGen::ToHighMemOperand(LOperand* op) const {
  ASSERT(op->IsDoubleStackSlot());
  return MemOperand(fp, StackSlotOffset(op->index()) + kPointerSize);
  if (NeedsEagerFrame()) {
    return MemOperand(fp, StackSlotOffset(op->index()) + kPointerSize);
  } else {
    // Retrieve parameter without eager stack-frame relative to the
    // stack-pointer.
    return MemOperand(
        sp, ArgumentsOffsetWithoutFrame(op->index()) + kPointerSize);
  }
}


@@ -4107,7 +4126,12 @@ void LCodeGen::DoCallFunction(LCallFunction* instr) {

  int arity = instr->arity();
  CallFunctionStub stub(arity, NO_CALL_FUNCTION_FLAGS);
  CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
  if (instr->hydrogen()->IsTailCall()) {
    if (NeedsEagerFrame()) __ mov(sp, fp);
    __ Jump(stub.GetCode(isolate()), RelocInfo::CODE_TARGET);
  } else {
    CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
  }
}

@@ -776,7 +776,7 @@ void Call::RecordTypeFeedback(TypeFeedbackOracle* oracle,
  if (property == NULL) {
    // Function call. Specialize for monomorphic calls.
    if (is_monomorphic_) target_ = oracle->GetCallTarget(this);
  } else {
  } else if (property->key()->IsPropertyName()) {
    // Method call. Specialize for the receiver types seen at runtime.
    Literal* key = property->key()->AsLiteral();
    ASSERT(key != NULL && key->value()->IsString());
@@ -803,6 +803,10 @@ void Call::RecordTypeFeedback(TypeFeedbackOracle* oracle,
      Handle<Map> map = receiver_types_.first();
      is_monomorphic_ = ComputeTarget(map, name);
    }
  } else {
    if (is_monomorphic_) {
      keyed_array_call_is_holey_ = oracle->KeyedArrayCallIsHoley(this);
    }
  }
}

@@ -1728,6 +1728,7 @@ class Call V8_FINAL : public Expression {
    return &receiver_types_;
  }
  virtual bool IsMonomorphic() V8_OVERRIDE { return is_monomorphic_; }
  bool KeyedArrayCallIsHoley() { return keyed_array_call_is_holey_; }
  CheckType check_type() const { return check_type_; }

  void set_string_check(Handle<JSObject> holder) {
@@ -1778,6 +1779,7 @@ class Call V8_FINAL : public Expression {
        expression_(expression),
        arguments_(arguments),
        is_monomorphic_(false),
        keyed_array_call_is_holey_(true),
        check_type_(RECEIVER_MAP_CHECK),
        return_id_(GetNextId(isolate)) { }

@@ -1786,6 +1788,7 @@ class Call V8_FINAL : public Expression {
  ZoneList<Expression*>* arguments_;

  bool is_monomorphic_;
  bool keyed_array_call_is_holey_;
  CheckType check_type_;
  SmallMapList receiver_types_;
  Handle<JSFunction> target_;

@@ -594,6 +594,32 @@ Handle<Code> KeyedLoadFieldStub::GenerateCode(Isolate* isolate) {
}


template<>
HValue* CodeStubGraphBuilder<KeyedArrayCallStub>::BuildCodeStub() {
  int argc = casted_stub()->argc() + 1;
  info()->set_parameter_count(argc);

  HValue* receiver = Add<HParameter>(1);

  // Load the expected initial array map from the context.
  JSArrayBuilder array_builder(this, casted_stub()->elements_kind());
  HValue* map = array_builder.EmitMapCode();

  HValue* checked_receiver = Add<HCheckMapValue>(receiver, map);

  HValue* function = BuildUncheckedMonomorphicElementAccess(
      checked_receiver, GetParameter(0),
      NULL, true, casted_stub()->elements_kind(),
      false, NEVER_RETURN_HOLE, STANDARD_STORE);
  return Add<HCallFunction>(function, argc, TAIL_CALL);
}


Handle<Code> KeyedArrayCallStub::GenerateCode(Isolate* isolate) {
  return DoGenerateCode(isolate, this);
}


template <>
HValue* CodeStubGraphBuilder<KeyedStoreFastElementStub>::BuildCodeStub() {
  BuildUncheckedMonomorphicElementAccess(

@@ -43,6 +43,7 @@ CodeStubInterfaceDescriptor::CodeStubInterfaceDescriptor()
    : register_param_count_(-1),
      stack_parameter_count_(no_reg),
      hint_stack_parameter_count_(-1),
      continuation_type_(NORMAL_CONTINUATION),
      function_mode_(NOT_JS_FUNCTION_STUB_MODE),
      register_params_(NULL),
      deoptimization_handler_(NULL),
@@ -51,6 +52,11 @@ CodeStubInterfaceDescriptor::CodeStubInterfaceDescriptor()
      has_miss_handler_(false) { }


void CodeStub::GenerateStubsRequiringBuiltinsAheadOfTime(Isolate* isolate) {
  StubFailureTailCallTrampolineStub::GenerateAheadOfTime(isolate);
}


bool CodeStub::FindCodeInCache(Code** code_out, Isolate* isolate) {
  UnseededNumberDictionary* stubs = isolate->heap()->code_stubs();
  int index = stubs->FindEntry(GetKey());
@@ -1109,6 +1115,12 @@ void StubFailureTrampolineStub::GenerateAheadOfTime(Isolate* isolate) {
}


void StubFailureTailCallTrampolineStub::GenerateAheadOfTime(Isolate* isolate) {
  StubFailureTailCallTrampolineStub stub;
  stub.GetCode(isolate)->set_is_pregenerated(true);
}


void ProfileEntryHookStub::EntryHookTrampoline(intptr_t function,
                                               intptr_t stack_pointer,
                                               Isolate* isolate) {

@@ -90,13 +90,15 @@ namespace internal {
  V(TransitionElementsKind) \
  V(StoreArrayLiteralElement) \
  V(StubFailureTrampoline) \
  V(StubFailureTailCallTrampoline) \
  V(ArrayConstructor) \
  V(InternalArrayConstructor) \
  V(ProfileEntryHook) \
  V(StoreGlobal) \
  /* IC Handler stubs */ \
  V(LoadField) \
  V(KeyedLoadField)
  V(KeyedLoadField) \
  V(KeyedArrayCall)

// List of code stubs only used on ARM platforms.
#if V8_TARGET_ARCH_ARM
@@ -170,6 +172,7 @@ class CodeStub BASE_EMBEDDED {
  virtual bool IsPregenerated(Isolate* isolate) { return false; }

  static void GenerateStubsAheadOfTime(Isolate* isolate);
  static void GenerateStubsRequiringBuiltinsAheadOfTime(Isolate* isolate);
  static void GenerateFPStubs(Isolate* isolate);

  // Some stubs put untagged junk on the stack that cannot be scanned by the
@@ -279,6 +282,9 @@ class PlatformCodeStub : public CodeStub {
enum StubFunctionMode { NOT_JS_FUNCTION_STUB_MODE, JS_FUNCTION_STUB_MODE };
enum HandlerArgumentsMode { DONT_PASS_ARGUMENTS, PASS_ARGUMENTS };

enum ContinuationType { NORMAL_CONTINUATION, TAIL_CALL_CONTINUATION };


struct CodeStubInterfaceDescriptor {
  CodeStubInterfaceDescriptor();
  int register_param_count_;
@@ -287,18 +293,23 @@ struct CodeStubInterfaceDescriptor {
  // if hint_stack_parameter_count_ > 0, the code stub can optimize the
  // return sequence. Default value is -1, which means it is ignored.
  int hint_stack_parameter_count_;
  ContinuationType continuation_type_;
  StubFunctionMode function_mode_;
  Register* register_params_;

  Address deoptimization_handler_;
  HandlerArgumentsMode handler_arguments_mode_;

  bool initialized() const { return register_param_count_ >= 0; }

  bool HasTailCallContinuation() const {
    return continuation_type_ == TAIL_CALL_CONTINUATION;
  }

  int environment_length() const {
    return register_param_count_;
  }

  bool initialized() const { return register_param_count_ >= 0; }

  void SetMissHandler(ExternalReference handler) {
    miss_handler_ = handler;
    has_miss_handler_ = true;
@@ -876,6 +887,11 @@ class HandlerStub: public HICStub {
 public:
  virtual Code::Kind GetCodeKind() const { return Code::HANDLER; }
  virtual int GetStubFlags() { return kind(); }

 protected:
  HandlerStub() : HICStub() { }
  virtual int NotMissMinorKey() { return bit_field_; }
  int bit_field_;
};


@@ -937,9 +953,6 @@ class LoadFieldStub: public HandlerStub {
  class IndexBits: public BitField<int, 5, 11> {};
  class UnboxedDoubleBits: public BitField<bool, 16, 1> {};
  virtual CodeStub::Major MajorKey() { return LoadField; }
  virtual int NotMissMinorKey() { return bit_field_; }

  int bit_field_;
};


@@ -1018,6 +1031,50 @@ class KeyedLoadFieldStub: public LoadFieldStub {
};


class KeyedArrayCallStub: public HICStub {
 public:
  KeyedArrayCallStub(bool holey, int argc) : HICStub(), argc_(argc) {
    bit_field_ = ContextualBits::encode(false) | HoleyBits::encode(holey);
  }

  virtual Code::Kind kind() const { return Code::KEYED_CALL_IC; }
  virtual Code::ExtraICState GetExtraICState() { return bit_field_; }

  ElementsKind elements_kind() {
    return HoleyBits::decode(bit_field_) ? FAST_HOLEY_ELEMENTS : FAST_ELEMENTS;
  }

  int argc() { return argc_; }
  virtual int GetStubFlags() { return argc(); }

  static bool IsHoley(Handle<Code> code) {
    Code::ExtraICState state = code->extra_ic_state();
    return HoleyBits::decode(state);
  }

  virtual void InitializeInterfaceDescriptor(
      Isolate* isolate,
      CodeStubInterfaceDescriptor* descriptor);

  virtual Handle<Code> GenerateCode(Isolate* isolate);

 private:
  virtual int NotMissMinorKey() {
    return GetExtraICState() | ArgcBits::encode(argc_);
  }

  class ContextualBits: public BitField<bool, 0, 1> {};
  STATIC_ASSERT(CallICBase::Contextual::kShift == ContextualBits::kShift);
  STATIC_ASSERT(CallICBase::Contextual::kSize == ContextualBits::kSize);
  class HoleyBits: public BitField<bool, 1, 1> {};
  STATIC_ASSERT(Code::kArgumentsBits <= kStubMinorKeyBits - 2);
  class ArgcBits: public BitField<int, 2, Code::kArgumentsBits> {};
  virtual CodeStub::Major MajorKey() { return KeyedArrayCall; }
  int bit_field_;
  int argc_;
};


class BinaryOpStub: public HydrogenCodeStub {
 public:
  BinaryOpStub(Token::Value op, OverwriteMode mode)
@@ -2399,6 +2456,27 @@ class StubFailureTrampolineStub : public PlatformCodeStub {
};


class StubFailureTailCallTrampolineStub : public PlatformCodeStub {
 public:
  StubFailureTailCallTrampolineStub() : fp_registers_(CanUseFPRegisters()) {}

  virtual bool IsPregenerated(Isolate* isolate) V8_OVERRIDE { return true; }

  static void GenerateAheadOfTime(Isolate* isolate);

 private:
  class FPRegisters: public BitField<bool, 0, 1> {};
  Major MajorKey() { return StubFailureTailCallTrampoline; }
  int MinorKey() { return FPRegisters::encode(fp_registers_); }

  void Generate(MacroAssembler* masm);

  bool fp_registers_;

  DISALLOW_COPY_AND_ASSIGN(StubFailureTailCallTrampolineStub);
};


class ProfileEntryHookStub : public PlatformCodeStub {
 public:
  explicit ProfileEntryHookStub() {}

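The HoleyBits field above distinguishes packed from holey fast elements, which is what KeyedArrayCallStub::elements_kind() returns. A rough JavaScript illustration of the two kinds (not part of the patch):

function f() { return 1; }
function g() { return 2; }

var packed = [f, g];    // packed fast object elements (FAST_ELEMENTS)
var holey  = [f, , g];  // the elision leaves a hole at index 1, so the array
                        // starts out with FAST_HOLEY_ELEMENTS

packed[1]();  // a stub compiled with HoleyBits == false covers this site
holey[2]();   // feedback for this site records the holey kind instead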
@@ -59,7 +59,8 @@ CompilationInfo::CompilationInfo(Handle<Script> script,
    : flags_(LanguageModeField::encode(CLASSIC_MODE)),
      script_(script),
      osr_ast_id_(BailoutId::None()),
      osr_pc_offset_(0) {
      osr_pc_offset_(0),
      parameter_count_(0) {
  Initialize(script->GetIsolate(), BASE, zone);
}

@@ -70,7 +71,8 @@ CompilationInfo::CompilationInfo(Handle<SharedFunctionInfo> shared_info,
      shared_info_(shared_info),
      script_(Handle<Script>(Script::cast(shared_info->script()))),
      osr_ast_id_(BailoutId::None()),
      osr_pc_offset_(0) {
      osr_pc_offset_(0),
      parameter_count_(0) {
  Initialize(script_->GetIsolate(), BASE, zone);
}

@@ -83,7 +85,8 @@ CompilationInfo::CompilationInfo(Handle<JSFunction> closure,
      script_(Handle<Script>(Script::cast(shared_info_->script()))),
      context_(closure->context()),
      osr_ast_id_(BailoutId::None()),
      osr_pc_offset_(0) {
      osr_pc_offset_(0),
      parameter_count_(0) {
  Initialize(script_->GetIsolate(), BASE, zone);
}

@@ -94,7 +97,8 @@ CompilationInfo::CompilationInfo(HydrogenCodeStub* stub,
    : flags_(LanguageModeField::encode(CLASSIC_MODE) |
             IsLazy::encode(true)),
      osr_ast_id_(BailoutId::None()),
      osr_pc_offset_(0) {
      osr_pc_offset_(0),
      parameter_count_(0) {
  Initialize(isolate, STUB, zone);
  code_stub_ = stub;
}
@@ -184,8 +188,12 @@ void CompilationInfo::RollbackDependencies() {


int CompilationInfo::num_parameters() const {
  ASSERT(!IsStub());
  return scope()->num_parameters();
  if (IsStub()) {
    ASSERT(parameter_count_ > 0);
    return parameter_count_;
  } else {
    return scope()->num_parameters();
  }
}

@@ -98,6 +98,10 @@ class CompilationInfo {
    ASSERT(!is_lazy());
    flags_ |= IsGlobal::encode(true);
  }
  void set_parameter_count(int parameter_count) {
    ASSERT(IsStub());
    parameter_count_ = parameter_count;
  }
  void SetLanguageMode(LanguageMode language_mode) {
    ASSERT(this->language_mode() == CLASSIC_MODE ||
           this->language_mode() == language_mode ||
@@ -442,6 +446,9 @@ class CompilationInfo {
  // during graph optimization.
  int opt_count_;

  // Number of parameters used for compilation of stubs that require arguments.
  int parameter_count_;

  Handle<Foreign> object_wrapper_;

  DISALLOW_COPY_AND_ASSIGN(CompilationInfo);

@@ -1465,8 +1465,9 @@ void Deoptimizer::DoComputeCompiledStubFrame(TranslationIterator* iterator,
  int output_frame_size = height_in_bytes + fixed_frame_size;
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           "  translating %s => StubFailureTrampolineStub, height=%d\n",
           "  translating %s => StubFailure%sTrampolineStub, height=%d\n",
           CodeStub::MajorName(static_cast<CodeStub::Major>(major_key), false),
           descriptor->HasTailCallContinuation() ? "TailCall" : "",
           height_in_bytes);
  }

@@ -1538,7 +1539,8 @@ void Deoptimizer::DoComputeCompiledStubFrame(TranslationIterator* iterator,
        top_address + output_frame_offset, output_frame_offset, value);
  }

  intptr_t caller_arg_count = 0;
  intptr_t caller_arg_count = descriptor->HasTailCallContinuation()
      ? compiled_code_->arguments_count() + 1 : 0;
  bool arg_count_known = !descriptor->stack_parameter_count_.is_valid();

  // Build the Arguments object for the caller's parameters and a pointer to it.
@@ -1634,9 +1636,13 @@ void Deoptimizer::DoComputeCompiledStubFrame(TranslationIterator* iterator,

  // Compute this frame's PC, state, and continuation.
  Code* trampoline = NULL;
  StubFunctionMode function_mode = descriptor->function_mode_;
  StubFailureTrampolineStub(function_mode).FindCodeInCache(&trampoline,
                                                           isolate_);
  if (descriptor->HasTailCallContinuation()) {
    StubFailureTailCallTrampolineStub().FindCodeInCache(&trampoline, isolate_);
  } else {
    StubFunctionMode function_mode = descriptor->function_mode_;
    StubFailureTrampolineStub(function_mode).FindCodeInCache(&trampoline,
                                                             isolate_);
  }
  ASSERT(trampoline != NULL);
  output_frame->SetPc(reinterpret_cast<intptr_t>(
      trampoline->instruction_start()));

@@ -1401,6 +1401,11 @@ Code* StubFailureTrampolineFrame::unchecked_code() const {
    return trampoline;
  }

  StubFailureTailCallTrampolineStub().FindCodeInCache(&trampoline, isolate());
  if (trampoline->contains(pc())) {
    return trampoline;
  }

  UNREACHABLE();
  return NULL;
}

@@ -3118,6 +3118,12 @@ void Heap::CreateFixedStubs() {
}


void Heap::CreateStubsRequiringBuiltins() {
  HandleScope scope(isolate());
  CodeStub::GenerateStubsRequiringBuiltinsAheadOfTime(isolate());
}


bool Heap::CreateInitialObjects() {
  Object* obj;

@@ -2135,6 +2135,7 @@ class Heap {
  NO_INLINE(void CreateJSConstructEntryStub());

  void CreateFixedStubs();
  void CreateStubsRequiringBuiltins();

  MUST_USE_RESULT MaybeObject* CreateOddball(const char* to_string,
                                             Object* to_number,

@@ -2289,19 +2289,38 @@ class HCallNamed V8_FINAL : public HUnaryCall {
};


enum CallMode {
  NORMAL_CALL,
  TAIL_CALL
};


class HCallFunction V8_FINAL : public HBinaryCall {
 public:
  DECLARE_INSTRUCTION_WITH_CONTEXT_FACTORY_P2(HCallFunction, HValue*, int);
  DECLARE_INSTRUCTION_WITH_CONTEXT_FACTORY_P3(
      HCallFunction, HValue*, int, CallMode);

  bool IsTailCall() const { return call_mode_ == TAIL_CALL; }

  HValue* context() { return first(); }
  HValue* function() { return second(); }

  DECLARE_CONCRETE_INSTRUCTION(CallFunction)

 private:
  HCallFunction(HValue* context, HValue* function, int argument_count)
      : HBinaryCall(context, function, argument_count) {
  virtual int argument_delta() const V8_OVERRIDE {
    if (IsTailCall()) return 0;
    return -argument_count();
  }

 private:
  HCallFunction(HValue* context,
                HValue* function,
                int argument_count,
                CallMode mode = NORMAL_CALL)
      : HBinaryCall(context, function, argument_count), call_mode_(mode) {
  }
  CallMode call_mode_;
};


@@ -7183,6 +7202,8 @@ class HCheckMapValue V8_FINAL : public HTemplateInstruction<2> {
  DECLARE_CONCRETE_INSTRUCTION(CheckMapValue)

 protected:
  virtual int RedefinedOperandIndex() { return 0; }

  virtual bool DataEquals(HValue* other) V8_OVERRIDE {
    return true;
  }

@@ -2493,7 +2493,7 @@ HValue* HGraphBuilder::JSArrayBuilder::EmitMapCode() {
    return builder()->Add<HConstant>(map);
  }

  if (kind_ == GetInitialFastElementsKind()) {
  if (constructor_function_ != NULL && kind_ == GetInitialFastElementsKind()) {
    // No need for a context lookup if the kind_ matches the initial
    // map, because we can just load the map in that case.
    HObjectAccess access = HObjectAccess::ForPrototypeOrInitialMap();
@@ -7471,18 +7471,30 @@ void HOptimizedGraphBuilder::VisitCall(Call* expr) {
  if (prop != NULL) {
    if (!prop->key()->IsPropertyName()) {
      // Keyed function call.
      CHECK_ALIVE(VisitArgument(prop->obj()));

      CHECK_ALIVE(VisitForValue(prop->obj()));
      CHECK_ALIVE(VisitForValue(prop->key()));

      // Push receiver and key like the non-optimized code generator expects it.
      HValue* key = Pop();
      HValue* receiver = Pop();
      Push(key);
      Push(receiver);

      Push(Add<HPushArgument>(receiver));
      CHECK_ALIVE(VisitArgumentList(expr->arguments()));

      call = New<HCallKeyed>(key, argument_count);
      if (expr->IsMonomorphic()) {
        BuildCheckHeapObject(receiver);
        ElementsKind kind = expr->KeyedArrayCallIsHoley()
            ? FAST_HOLEY_ELEMENTS : FAST_ELEMENTS;

        Handle<Map> map(isolate()->get_initial_js_array_map(kind));

        HValue* function = BuildMonomorphicElementAccess(
            receiver, key, NULL, NULL, map, false, STANDARD_STORE);

        call = New<HCallFunction>(function, argument_count);
      } else {
        call = New<HCallKeyed>(key, argument_count);
      }
      Drop(argument_count + 1);  // 1 is the key.
      return ast_context()->ReturnInstruction(call, expr->id());
    }

@@ -1610,7 +1610,7 @@ class HGraphBuilder {

    JSArrayBuilder(HGraphBuilder* builder,
                   ElementsKind kind,
                   HValue* constructor_function);
                   HValue* constructor_function = NULL);

    enum FillMode {
      DONT_FILL_WITH_HOLE,
@@ -1623,6 +1623,7 @@ class HGraphBuilder {
    HValue* AllocateArray(HValue* capacity, HValue* length_field,
                          FillMode fill_mode = FILL_WITH_HOLE);
    HValue* GetElementsLocation() { return elements_location_; }
    HValue* EmitMapCode();

   private:
    Zone* zone() const { return builder_->zone(); }
@@ -1636,7 +1637,6 @@ class HGraphBuilder {
      return JSArray::kPreallocatedArrayElements;
    }

    HValue* EmitMapCode();
    HValue* EmitInternalMapCode();
    HValue* EstablishEmptyArrayAllocationSize();
    HValue* EstablishAllocationSize(HValue* length_node);

@@ -138,6 +138,19 @@ void KeyedLoadFieldStub::InitializeInterfaceDescriptor(
}


void KeyedArrayCallStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { ecx };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->continuation_type_ = TAIL_CALL_CONTINUATION;
  descriptor->handler_arguments_mode_ = PASS_ARGUMENTS;
  descriptor->deoptimization_handler_ =
      FUNCTION_ADDR(KeyedCallIC_MissFromStubFailure);
}


void KeyedStoreFastElementStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
@@ -5659,6 +5672,24 @@ void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
}


void StubFailureTailCallTrampolineStub::Generate(MacroAssembler* masm) {
  CEntryStub ces(1, fp_registers_ ? kSaveFPRegs : kDontSaveFPRegs);
  __ call(ces.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);
  __ mov(edi, eax);
  int parameter_count_offset =
      StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset;
  __ mov(eax, MemOperand(ebp, parameter_count_offset));
  // The parameter count above includes the receiver for the arguments passed to
  // the deoptimization handler. Subtract the receiver for the parameter count
  // for the call.
  __ sub(eax, Immediate(1));
  masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
  ParameterCount argument_count(eax);
  __ InvokeFunction(
      edi, argument_count, JUMP_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
}


void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
  if (masm->isolate()->function_entry_hook() != NULL) {
    // It's always safe to call the entry hook stub, as the hook itself

@@ -781,17 +781,36 @@ bool LCodeGen::IsSmi(LConstantOperand* op) const {
}


static int ArgumentsOffsetWithoutFrame(int index) {
  ASSERT(index < 0);
  return -(index + 1) * kPointerSize + kPCOnStackSize;
}


Operand LCodeGen::ToOperand(LOperand* op) const {
  if (op->IsRegister()) return Operand(ToRegister(op));
  if (op->IsDoubleRegister()) return Operand(ToDoubleRegister(op));
  ASSERT(op->IsStackSlot() || op->IsDoubleStackSlot());
  return Operand(ebp, StackSlotOffset(op->index()));
  if (NeedsEagerFrame()) {
    return Operand(ebp, StackSlotOffset(op->index()));
  } else {
    // Retrieve parameter without eager stack-frame relative to the
    // stack-pointer.
    return Operand(esp, ArgumentsOffsetWithoutFrame(op->index()));
  }
}


Operand LCodeGen::HighOperand(LOperand* op) {
  ASSERT(op->IsDoubleStackSlot());
  return Operand(ebp, StackSlotOffset(op->index()) + kPointerSize);
  if (NeedsEagerFrame()) {
    return Operand(ebp, StackSlotOffset(op->index()) + kPointerSize);
  } else {
    // Retrieve parameter without eager stack-frame relative to the
    // stack-pointer.
    return Operand(
        esp, ArgumentsOffsetWithoutFrame(op->index()) + kPointerSize);
  }
}


@@ -4376,7 +4395,12 @@ void LCodeGen::DoCallFunction(LCallFunction* instr) {

  int arity = instr->arity();
  CallFunctionStub stub(arity, NO_CALL_FUNCTION_FLAGS);
  CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
  if (instr->hydrogen()->IsTailCall()) {
    if (NeedsEagerFrame()) __ leave();
    __ jmp(stub.GetCode(isolate()), RelocInfo::CODE_TARGET);
  } else {
    CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
  }
}

@@ -1410,8 +1410,10 @@ LInstruction* LChunkBuilder::DoCallNewArray(HCallNewArray* instr) {
LInstruction* LChunkBuilder::DoCallFunction(HCallFunction* instr) {
  LOperand* context = UseFixed(instr->context(), esi);
  LOperand* function = UseFixed(instr->function(), edi);
  LCallFunction* result = new(zone()) LCallFunction(context, function);
  return MarkAsCall(DefineFixed(result, eax), instr);
  LCallFunction* call = new(zone()) LCallFunction(context, function);
  LInstruction* result = DefineFixed(call, eax);
  if (instr->IsTailCall()) return result;
  return MarkAsCall(result, instr);
}

src/ic.cc
@@ -804,16 +804,34 @@ MaybeObject* KeyedCallIC::LoadFunction(Handle<Object> object,
  if (use_ic && state() != MEGAMORPHIC) {
    ASSERT(!object->IsJSGlobalProxy());
    int argc = target()->arguments_count();
    Handle<Code> stub = isolate()->stub_cache()->ComputeCallMegamorphic(
        argc, Code::KEYED_CALL_IC, Code::kNoExtraICState);
    if (object->IsJSObject()) {
      Handle<JSObject> receiver = Handle<JSObject>::cast(object);
      if (receiver->elements()->map() ==
          isolate()->heap()->non_strict_arguments_elements_map()) {
        stub = isolate()->stub_cache()->ComputeCallArguments(argc);
    Handle<Code> stub;

    // Use the KeyedArrayCallStub if the call is of the form array[smi](...),
    // where array is an instance of one of the initial array maps (without
    // extra named properties).
    // TODO(verwaest): Also support keyed calls on instances of other maps.
    if (object->IsJSArray() && key->IsSmi()) {
      Handle<JSArray> array = Handle<JSArray>::cast(object);
      ElementsKind kind = array->map()->elements_kind();
      if (IsFastObjectElementsKind(kind) &&
          array->map() == isolate()->get_initial_js_array_map(kind)) {
        KeyedArrayCallStub stub_gen(IsHoleyElementsKind(kind), argc);
        stub = stub_gen.GetCode(isolate());
      }
    }
    ASSERT(!stub.is_null());

    if (stub.is_null()) {
      stub = isolate()->stub_cache()->ComputeCallMegamorphic(
          argc, Code::KEYED_CALL_IC, Code::kNoExtraICState);
      if (object->IsJSObject()) {
        Handle<JSObject> receiver = Handle<JSObject>::cast(object);
        if (receiver->elements()->map() ==
            isolate()->heap()->non_strict_arguments_elements_map()) {
          stub = isolate()->stub_cache()->ComputeCallArguments(argc);
        }
      }
      ASSERT(!stub.is_null());
    }
    set_target(*stub);
    TRACE_IC("CallIC", key);
  }
@@ -2149,6 +2167,28 @@ RUNTIME_FUNCTION(MaybeObject*, StoreIC_MissFromStubFailure) {
}


RUNTIME_FUNCTION(MaybeObject*, KeyedCallIC_MissFromStubFailure) {
  HandleScope scope(isolate);
  ASSERT(args.length() == 2);
  KeyedCallIC ic(isolate);
  Arguments* caller_args = reinterpret_cast<Arguments*>(args[0]);
  Handle<Object> key = args.at<Object>(1);
  Handle<Object> receiver((*caller_args)[0], isolate);

  ic.UpdateState(receiver, key);
  MaybeObject* maybe_result = ic.LoadFunction(receiver, key);
  // Result could be a function or a failure.
  JSFunction* raw_function = NULL;
  if (!maybe_result->To(&raw_function)) return maybe_result;

  if (raw_function->is_compiled()) return raw_function;

  Handle<JSFunction> function(raw_function, isolate);
  JSFunction::CompileLazy(function, CLEAR_EXCEPTION);
  return *function;
}


RUNTIME_FUNCTION(MaybeObject*, StoreIC_ArrayLength) {
  SealHandleScope shs(isolate);

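As the comment in KeyedCallIC::LoadFunction above describes, only calls of the form array[smi](...) on an initial-map fast array take the new stub. A rough JavaScript illustration of which call sites qualify and which fall back to the megamorphic keyed-call stub (not part of the patch):

var a = [function() { return 1; }, function() { return 2; }];

a[0]();        // JSArray receiver, Smi key, fast object elements, and the
               // initial array map: eligible for the KeyedArrayCallStub.

a.extra = 42;  // An extra named property moves a off the initial array map,
               // so the call below goes back to the generic keyed-call stub.
a[0]();

a.__proto__.test = function() { return 3; };
a["test"]();   // String key: not of the form array[smi](...), so it is
               // handled by the generic keyed-call path as well.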
src/ic.h
@@ -910,6 +910,7 @@ DECLARE_RUNTIME_FUNCTION(MaybeObject*, KeyedLoadIC_MissFromStubFailure);
DECLARE_RUNTIME_FUNCTION(MaybeObject*, KeyedStoreIC_MissFromStubFailure);
DECLARE_RUNTIME_FUNCTION(MaybeObject*, UnaryOpIC_Miss);
DECLARE_RUNTIME_FUNCTION(MaybeObject*, StoreIC_MissFromStubFailure);
DECLARE_RUNTIME_FUNCTION(MaybeObject*, KeyedCallIC_MissFromStubFailure);
DECLARE_RUNTIME_FUNCTION(MaybeObject*, ElementsTransitionAndStoreIC_Miss);
DECLARE_RUNTIME_FUNCTION(MaybeObject*, BinaryOpIC_Miss);
DECLARE_RUNTIME_FUNCTION(MaybeObject*, CompareNilIC_Miss);

@@ -2238,6 +2238,8 @@ bool Isolate::Init(Deserializer* des) {
  bootstrapper_->Initialize(create_heap_objects);
  builtins_.SetUp(this, create_heap_objects);

  if (create_heap_objects) heap_.CreateStubsRequiringBuiltins();

  // Only preallocate on the first initialization.
  if (FLAG_preallocate_message_memory && preallocated_message_space_ == NULL) {
    // Start the thread which will set aside some memory.
@@ -2314,6 +2316,7 @@ bool Isolate::Init(Deserializer* des) {
    CodeStub::GenerateFPStubs(this);
    StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(this);
    StubFailureTrampolineStub::GenerateAheadOfTime(this);
    StubFailureTailCallTrampolineStub::GenerateAheadOfTime(this);
    // TODO(mstarzinger): The following is an ugly hack to make sure the
    // interface descriptor is initialized even when stubs have been
    // deserialized out of the snapshot without the graph builder.

@@ -372,7 +372,8 @@ int LChunk::GetParameterStackSlot(int index) const {
  // shift all parameter indexes down by the number of parameters, and
  // make sure they end up negative so they are distinguishable from
  // spill slots.
  int result = index - info()->scope()->num_parameters() - 1;
  int result = index - info()->num_parameters() - 1;

  ASSERT(result < 0);
  return result;
}

@@ -3901,6 +3901,7 @@ int Code::major_key() {
         kind() == STORE_IC ||
         kind() == LOAD_IC ||
         kind() == KEYED_LOAD_IC ||
         kind() == KEYED_CALL_IC ||
         kind() == TO_BOOLEAN_IC);
  return StubMajorKeyField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
@@ -3917,6 +3918,7 @@ void Code::set_major_key(int major) {
         kind() == KEYED_LOAD_IC ||
         kind() == STORE_IC ||
         kind() == KEYED_STORE_IC ||
         kind() == KEYED_CALL_IC ||
         kind() == TO_BOOLEAN_IC);
  ASSERT(0 <= major && major < 256);
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);

@@ -200,7 +200,15 @@ bool TypeFeedbackOracle::StoreIsKeyedPolymorphic(TypeFeedbackId ast_id) {
bool TypeFeedbackOracle::CallIsMonomorphic(Call* expr) {
  Handle<Object> value = GetInfo(expr->CallFeedbackId());
  return value->IsMap() || value->IsAllocationSite() || value->IsJSFunction() ||
      value->IsSmi();
      value->IsSmi() ||
      (value->IsCode() && Handle<Code>::cast(value)->ic_state() == MONOMORPHIC);
}


bool TypeFeedbackOracle::KeyedArrayCallIsHoley(Call* expr) {
  Handle<Object> value = GetInfo(expr->CallFeedbackId());
  Handle<Code> code = Handle<Code>::cast(value);
  return KeyedArrayCallStub::IsHoley(code);
}


@@ -617,7 +625,6 @@ void TypeFeedbackOracle::ProcessRelocInfos(ZoneList<RelocInfo>* infos) {
      case Code::LOAD_IC:
      case Code::STORE_IC:
      case Code::CALL_IC:
      case Code::KEYED_CALL_IC:
        if (target->ic_state() == MONOMORPHIC) {
          if (target->kind() == Code::CALL_IC &&
              target->check_type() != RECEIVER_MAP_CHECK) {
@@ -637,6 +644,7 @@ void TypeFeedbackOracle::ProcessRelocInfos(ZoneList<RelocInfo>* infos) {
        }
        break;

      case Code::KEYED_CALL_IC:
      case Code::KEYED_LOAD_IC:
      case Code::KEYED_STORE_IC:
      case Code::BINARY_OP_IC:

@@ -250,6 +250,7 @@ class TypeFeedbackOracle: public ZoneObject {
  bool StoreIsPreMonomorphic(TypeFeedbackId ast_id);
  bool StoreIsKeyedPolymorphic(TypeFeedbackId ast_id);
  bool CallIsMonomorphic(Call* expr);
  bool KeyedArrayCallIsHoley(Call* expr);
  bool CallNewIsMonomorphic(CallNew* expr);
  bool ObjectLiteralStoreIsMonomorphic(ObjectLiteralProperty* prop);

@@ -450,8 +450,7 @@ void AstTyper::VisitCall(Call* expr) {
  Expression* callee = expr->expression();
  Property* prop = callee->AsProperty();
  if (prop != NULL) {
    if (prop->key()->IsPropertyName())
      expr->RecordTypeFeedback(oracle(), CALL_AS_METHOD);
    expr->RecordTypeFeedback(oracle(), CALL_AS_METHOD);
  } else {
    expr->RecordTypeFeedback(oracle(), CALL_AS_FUNCTION);
  }

@@ -134,6 +134,19 @@ void KeyedLoadFieldStub::InitializeInterfaceDescriptor(
}


void KeyedArrayCallStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rcx };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->continuation_type_ = TAIL_CALL_CONTINUATION;
  descriptor->handler_arguments_mode_ = PASS_ARGUMENTS;
  descriptor->deoptimization_handler_ =
      FUNCTION_ADDR(KeyedCallIC_MissFromStubFailure);
}


void KeyedStoreFastElementStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
@@ -5452,6 +5465,24 @@ void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
}


void StubFailureTailCallTrampolineStub::Generate(MacroAssembler* masm) {
  CEntryStub ces(1, fp_registers_ ? kSaveFPRegs : kDontSaveFPRegs);
  __ Call(ces.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);
  __ movq(rdi, rax);
  int parameter_count_offset =
      StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset;
  __ movq(rax, MemOperand(rbp, parameter_count_offset));
  // The parameter count above includes the receiver for the arguments passed to
  // the deoptimization handler. Subtract the receiver for the parameter count
  // for the call.
  __ subl(rax, Immediate(1));
  masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
  ParameterCount argument_count(rax);
  __ InvokeFunction(
      rdi, argument_count, JUMP_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
}


void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
  if (masm->isolate()->function_entry_hook() != NULL) {
    // It's always safe to call the entry hook stub, as the hook itself

@@ -415,11 +415,23 @@ Handle<Object> LCodeGen::ToHandle(LConstantOperand* op) const {
}


static int ArgumentsOffsetWithoutFrame(int index) {
  ASSERT(index < 0);
  return -(index + 1) * kPointerSize + kPCOnStackSize;
}


Operand LCodeGen::ToOperand(LOperand* op) const {
  // Does not handle registers. In X64 assembler, plain registers are not
  // representable as an Operand.
  ASSERT(op->IsStackSlot() || op->IsDoubleStackSlot());
  return Operand(rbp, StackSlotOffset(op->index()));
  if (NeedsEagerFrame()) {
    return Operand(rbp, StackSlotOffset(op->index()));
  } else {
    // Retrieve parameter without eager stack-frame relative to the
    // stack-pointer.
    return Operand(rsp, ArgumentsOffsetWithoutFrame(op->index()));
  }
}


@@ -3910,7 +3922,12 @@ void LCodeGen::DoCallFunction(LCallFunction* instr) {

  int arity = instr->arity();
  CallFunctionStub stub(arity, NO_CALL_FUNCTION_FLAGS);
  CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
  if (instr->hydrogen()->IsTailCall()) {
    if (NeedsEagerFrame()) __ leave();
    __ jmp(stub.GetCode(isolate()), RelocInfo::CODE_TARGET);
  } else {
    CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
  }
}

@@ -1323,8 +1323,10 @@ LInstruction* LChunkBuilder::DoCallNewArray(HCallNewArray* instr) {
LInstruction* LChunkBuilder::DoCallFunction(HCallFunction* instr) {
  LOperand* context = UseFixed(instr->context(), rsi);
  LOperand* function = UseFixed(instr->function(), rdi);
  LCallFunction* result = new(zone()) LCallFunction(context, function);
  return MarkAsCall(DefineFixed(result, rax), instr);
  LCallFunction* call = new(zone()) LCallFunction(context, function);
  LInstruction* result = DefineFixed(call, rax);
  if (instr->IsTailCall()) return result;
  return MarkAsCall(result, instr);
}

test/mjsunit/keyed-array-call.js (new file)
@@ -0,0 +1,56 @@
// Copyright 2013 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

var a = [function(a) { return a+10; },
         function(a) { return a+20; }];
a.__proto__.test = function(a) { return a+30; }
function f(i) {
  return "r" + (1, a[i](i+1), a[i](i+2));
}

assertEquals("r12", f(0));
assertEquals("r12", f(0));
assertEquals("r23", f(1));
assertEquals("r23", f(1));

// Deopt the stub.
assertEquals("rtest230", f("test"));

var a2 = [function(a) { return a+10; },,
          function(a) { return a+20; }];
a2.__proto__.test = function(a) { return a+30; }
function f2(i) {
  return "r" + (1, a2[i](i+1), a2[i](i+2));
}

assertEquals("r12", f2(0));
assertEquals("r12", f2(0));
assertEquals("r24", f2(2));
assertEquals("r24", f2(2));

// Deopt the stub. This will throw given that undefined is not a function.
assertThrows(function() { f2(1) });
test/mjsunit/regress/clear-keyed-call.js (new file)
@@ -0,0 +1,40 @@
// Copyright 2013 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

// Flags: --expose-gc --allow-natives-syntax

function f(a) {
  a[0](1);
}

f([function(a) { return a; }]);
f([function(a) { return a; }]);
f([function(a) { return a; }]);
%NotifyContextDisposed();
gc();
gc();
gc();