Correctify instanceof and make it optimizable.

The previous hack with HInstanceOfKnownGlobal was not only slower,
but also very brittle and required a lot of weird hacks to support it.
Even more importantly, it wasn't correct, because a map check on the
lhs is never enough for instanceof.

The new approach provides a sane runtime implementation for InstanceOf,
plus a fast case in the InstanceOfStub, combined with a proper
specialization for the case of a known global in CrankShaft, which does
only the prototype chain walk (coupled with a code dependency on the
known global).

As a drive-by fix, also repair the incorrect
Object.prototype.isPrototypeOf implementation.

BUG=v8:4376
LOG=y

Review URL: https://codereview.chromium.org/1304633002

Cr-Commit-Position: refs/heads/master@{#30342}
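For context on the semantics being implemented: the new stub's slow path, the CrankShaft specialization, and Object.prototype.isPrototypeOf all reduce to a walk of the left-hand side's prototype chain. Below is a minimal, self-contained C++ sketch of that check (the types and names are illustrative only, not V8's actual classes); it also shows why a single map check on the lhs cannot decide instanceof, since the result depends on every link of the chain.

```cpp
#include <cstddef>

// Illustrative stand-in for a heap object that exposes its prototype link.
// In V8 the link is reached through the object's Map, as the stubs below do.
struct HeapObjectSketch {
  const HeapObjectSketch* prototype;  // next link in the chain, nullptr == null
};

// Returns true iff `prototype` occurs somewhere on `object`'s prototype chain.
// This is the check that `object instanceof F` boils down to once F's
// "prototype" property has been resolved.
bool HasInPrototypeChain(const HeapObjectSketch* object,
                         const HeapObjectSketch* prototype) {
  for (const HeapObjectSketch* link = object->prototype; link != nullptr;
       link = link->prototype) {
    if (link == prototype) return true;
  }
  return false;  // reached the end of the chain without finding the prototype
}
```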
bmeurer 2015-08-24 21:48:36 -07:00 committed by Commit bot
parent 2090c08d3e
commit 5d875a57fa
76 changed files with 1142 additions and 2932 deletions


@@ -1287,209 +1287,108 @@ void JSEntryStub::Generate(MacroAssembler* masm) {
}
// Uses registers r0 to r4.
// Expected input (depending on whether args are in registers or on the stack):
// * object: r0 or at sp + 1 * kPointerSize.
// * function: r1 or at sp.
//
// An inlined call site may have been generated before calling this stub.
// In this case the offset to the inline sites to patch are passed in r5 and r6.
// (See LCodeGen::DoInstanceOfKnownGlobal)
void InstanceofStub::Generate(MacroAssembler* masm) {
// Call site inlining and patching implies arguments in registers.
DCHECK(HasArgsInRegisters() || !HasCallSiteInlineCheck());
void InstanceOfStub::Generate(MacroAssembler* masm) {
Register const object = r1; // Object (lhs).
Register const function = r0; // Function (rhs).
Register const object_map = r2; // Map of {object}.
Register const function_map = r3; // Map of {function}.
Register const function_prototype = r4; // Prototype of {function}.
Register const scratch = r5;
// Fixed register usage throughout the stub:
const Register object = r0; // Object (lhs).
Register map = r3; // Map of the object.
const Register function = r1; // Function (rhs).
const Register prototype = r4; // Prototype of the function.
const Register scratch = r2;
DCHECK(object.is(InstanceOfDescriptor::LeftRegister()));
DCHECK(function.is(InstanceOfDescriptor::RightRegister()));
Label slow, loop, is_instance, is_not_instance, not_js_object;
// Check if {object} is a smi.
Label object_is_smi;
__ JumpIfSmi(object, &object_is_smi);
if (!HasArgsInRegisters()) {
__ ldr(object, MemOperand(sp, 1 * kPointerSize));
__ ldr(function, MemOperand(sp, 0));
}
// Lookup the {function} and the {object} map in the global instanceof cache.
// Note: This is safe because we clear the global instanceof cache whenever
// we change the prototype of any object.
Label fast_case, slow_case;
__ ldr(object_map, FieldMemOperand(object, HeapObject::kMapOffset));
__ CompareRoot(function, Heap::kInstanceofCacheFunctionRootIndex);
__ b(ne, &fast_case);
__ CompareRoot(object_map, Heap::kInstanceofCacheMapRootIndex);
__ b(ne, &fast_case);
__ LoadRoot(r0, Heap::kInstanceofCacheAnswerRootIndex);
__ Ret();
// Check that the left hand is a JS object and load map.
__ JumpIfSmi(object, &not_js_object);
__ IsObjectJSObjectType(object, map, scratch, &not_js_object);
// If {object} is a smi we can safely return false if {function} is a JS
// function, otherwise we have to miss to the runtime and throw an exception.
__ bind(&object_is_smi);
__ JumpIfSmi(function, &slow_case);
__ CompareObjectType(function, function_map, scratch, JS_FUNCTION_TYPE);
__ b(ne, &slow_case);
__ LoadRoot(r0, Heap::kFalseValueRootIndex);
__ Ret();
// If there is a call site cache don't look in the global cache, but do the
// real lookup and update the call site cache.
if (!HasCallSiteInlineCheck() && !ReturnTrueFalseObject()) {
Label miss;
__ CompareRoot(function, Heap::kInstanceofCacheFunctionRootIndex);
__ b(ne, &miss);
__ CompareRoot(map, Heap::kInstanceofCacheMapRootIndex);
__ b(ne, &miss);
__ LoadRoot(r0, Heap::kInstanceofCacheAnswerRootIndex);
__ Ret(HasArgsInRegisters() ? 0 : 2);
// Fast-case: The {function} must be a valid JSFunction.
__ bind(&fast_case);
__ JumpIfSmi(function, &slow_case);
__ CompareObjectType(function, function_map, scratch, JS_FUNCTION_TYPE);
__ b(ne, &slow_case);
__ bind(&miss);
}
// Ensure that {function} has an instance prototype.
__ ldrb(scratch, FieldMemOperand(function_map, Map::kBitFieldOffset));
__ tst(scratch, Operand(1 << Map::kHasNonInstancePrototype));
__ b(ne, &slow_case);
// Get the prototype of the function.
__ TryGetFunctionPrototype(function, prototype, scratch, &slow, true);
// Ensure that {function} is not bound.
Register const shared_info = scratch;
__ ldr(shared_info,
FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset));
__ ldr(scratch, FieldMemOperand(shared_info,
SharedFunctionInfo::kCompilerHintsOffset));
__ tst(scratch,
Operand(Smi::FromInt(1 << SharedFunctionInfo::kBoundFunction)));
__ b(ne, &slow_case);
// Check that the function prototype is a JS object.
__ JumpIfSmi(prototype, &slow);
__ IsObjectJSObjectType(prototype, scratch, scratch, &slow);
// Get the "prototype" (or initial map) of the {function}.
__ ldr(function_prototype,
FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
__ AssertNotSmi(function_prototype);
// Update the global instanceof or call site inlined cache with the current
// map and function. The cached answer will be set when it is known below.
if (!HasCallSiteInlineCheck()) {
__ StoreRoot(function, Heap::kInstanceofCacheFunctionRootIndex);
__ StoreRoot(map, Heap::kInstanceofCacheMapRootIndex);
} else {
DCHECK(HasArgsInRegisters());
// Patch the (relocated) inlined map check.
// Resolve the prototype if the {function} has an initial map. Afterwards the
// {function_prototype} will be either the JSReceiver prototype object or the
// hole value, which means that no instances of the {function} were created so
// far and hence we should return false.
Label function_prototype_valid;
__ CompareObjectType(function_prototype, scratch, scratch, MAP_TYPE);
__ b(ne, &function_prototype_valid);
__ ldr(function_prototype,
FieldMemOperand(function_prototype, Map::kPrototypeOffset));
__ bind(&function_prototype_valid);
__ AssertNotSmi(function_prototype);
// The map_load_offset was stored in r5
// (See LCodeGen::DoDeferredLInstanceOfKnownGlobal).
const Register map_load_offset = r5;
__ sub(r9, lr, map_load_offset);
// Get the map location in r5 and patch it.
__ GetRelocatedValueLocation(r9, map_load_offset, scratch);
__ ldr(map_load_offset, MemOperand(map_load_offset));
__ str(map, FieldMemOperand(map_load_offset, Cell::kValueOffset));
// Update the global instanceof cache with the current {object} map and
// {function}. The cached answer will be set when it is known below.
__ StoreRoot(function, Heap::kInstanceofCacheFunctionRootIndex);
__ StoreRoot(object_map, Heap::kInstanceofCacheMapRootIndex);
__ mov(scratch, map);
// |map_load_offset| points at the beginning of the cell. Calculate the
// field containing the map.
__ add(function, map_load_offset, Operand(Cell::kValueOffset - 1));
__ RecordWriteField(map_load_offset, Cell::kValueOffset, scratch, function,
kLRHasNotBeenSaved, kDontSaveFPRegs,
OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
}
// Register mapping: r3 is object map and r4 is function prototype.
// Get prototype of object into r2.
__ ldr(scratch, FieldMemOperand(map, Map::kPrototypeOffset));
// We don't need map any more. Use it as a scratch register.
Register scratch2 = map;
map = no_reg;
// Loop through the prototype chain looking for the function prototype.
__ LoadRoot(scratch2, Heap::kNullValueRootIndex);
// Loop through the prototype chain looking for the {function} prototype.
// Assume true, and change to false if not found.
Register const object_prototype = object_map;
Register const null = scratch;
Label done, loop;
__ LoadRoot(r0, Heap::kTrueValueRootIndex);
__ LoadRoot(null, Heap::kNullValueRootIndex);
__ bind(&loop);
__ cmp(scratch, Operand(prototype));
__ b(eq, &is_instance);
__ cmp(scratch, scratch2);
__ b(eq, &is_not_instance);
__ ldr(scratch, FieldMemOperand(scratch, HeapObject::kMapOffset));
__ ldr(scratch, FieldMemOperand(scratch, Map::kPrototypeOffset));
__ jmp(&loop);
Factory* factory = isolate()->factory();
__ ldr(object_prototype, FieldMemOperand(object_map, Map::kPrototypeOffset));
__ cmp(object_prototype, function_prototype);
__ b(eq, &done);
__ cmp(object_prototype, null);
__ ldr(object_map, FieldMemOperand(object_prototype, HeapObject::kMapOffset));
__ b(ne, &loop);
__ LoadRoot(r0, Heap::kFalseValueRootIndex);
__ bind(&done);
__ StoreRoot(r0, Heap::kInstanceofCacheAnswerRootIndex);
__ Ret();
__ bind(&is_instance);
if (!HasCallSiteInlineCheck()) {
__ mov(r0, Operand(Smi::FromInt(0)));
__ StoreRoot(r0, Heap::kInstanceofCacheAnswerRootIndex);
if (ReturnTrueFalseObject()) {
__ Move(r0, factory->true_value());
}
} else {
// Patch the call site to return true.
__ LoadRoot(r0, Heap::kTrueValueRootIndex);
// The bool_load_offset was stored in r6
// (See LCodeGen::DoDeferredLInstanceOfKnownGlobal).
const Register bool_load_offset = r6;
__ sub(r9, lr, bool_load_offset);
// Get the boolean result location in scratch and patch it.
__ GetRelocatedValueLocation(r9, scratch, scratch2);
__ str(r0, MemOperand(scratch));
if (!ReturnTrueFalseObject()) {
__ mov(r0, Operand(Smi::FromInt(0)));
}
}
__ Ret(HasArgsInRegisters() ? 0 : 2);
__ bind(&is_not_instance);
if (!HasCallSiteInlineCheck()) {
__ mov(r0, Operand(Smi::FromInt(1)));
__ StoreRoot(r0, Heap::kInstanceofCacheAnswerRootIndex);
if (ReturnTrueFalseObject()) {
__ Move(r0, factory->false_value());
}
} else {
// Patch the call site to return false.
__ LoadRoot(r0, Heap::kFalseValueRootIndex);
// The bool_load_offset was stored in r6
// (See LCodeGen::DoDeferredLInstanceOfKnownGlobal).
const Register bool_load_offset = r6;
__ sub(r9, lr, bool_load_offset);
;
// Get the boolean result location in scratch and patch it.
__ GetRelocatedValueLocation(r9, scratch, scratch2);
__ str(r0, MemOperand(scratch));
if (!ReturnTrueFalseObject()) {
__ mov(r0, Operand(Smi::FromInt(1)));
}
}
__ Ret(HasArgsInRegisters() ? 0 : 2);
Label object_not_null, object_not_null_or_smi;
__ bind(&not_js_object);
// Before null, smi and string value checks, check that the rhs is a function
// as for a non-function rhs an exception needs to be thrown.
__ JumpIfSmi(function, &slow);
__ CompareObjectType(function, scratch2, scratch, JS_FUNCTION_TYPE);
__ b(ne, &slow);
// Null is not instance of anything.
__ cmp(object, Operand(isolate()->factory()->null_value()));
__ b(ne, &object_not_null);
if (ReturnTrueFalseObject()) {
__ Move(r0, factory->false_value());
} else {
__ mov(r0, Operand(Smi::FromInt(1)));
}
__ Ret(HasArgsInRegisters() ? 0 : 2);
__ bind(&object_not_null);
// Smi values are not instances of anything.
__ JumpIfNotSmi(object, &object_not_null_or_smi);
if (ReturnTrueFalseObject()) {
__ Move(r0, factory->false_value());
} else {
__ mov(r0, Operand(Smi::FromInt(1)));
}
__ Ret(HasArgsInRegisters() ? 0 : 2);
__ bind(&object_not_null_or_smi);
// String values are not instances of anything.
__ IsObjectJSStringType(object, scratch, &slow);
if (ReturnTrueFalseObject()) {
__ Move(r0, factory->false_value());
} else {
__ mov(r0, Operand(Smi::FromInt(1)));
}
__ Ret(HasArgsInRegisters() ? 0 : 2);
// Slow-case. Tail call builtin.
__ bind(&slow);
if (!ReturnTrueFalseObject()) {
if (HasArgsInRegisters()) {
__ Push(r0, r1);
}
__ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION);
} else {
{
FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
__ Push(r0, r1);
__ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_FUNCTION);
}
__ cmp(r0, Operand::Zero());
__ LoadRoot(r0, Heap::kTrueValueRootIndex, eq);
__ LoadRoot(r0, Heap::kFalseValueRootIndex, ne);
__ Ret(HasArgsInRegisters() ? 0 : 2);
}
// Slow-case: Call the runtime function.
__ bind(&slow_case);
__ Push(object, function);
__ TailCallRuntime(Runtime::kInstanceOf, 2, 1);
}
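The fast path of the stub above keys a single-entry global cache on the (function, object map) pair; the cached answer stays valid only because V8 flushes this cache whenever any object's prototype is changed. A hedged C++ sketch of that caching scheme follows (the struct and method names are illustrative; the real cache lives in the root list as kInstanceofCacheFunction/Map/Answer).

```cpp
// Illustrative single-entry instanceof cache keyed by the rhs function and
// the lhs object's map. Cleared whenever any prototype changes, so a hit can
// never return a stale answer.
struct InstanceOfCacheSketch {
  const void* cached_function = nullptr;
  const void* cached_map = nullptr;
  bool cached_answer = false;

  // Returns true on a hit and stores the cached answer in *answer.
  bool Lookup(const void* function, const void* map, bool* answer) const {
    if (function != cached_function || map != cached_map) return false;
    *answer = cached_answer;
    return true;
  }

  // Called after the prototype chain walk has produced an answer.
  void Update(const void* function, const void* map, bool answer) {
    cached_function = function;
    cached_map = map;
    cached_answer = answer;
  }

  // Called whenever the prototype of any object is modified.
  void Clear() { cached_function = cached_map = nullptr; }
};
```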


@@ -46,8 +46,8 @@ const Register StoreGlobalViaContextDescriptor::SlotRegister() { return r2; }
const Register StoreGlobalViaContextDescriptor::ValueRegister() { return r0; }
const Register InstanceofDescriptor::left() { return r0; }
const Register InstanceofDescriptor::right() { return r1; }
const Register InstanceOfDescriptor::LeftRegister() { return r1; }
const Register InstanceOfDescriptor::RightRegister() { return r0; }
const Register ArgumentsAccessReadDescriptor::index() { return r1; }


@@ -923,22 +923,14 @@ void LChunkBuilder::AddInstruction(LInstruction* instr,
if (instr->IsCall()) {
HValue* hydrogen_value_for_lazy_bailout = hydrogen_val;
LInstruction* instruction_needing_environment = NULL;
if (hydrogen_val->HasObservableSideEffects()) {
HSimulate* sim = HSimulate::cast(hydrogen_val->next());
instruction_needing_environment = instr;
sim->ReplayEnvironment(current_block_->last_environment());
hydrogen_value_for_lazy_bailout = sim;
}
LInstruction* bailout = AssignEnvironment(new(zone()) LLazyBailout());
bailout->set_hydrogen_value(hydrogen_value_for_lazy_bailout);
chunk_->AddInstruction(bailout, current_block_);
if (instruction_needing_environment != NULL) {
// Store the lazy deopt environment with the instruction if needed.
// Right now it is only used for LInstanceOfKnownGlobal.
instruction_needing_environment->
SetDeferredLazyDeoptimizationEnvironment(bailout->environment());
}
}
}
@@ -994,22 +986,21 @@ LInstruction* LChunkBuilder::DoArgumentsElements(HArgumentsElements* elems) {
LInstruction* LChunkBuilder::DoInstanceOf(HInstanceOf* instr) {
LOperand* left =
UseFixed(instr->left(), InstanceOfDescriptor::LeftRegister());
LOperand* right =
UseFixed(instr->right(), InstanceOfDescriptor::RightRegister());
LOperand* context = UseFixed(instr->context(), cp);
LInstanceOf* result =
new(zone()) LInstanceOf(context, UseFixed(instr->left(), r0),
UseFixed(instr->right(), r1));
LInstanceOf* result = new (zone()) LInstanceOf(context, left, right);
return MarkAsCall(DefineFixed(result, r0), instr);
}
LInstruction* LChunkBuilder::DoInstanceOfKnownGlobal(
HInstanceOfKnownGlobal* instr) {
LInstanceOfKnownGlobal* result =
new(zone()) LInstanceOfKnownGlobal(
UseFixed(instr->context(), cp),
UseFixed(instr->left(), r0),
FixedTemp(r4));
return MarkAsCall(DefineFixed(result, r0), instr);
LInstruction* LChunkBuilder::DoHasInPrototypeChainAndBranch(
HHasInPrototypeChainAndBranch* instr) {
LOperand* object = UseRegister(instr->object());
LOperand* prototype = UseRegister(instr->prototype());
return new (zone()) LHasInPrototypeChainAndBranch(object, prototype);
}


@@ -83,10 +83,10 @@ class LCodeGen;
V(GetCachedArrayIndex) \
V(Goto) \
V(HasCachedArrayIndexAndBranch) \
V(HasInPrototypeChainAndBranch) \
V(HasInstanceTypeAndBranch) \
V(InnerAllocatedObject) \
V(InstanceOf) \
V(InstanceOfKnownGlobal) \
V(InstructionGap) \
V(Integer32ToDouble) \
V(InvokeFunction) \
@@ -235,8 +235,6 @@ class LInstruction : public ZoneObject {
void set_hydrogen_value(HValue* value) { hydrogen_value_ = value; }
HValue* hydrogen_value() const { return hydrogen_value_; }
virtual void SetDeferredLazyDeoptimizationEnvironment(LEnvironment* env) { }
void MarkAsCall() { bit_field_ = IsCallBits::update(bit_field_, true); }
bool IsCall() const { return IsCallBits::decode(bit_field_); }
@@ -1192,41 +1190,27 @@ class LInstanceOf final : public LTemplateInstruction<1, 3, 0> {
inputs_[2] = right;
}
LOperand* context() { return inputs_[0]; }
LOperand* left() { return inputs_[1]; }
LOperand* right() { return inputs_[2]; }
LOperand* context() const { return inputs_[0]; }
LOperand* left() const { return inputs_[1]; }
LOperand* right() const { return inputs_[2]; }
DECLARE_CONCRETE_INSTRUCTION(InstanceOf, "instance-of")
};
class LInstanceOfKnownGlobal final : public LTemplateInstruction<1, 2, 1> {
class LHasInPrototypeChainAndBranch final : public LControlInstruction<2, 0> {
public:
LInstanceOfKnownGlobal(LOperand* context, LOperand* value, LOperand* temp) {
inputs_[0] = context;
inputs_[1] = value;
temps_[0] = temp;
LHasInPrototypeChainAndBranch(LOperand* object, LOperand* prototype) {
inputs_[0] = object;
inputs_[1] = prototype;
}
LOperand* context() { return inputs_[0]; }
LOperand* value() { return inputs_[1]; }
LOperand* temp() { return temps_[0]; }
LOperand* object() const { return inputs_[0]; }
LOperand* prototype() const { return inputs_[1]; }
DECLARE_CONCRETE_INSTRUCTION(InstanceOfKnownGlobal,
"instance-of-known-global")
DECLARE_HYDROGEN_ACCESSOR(InstanceOfKnownGlobal)
Handle<JSFunction> function() const { return hydrogen()->function(); }
LEnvironment* GetDeferredLazyDeoptimizationEnvironment() {
return lazy_deopt_env_;
}
virtual void SetDeferredLazyDeoptimizationEnvironment(
LEnvironment* env) override {
lazy_deopt_env_ = env;
}
private:
LEnvironment* lazy_deopt_env_;
DECLARE_CONCRETE_INSTRUCTION(HasInPrototypeChainAndBranch,
"has-in-prototype-chain-and-branch")
DECLARE_HYDROGEN_ACCESSOR(HasInPrototypeChainAndBranch)
};


@@ -2150,7 +2150,14 @@ void LCodeGen::EmitBranch(InstrType instr, Condition condition) {
}
template<class InstrType>
template <class InstrType>
void LCodeGen::EmitTrueBranch(InstrType instr, Condition condition) {
int true_block = instr->TrueDestination(chunk_);
__ b(condition, chunk_->GetAssemblyLabel(true_block));
}
template <class InstrType>
void LCodeGen::EmitFalseBranch(InstrType instr, Condition condition) {
int false_block = instr->FalseDestination(chunk_);
__ b(condition, chunk_->GetAssemblyLabel(false_block));
@@ -2725,160 +2732,40 @@ void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) {
void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
DCHECK(ToRegister(instr->context()).is(cp));
DCHECK(ToRegister(instr->left()).is(r0)); // Object is in r0.
DCHECK(ToRegister(instr->right()).is(r1)); // Function is in r1.
InstanceofStub stub(isolate(), InstanceofStub::kArgsInRegisters);
DCHECK(ToRegister(instr->left()).is(InstanceOfDescriptor::LeftRegister()));
DCHECK(ToRegister(instr->right()).is(InstanceOfDescriptor::RightRegister()));
DCHECK(ToRegister(instr->result()).is(r0));
InstanceOfStub stub(isolate());
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
__ cmp(r0, Operand::Zero());
__ mov(r0, Operand(factory()->false_value()), LeaveCC, ne);
__ mov(r0, Operand(factory()->true_value()), LeaveCC, eq);
}
void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
class DeferredInstanceOfKnownGlobal final : public LDeferredCode {
public:
DeferredInstanceOfKnownGlobal(LCodeGen* codegen,
LInstanceOfKnownGlobal* instr)
: LDeferredCode(codegen), instr_(instr) { }
void Generate() override {
codegen()->DoDeferredInstanceOfKnownGlobal(instr_, &map_check_,
&load_bool_);
}
LInstruction* instr() override { return instr_; }
Label* map_check() { return &map_check_; }
Label* load_bool() { return &load_bool_; }
void LCodeGen::DoHasInPrototypeChainAndBranch(
LHasInPrototypeChainAndBranch* instr) {
Register const object = ToRegister(instr->object());
Register const object_map = scratch0();
Register const object_prototype = object_map;
Register const prototype = ToRegister(instr->prototype());
private:
LInstanceOfKnownGlobal* instr_;
Label map_check_;
Label load_bool_;
};
DeferredInstanceOfKnownGlobal* deferred;
deferred = new(zone()) DeferredInstanceOfKnownGlobal(this, instr);
Label done, false_result;
Register object = ToRegister(instr->value());
Register temp = ToRegister(instr->temp());
Register result = ToRegister(instr->result());
// A Smi is not instance of anything.
__ JumpIfSmi(object, &false_result);
// This is the inlined call site instanceof cache. The two occurrences of the
// hole value will be patched to the last map/result pair generated by the
// instanceof stub.
Label cache_miss;
Register map = temp;
__ ldr(map, FieldMemOperand(object, HeapObject::kMapOffset));
{
// Block constant pool emission to ensure the positions of instructions are
// as expected by the patcher. See InstanceofStub::Generate().
Assembler::BlockConstPoolScope block_const_pool(masm());
__ bind(deferred->map_check()); // Label for calculating code patching.
// We use Factory::the_hole_value() on purpose instead of loading from the
// root array to force relocation to be able to later patch with
// the cached map.
Handle<Cell> cell = factory()->NewCell(factory()->the_hole_value());
__ mov(ip, Operand(cell));
__ ldr(ip, FieldMemOperand(ip, Cell::kValueOffset));
__ cmp(map, Operand(ip));
__ b(ne, &cache_miss);
__ bind(deferred->load_bool()); // Label for calculating code patching.
// We use Factory::the_hole_value() on purpose instead of loading from the
// root array to force relocation to be able to later patch
// with true or false.
__ mov(result, Operand(factory()->the_hole_value()));
// The {object} must be a spec object. It's sufficient to know that {object}
// is not a smi, since all other non-spec objects have {null} prototypes and
// will be ruled out below.
if (instr->hydrogen()->ObjectNeedsSmiCheck()) {
__ SmiTst(object);
EmitFalseBranch(instr, eq);
}
__ b(&done);
// The inlined call site cache did not match. Check null and string before
// calling the deferred code.
__ bind(&cache_miss);
// Null is not instance of anything.
__ LoadRoot(ip, Heap::kNullValueRootIndex);
__ cmp(object, Operand(ip));
__ b(eq, &false_result);
// String values are not instances of anything.
Condition is_string = masm_->IsObjectStringType(object, temp);
__ b(is_string, &false_result);
// Go to the deferred code.
__ b(deferred->entry());
__ bind(&false_result);
__ LoadRoot(result, Heap::kFalseValueRootIndex);
// Here result has either true or false. Deferred code also produces true or
// false object.
__ bind(deferred->exit());
__ bind(&done);
}
void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
Label* map_check,
Label* bool_load) {
InstanceofStub::Flags flags = InstanceofStub::kNoFlags;
flags = static_cast<InstanceofStub::Flags>(
flags | InstanceofStub::kArgsInRegisters);
flags = static_cast<InstanceofStub::Flags>(
flags | InstanceofStub::kCallSiteInlineCheck);
flags = static_cast<InstanceofStub::Flags>(
flags | InstanceofStub::kReturnTrueFalseObject);
InstanceofStub stub(isolate(), flags);
PushSafepointRegistersScope scope(this);
LoadContextFromDeferred(instr->context());
__ Move(InstanceofStub::right(), instr->function());
int call_size = CallCodeSize(stub.GetCode(), RelocInfo::CODE_TARGET);
int additional_delta = (call_size / Assembler::kInstrSize) + 4;
{
// Make sure that code size is predictable, since we use specific constant
// offsets in the code to find embedded values.
PredictableCodeSizeScope predictable(
masm_, additional_delta * Assembler::kInstrSize);
// The labels must already be bound since the code has predictable size up
// to the call instruction.
DCHECK(map_check->is_bound());
DCHECK(bool_load->is_bound());
// Make sure we don't emit any additional entries in the constant pool
// before the call to ensure that the CallCodeSize() calculated the
// correct number of instructions for the constant pool load.
{
ConstantPoolUnavailableScope constant_pool_unavailable(masm_);
int map_check_delta =
masm_->InstructionsGeneratedSince(map_check) + additional_delta;
int bool_load_delta =
masm_->InstructionsGeneratedSince(bool_load) + additional_delta;
Label before_push_delta;
__ bind(&before_push_delta);
__ BlockConstPoolFor(additional_delta);
// r5 is used to communicate the offset to the location of the map check.
__ mov(r5, Operand(map_check_delta * kPointerSize));
// r6 is used to communicate the offset to the location of the bool load.
__ mov(r6, Operand(bool_load_delta * kPointerSize));
// The mov above can generate one or two instructions. The delta was
// computed for two instructions, so we need to pad here in case of one
// instruction.
while (masm_->InstructionsGeneratedSince(&before_push_delta) != 4) {
__ nop();
}
}
CallCodeGeneric(stub.GetCode(), RelocInfo::CODE_TARGET, instr,
RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
}
LEnvironment* env = instr->GetDeferredLazyDeoptimizationEnvironment();
safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
// Put the result value (r0) into the result register slot and
// restore all registers.
__ StoreToSafepointRegisterSlot(r0, ToRegister(instr->result()));
// Loop through the {object}'s prototype chain looking for the {prototype}.
__ ldr(object_map, FieldMemOperand(object, HeapObject::kMapOffset));
Label loop;
__ bind(&loop);
__ ldr(object_prototype, FieldMemOperand(object_map, Map::kPrototypeOffset));
__ cmp(object_prototype, prototype);
EmitTrueBranch(instr, eq);
__ CompareRoot(object_prototype, Heap::kNullValueRootIndex);
EmitFalseBranch(instr, eq);
__ ldr(object_map, FieldMemOperand(object_prototype, HeapObject::kMapOffset));
__ b(&loop);
}


@@ -115,8 +115,6 @@ class LCodeGen: public LCodeGenBase {
void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr);
void DoDeferredStringCharFromCode(LStringCharFromCode* instr);
void DoDeferredAllocate(LAllocate* instr);
void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
Label* map_check, Label* bool_load);
void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);
void DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr,
Register result,
@@ -272,7 +270,9 @@ class LCodeGen: public LCodeGenBase {
// EmitBranch expects to be the last instruction of a block.
template<class InstrType>
void EmitBranch(InstrType instr, Condition condition);
template<class InstrType>
template <class InstrType>
void EmitTrueBranch(InstrType instr, Condition condition);
template <class InstrType>
void EmitFalseBranch(InstrType instr, Condition condition);
void EmitNumberUntagD(LNumberUntagD* instr, Register input,
DwVfpRegister result, NumberUntagDMode mode);


@@ -2216,34 +2216,8 @@ void MacroAssembler::GetMapConstructor(Register result, Register map,
}
void MacroAssembler::TryGetFunctionPrototype(Register function,
Register result,
Register scratch,
Label* miss,
bool miss_on_bound_function) {
Label non_instance;
if (miss_on_bound_function) {
// Check that the receiver isn't a smi.
JumpIfSmi(function, miss);
// Check that the function really is a function. Load map into result reg.
CompareObjectType(function, result, scratch, JS_FUNCTION_TYPE);
b(ne, miss);
ldr(scratch,
FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset));
ldr(scratch,
FieldMemOperand(scratch, SharedFunctionInfo::kCompilerHintsOffset));
tst(scratch,
Operand(Smi::FromInt(1 << SharedFunctionInfo::kBoundFunction)));
b(ne, miss);
// Make sure that the function has an instance prototype.
ldrb(scratch, FieldMemOperand(result, Map::kBitFieldOffset));
tst(scratch, Operand(1 << Map::kHasNonInstancePrototype));
b(ne, &non_instance);
}
void MacroAssembler::TryGetFunctionPrototype(Register function, Register result,
Register scratch, Label* miss) {
// Get the prototype or initial map from the function.
ldr(result,
FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
@@ -2263,15 +2237,6 @@ void MacroAssembler::TryGetFunctionPrototype(Register function,
// Get the prototype from the initial map.
ldr(result, FieldMemOperand(result, Map::kPrototypeOffset));
if (miss_on_bound_function) {
jmp(&done);
// Non-instance prototype: Fetch prototype from constructor field
// in initial map.
bind(&non_instance);
GetMapConstructor(result, result, scratch, ip);
}
// All done.
bind(&done);
}
@@ -3386,75 +3351,6 @@ void MacroAssembler::CallCFunctionHelper(Register function,
}
void MacroAssembler::GetRelocatedValueLocation(Register ldr_location,
Register result,
Register scratch) {
Label small_constant_pool_load, load_result;
ldr(result, MemOperand(ldr_location));
if (FLAG_enable_embedded_constant_pool) {
// Check if this is an extended constant pool load.
and_(scratch, result, Operand(GetConsantPoolLoadMask()));
teq(scratch, Operand(GetConsantPoolLoadPattern()));
b(eq, &small_constant_pool_load);
if (emit_debug_code()) {
// Check that the instruction sequence is:
// movw reg, #offset_low
// movt reg, #offset_high
// ldr reg, [pp, reg]
Instr patterns[] = {GetMovWPattern(), GetMovTPattern(),
GetLdrPpRegOffsetPattern()};
for (int i = 0; i < 3; i++) {
ldr(result, MemOperand(ldr_location, i * kInstrSize));
and_(result, result, Operand(patterns[i]));
cmp(result, Operand(patterns[i]));
Check(eq, kTheInstructionToPatchShouldBeALoadFromConstantPool);
}
// Result was clobbered. Restore it.
ldr(result, MemOperand(ldr_location));
}
// Get the offset into the constant pool. First extract movw immediate into
// result.
and_(scratch, result, Operand(0xfff));
mov(ip, Operand(result, LSR, 4));
and_(ip, ip, Operand(0xf000));
orr(result, scratch, Operand(ip));
// Then extract movt immediate and or into result.
ldr(scratch, MemOperand(ldr_location, kInstrSize));
and_(ip, scratch, Operand(0xf0000));
orr(result, result, Operand(ip, LSL, 12));
and_(scratch, scratch, Operand(0xfff));
orr(result, result, Operand(scratch, LSL, 16));
b(&load_result);
}
bind(&small_constant_pool_load);
if (emit_debug_code()) {
// Check that the instruction is a ldr reg, [<pc or pp> + offset] .
and_(result, result, Operand(GetConsantPoolLoadPattern()));
cmp(result, Operand(GetConsantPoolLoadPattern()));
Check(eq, kTheInstructionToPatchShouldBeALoadFromConstantPool);
// Result was clobbered. Restore it.
ldr(result, MemOperand(ldr_location));
}
// Get the offset into the constant pool.
const uint32_t kLdrOffsetMask = (1 << 12) - 1;
and_(result, result, Operand(kLdrOffsetMask));
bind(&load_result);
// Get the address of the constant.
if (FLAG_enable_embedded_constant_pool) {
add(result, pp, Operand(result));
} else {
add(result, ldr_location, Operand(result));
add(result, result, Operand(Instruction::kPCReadOffset));
}
}
void MacroAssembler::CheckPageFlag(
Register object,
Register scratch,


@@ -851,11 +851,8 @@ class MacroAssembler: public Assembler {
// function and jumps to the miss label if the fast checks fail. The
// function register will be untouched; the other registers may be
// clobbered.
void TryGetFunctionPrototype(Register function,
Register result,
Register scratch,
Label* miss,
bool miss_on_bound_function = false);
void TryGetFunctionPrototype(Register function, Register result,
Register scratch, Label* miss);
// Compare object type for heap object. heap_object contains a non-Smi
// whose object type should be compared with the given type. This both
@@ -1375,14 +1372,6 @@ class MacroAssembler: public Assembler {
Register value,
uint32_t encoding_mask);
// ---------------------------------------------------------------------------
// Patching helpers.
// Get the location of a relocated constant (its address in the constant pool)
// from its load site.
void GetRelocatedValueLocation(Register ldr_location, Register result,
Register scratch);
void ClampUint8(Register output_reg, Register input_reg);


@@ -1505,191 +1505,107 @@ void LoadIndexedStringStub::Generate(MacroAssembler* masm) {
}
void InstanceofStub::Generate(MacroAssembler* masm) {
// Stack on entry:
// jssp[0]: function.
// jssp[8]: object.
//
// Returns result in x0. Zero indicates instanceof, smi 1 indicates not
// instanceof.
void InstanceOfStub::Generate(MacroAssembler* masm) {
Register const object = x1; // Object (lhs).
Register const function = x0; // Function (rhs).
Register const object_map = x2; // Map of {object}.
Register const function_map = x3; // Map of {function}.
Register const function_prototype = x4; // Prototype of {function}.
Register const scratch = x5;
Register result = x0;
Register function = right();
Register object = left();
Register scratch1 = x6;
Register scratch2 = x7;
Register res_true = x8;
Register res_false = x9;
// Only used if there was an inline map check site. (See
// LCodeGen::DoInstanceOfKnownGlobal().)
Register map_check_site = x4;
// Delta for the instructions generated between the inline map check and the
// instruction setting the result.
const int32_t kDeltaToLoadBoolResult = 4 * kInstructionSize;
DCHECK(object.is(InstanceOfDescriptor::LeftRegister()));
DCHECK(function.is(InstanceOfDescriptor::RightRegister()));
Label not_js_object, slow;
// Check if {object} is a smi.
Label object_is_smi;
__ JumpIfSmi(object, &object_is_smi);
if (!HasArgsInRegisters()) {
__ Pop(function, object);
}
if (ReturnTrueFalseObject()) {
__ LoadTrueFalseRoots(res_true, res_false);
} else {
// This is counter-intuitive, but correct.
__ Mov(res_true, Smi::FromInt(0));
__ Mov(res_false, Smi::FromInt(1));
}
// Check that the left hand side is a JS object and load its map as a side
// effect.
Register map = x12;
__ JumpIfSmi(object, &not_js_object);
__ IsObjectJSObjectType(object, map, scratch2, &not_js_object);
// If there is a call site cache, don't look in the global cache, but do the
// real lookup and update the call site cache.
if (!HasCallSiteInlineCheck() && !ReturnTrueFalseObject()) {
Label miss;
__ JumpIfNotRoot(function, Heap::kInstanceofCacheFunctionRootIndex, &miss);
__ JumpIfNotRoot(map, Heap::kInstanceofCacheMapRootIndex, &miss);
__ LoadRoot(result, Heap::kInstanceofCacheAnswerRootIndex);
__ Ret();
__ Bind(&miss);
}
// Get the prototype of the function.
Register prototype = x13;
__ TryGetFunctionPrototype(function, prototype, scratch2, &slow,
MacroAssembler::kMissOnBoundFunction);
// Check that the function prototype is a JS object.
__ JumpIfSmi(prototype, &slow);
__ IsObjectJSObjectType(prototype, scratch1, scratch2, &slow);
// Update the global instanceof or call site inlined cache with the current
// map and function. The cached answer will be set when it is known below.
if (HasCallSiteInlineCheck()) {
// Patch the (relocated) inlined map check.
__ GetRelocatedValueLocation(map_check_site, scratch1);
// We have a cell, so need another level of dereferencing.
__ Ldr(scratch1, MemOperand(scratch1));
__ Str(map, FieldMemOperand(scratch1, Cell::kValueOffset));
__ Mov(x14, map);
// |scratch1| points at the beginning of the cell. Calculate the
// field containing the map.
__ Add(function, scratch1, Operand(Cell::kValueOffset - 1));
__ RecordWriteField(scratch1, Cell::kValueOffset, x14, function,
kLRHasNotBeenSaved, kDontSaveFPRegs,
OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
} else {
__ StoreRoot(function, Heap::kInstanceofCacheFunctionRootIndex);
__ StoreRoot(map, Heap::kInstanceofCacheMapRootIndex);
}
Label return_true, return_result;
Register smi_value = scratch1;
{
// Loop through the prototype chain looking for the function prototype.
Register chain_map = x1;
Register chain_prototype = x14;
Register null_value = x15;
Label loop;
__ Ldr(chain_prototype, FieldMemOperand(map, Map::kPrototypeOffset));
__ LoadRoot(null_value, Heap::kNullValueRootIndex);
// Speculatively set a result.
__ Mov(result, res_false);
if (!HasCallSiteInlineCheck() && ReturnTrueFalseObject()) {
// Value to store in the cache cannot be an object.
__ Mov(smi_value, Smi::FromInt(1));
}
__ Bind(&loop);
// If the chain prototype is the object prototype, return true.
__ Cmp(chain_prototype, prototype);
__ B(eq, &return_true);
// If the chain prototype is null, we've reached the end of the chain, so
// return false.
__ Cmp(chain_prototype, null_value);
__ B(eq, &return_result);
// Otherwise, load the next prototype in the chain, and loop.
__ Ldr(chain_map, FieldMemOperand(chain_prototype, HeapObject::kMapOffset));
__ Ldr(chain_prototype, FieldMemOperand(chain_map, Map::kPrototypeOffset));
__ B(&loop);
}
// Return sequence when no arguments are on the stack.
// We cannot fall through to here.
__ Bind(&return_true);
__ Mov(result, res_true);
if (!HasCallSiteInlineCheck() && ReturnTrueFalseObject()) {
// Value to store in the cache cannot be an object.
__ Mov(smi_value, Smi::FromInt(0));
}
__ Bind(&return_result);
if (HasCallSiteInlineCheck()) {
DCHECK(ReturnTrueFalseObject());
__ Add(map_check_site, map_check_site, kDeltaToLoadBoolResult);
__ GetRelocatedValueLocation(map_check_site, scratch2);
__ Str(result, MemOperand(scratch2));
} else {
Register cached_value = ReturnTrueFalseObject() ? smi_value : result;
__ StoreRoot(cached_value, Heap::kInstanceofCacheAnswerRootIndex);
}
// Lookup the {function} and the {object} map in the global instanceof cache.
// Note: This is safe because we clear the global instanceof cache whenever
// we change the prototype of any object.
Label fast_case, slow_case;
__ Ldr(object_map, FieldMemOperand(object, HeapObject::kMapOffset));
__ JumpIfNotRoot(function, Heap::kInstanceofCacheFunctionRootIndex,
&fast_case);
__ JumpIfNotRoot(object_map, Heap::kInstanceofCacheMapRootIndex, &fast_case);
__ LoadRoot(x0, Heap::kInstanceofCacheAnswerRootIndex);
__ Ret();
Label object_not_null, object_not_null_or_smi;
__ Bind(&not_js_object);
Register object_type = x14;
// x0 result result return register (uninit)
// x10 function pointer to function
// x11 object pointer to object
// x14 object_type type of object (uninit)
// Before null, smi and string checks, check that the rhs is a function.
// For a non-function rhs, an exception must be thrown.
__ JumpIfSmi(function, &slow);
__ JumpIfNotObjectType(
function, scratch1, object_type, JS_FUNCTION_TYPE, &slow);
__ Mov(result, res_false);
// Null is not instance of anything.
__ Cmp(object, Operand(isolate()->factory()->null_value()));
__ B(ne, &object_not_null);
// If {object} is a smi we can safely return false if {function} is a JS
// function, otherwise we have to miss to the runtime and throw an exception.
__ Bind(&object_is_smi);
__ JumpIfSmi(function, &slow_case);
__ JumpIfNotObjectType(function, function_map, scratch, JS_FUNCTION_TYPE,
&slow_case);
__ LoadRoot(x0, Heap::kFalseValueRootIndex);
__ Ret();
__ Bind(&object_not_null);
// Smi values are not instances of anything.
__ JumpIfNotSmi(object, &object_not_null_or_smi);
// Fast-case: The {function} must be a valid JSFunction.
__ Bind(&fast_case);
__ JumpIfSmi(function, &slow_case);
__ JumpIfNotObjectType(function, function_map, scratch, JS_FUNCTION_TYPE,
&slow_case);
// Ensure that {function} has an instance prototype.
__ Ldrb(scratch, FieldMemOperand(function_map, Map::kBitFieldOffset));
__ Tbnz(scratch, Map::kHasNonInstancePrototype, &slow_case);
// Ensure that {function} is not bound.
Register const shared_info = scratch;
Register const scratch_w = scratch.W();
__ Ldr(shared_info,
FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset));
// On 64-bit platforms, compiler hints field is not a smi. See definition of
// kCompilerHintsOffset in src/objects.h.
__ Ldr(scratch_w, FieldMemOperand(shared_info,
SharedFunctionInfo::kCompilerHintsOffset));
__ Tbnz(scratch_w, SharedFunctionInfo::kBoundFunction, &slow_case);
// Get the "prototype" (or initial map) of the {function}.
__ Ldr(function_prototype,
FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
__ AssertNotSmi(function_prototype);
// Resolve the prototype if the {function} has an initial map. Afterwards the
// {function_prototype} will be either the JSReceiver prototype object or the
// hole value, which means that no instances of the {function} were created so
// far and hence we should return false.
Label function_prototype_valid;
__ JumpIfNotObjectType(function_prototype, scratch, scratch, MAP_TYPE,
&function_prototype_valid);
__ Ldr(function_prototype,
FieldMemOperand(function_prototype, Map::kPrototypeOffset));
__ Bind(&function_prototype_valid);
__ AssertNotSmi(function_prototype);
// Update the global instanceof cache with the current {object} map and
// {function}. The cached answer will be set when it is known below.
__ StoreRoot(function, Heap::kInstanceofCacheFunctionRootIndex);
__ StoreRoot(object_map, Heap::kInstanceofCacheMapRootIndex);
// Loop through the prototype chain looking for the {function} prototype.
// Assume true, and change to false if not found.
Register const object_prototype = object_map;
Register const null = scratch;
Label done, loop;
__ LoadRoot(x0, Heap::kTrueValueRootIndex);
__ LoadRoot(null, Heap::kNullValueRootIndex);
__ Bind(&loop);
__ Ldr(object_prototype, FieldMemOperand(object_map, Map::kPrototypeOffset));
__ Cmp(object_prototype, function_prototype);
__ B(eq, &done);
__ Cmp(object_prototype, null);
__ Ldr(object_map, FieldMemOperand(object_prototype, HeapObject::kMapOffset));
__ B(ne, &loop);
__ LoadRoot(x0, Heap::kFalseValueRootIndex);
__ Bind(&done);
__ StoreRoot(x0, Heap::kInstanceofCacheAnswerRootIndex);
__ Ret();
__ Bind(&object_not_null_or_smi);
// String values are not instances of anything.
__ IsObjectJSStringType(object, scratch2, &slow);
__ Ret();
// Slow-case. Tail call builtin.
__ Bind(&slow);
{
FrameScope scope(masm, StackFrame::INTERNAL);
// Arguments have either been passed into registers or have been previously
// popped. We need to push them before calling builtin.
__ Push(object, function);
__ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_FUNCTION);
}
if (ReturnTrueFalseObject()) {
// Reload true/false because they were clobbered in the builtin call.
__ LoadTrueFalseRoots(res_true, res_false);
__ Cmp(result, 0);
__ Csel(result, res_true, res_false, eq);
}
__ Ret();
// Slow-case: Call the runtime function.
__ bind(&slow_case);
__ Push(object, function);
__ TailCallRuntime(Runtime::kInstanceOf, 2, 1);
}
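Both the ARM and ARM64 stubs resolve the right-hand side's prototype through the function's prototype-or-initial-map slot: when the function already has an initial map, the slot holds that Map and the real prototype is read from the map's prototype field; otherwise the slot holds the prototype directly, or the hole when no instances have been created yet, in which case the walk can only end in false. A small sketch of that resolution follows, again with illustrative types rather than V8's.

```cpp
// Illustrative model of JSFunction's prototype-or-initial-map slot and how
// the stub turns it into the prototype used for the chain walk.
enum class PrototypeSlotKind { kInitialMap, kPrototype, kHole };

struct PrototypeSlotSketch {
  PrototypeSlotKind kind;
  const void* map_prototype;  // prototype stored in the initial map, if any
  const void* prototype;      // prototype stored directly, if any
};

// Returns the prototype to compare against during the chain walk, or nullptr
// when the slot still holds the hole (no instances yet, so the answer is false).
const void* ResolveFunctionPrototype(const PrototypeSlotSketch& slot) {
  switch (slot.kind) {
    case PrototypeSlotKind::kInitialMap:
      return slot.map_prototype;  // corresponds to the Map::kPrototypeOffset load
    case PrototypeSlotKind::kPrototype:
      return slot.prototype;
    case PrototypeSlotKind::kHole:
    default:
      return nullptr;
  }
}
```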


@@ -46,16 +46,8 @@ const Register StoreGlobalViaContextDescriptor::SlotRegister() { return x2; }
const Register StoreGlobalViaContextDescriptor::ValueRegister() { return x0; }
const Register InstanceofDescriptor::left() {
// Object to check (instanceof lhs).
return x11;
}
const Register InstanceofDescriptor::right() {
// Constructor function (instanceof rhs).
return x10;
}
const Register InstanceOfDescriptor::LeftRegister() { return x1; }
const Register InstanceOfDescriptor::RightRegister() { return x0; }
const Register ArgumentsAccessReadDescriptor::index() { return x1; }


@@ -765,22 +765,14 @@ void LChunkBuilder::AddInstruction(LInstruction* instr,
if (instr->IsCall()) {
HValue* hydrogen_value_for_lazy_bailout = hydrogen_val;
LInstruction* instruction_needing_environment = NULL;
if (hydrogen_val->HasObservableSideEffects()) {
HSimulate* sim = HSimulate::cast(hydrogen_val->next());
instruction_needing_environment = instr;
sim->ReplayEnvironment(current_block_->last_environment());
hydrogen_value_for_lazy_bailout = sim;
}
LInstruction* bailout = AssignEnvironment(new(zone()) LLazyBailout());
bailout->set_hydrogen_value(hydrogen_value_for_lazy_bailout);
chunk_->AddInstruction(bailout, current_block_);
if (instruction_needing_environment != NULL) {
// Store the lazy deopt environment with the instruction if needed.
// Right now it is only used for LInstanceOfKnownGlobal.
instruction_needing_environment->
SetDeferredLazyDeoptimizationEnvironment(bailout->environment());
}
}
}
@@ -1586,21 +1578,22 @@ LInstruction* LChunkBuilder::DoInnerAllocatedObject(
LInstruction* LChunkBuilder::DoInstanceOf(HInstanceOf* instr) {
LOperand* left =
UseFixed(instr->left(), InstanceOfDescriptor::LeftRegister());
LOperand* right =
UseFixed(instr->right(), InstanceOfDescriptor::RightRegister());
LOperand* context = UseFixed(instr->context(), cp);
LInstanceOf* result = new(zone()) LInstanceOf(
context,
UseFixed(instr->left(), InstanceofStub::left()),
UseFixed(instr->right(), InstanceofStub::right()));
LInstanceOf* result = new (zone()) LInstanceOf(context, left, right);
return MarkAsCall(DefineFixed(result, x0), instr);
}
LInstruction* LChunkBuilder::DoInstanceOfKnownGlobal(
HInstanceOfKnownGlobal* instr) {
LInstanceOfKnownGlobal* result = new(zone()) LInstanceOfKnownGlobal(
UseFixed(instr->context(), cp),
UseFixed(instr->left(), InstanceofStub::left()));
return MarkAsCall(DefineFixed(result, x0), instr);
LInstruction* LChunkBuilder::DoHasInPrototypeChainAndBranch(
HHasInPrototypeChainAndBranch* instr) {
LOperand* object = UseRegister(instr->object());
LOperand* prototype = UseRegister(instr->prototype());
LOperand* scratch = TempRegister();
return new (zone()) LHasInPrototypeChainAndBranch(object, prototype, scratch);
}


@@ -86,10 +86,10 @@ class LCodeGen;
V(GetCachedArrayIndex) \
V(Goto) \
V(HasCachedArrayIndexAndBranch) \
V(HasInPrototypeChainAndBranch) \
V(HasInstanceTypeAndBranch) \
V(InnerAllocatedObject) \
V(InstanceOf) \
V(InstanceOfKnownGlobal) \
V(InstructionGap) \
V(Integer32ToDouble) \
V(InvokeFunction) \
@@ -246,8 +246,6 @@ class LInstruction : public ZoneObject {
void set_hydrogen_value(HValue* value) { hydrogen_value_ = value; }
HValue* hydrogen_value() const { return hydrogen_value_; }
virtual void SetDeferredLazyDeoptimizationEnvironment(LEnvironment* env) { }
void MarkAsCall() { bit_field_ = IsCallBits::update(bit_field_, true); }
bool IsCall() const { return IsCallBits::decode(bit_field_); }
@@ -1481,39 +1479,30 @@ class LInstanceOf final : public LTemplateInstruction<1, 3, 0> {
inputs_[2] = right;
}
LOperand* context() { return inputs_[0]; }
LOperand* left() { return inputs_[1]; }
LOperand* right() { return inputs_[2]; }
LOperand* context() const { return inputs_[0]; }
LOperand* left() const { return inputs_[1]; }
LOperand* right() const { return inputs_[2]; }
DECLARE_CONCRETE_INSTRUCTION(InstanceOf, "instance-of")
};
class LInstanceOfKnownGlobal final : public LTemplateInstruction<1, 2, 0> {
class LHasInPrototypeChainAndBranch final : public LControlInstruction<2, 1> {
public:
LInstanceOfKnownGlobal(LOperand* context, LOperand* value) {
inputs_[0] = context;
inputs_[1] = value;
LHasInPrototypeChainAndBranch(LOperand* object, LOperand* prototype,
LOperand* scratch) {
inputs_[0] = object;
inputs_[1] = prototype;
temps_[0] = scratch;
}
LOperand* context() { return inputs_[0]; }
LOperand* value() { return inputs_[1]; }
LOperand* object() const { return inputs_[0]; }
LOperand* prototype() const { return inputs_[1]; }
LOperand* scratch() const { return temps_[0]; }
DECLARE_CONCRETE_INSTRUCTION(InstanceOfKnownGlobal,
"instance-of-known-global")
DECLARE_HYDROGEN_ACCESSOR(InstanceOfKnownGlobal)
Handle<JSFunction> function() const { return hydrogen()->function(); }
LEnvironment* GetDeferredLazyDeoptimizationEnvironment() {
return lazy_deopt_env_;
}
virtual void SetDeferredLazyDeoptimizationEnvironment(
LEnvironment* env) override {
lazy_deopt_env_ = env;
}
private:
LEnvironment* lazy_deopt_env_;
DECLARE_CONCRETE_INSTRUCTION(HasInPrototypeChainAndBranch,
"has-in-prototype-chain-and-branch")
DECLARE_HYDROGEN_ACCESSOR(HasInPrototypeChainAndBranch)
};


@@ -3007,135 +3007,39 @@ void LCodeGen::DoInnerAllocatedObject(LInnerAllocatedObject* instr) {
void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
DCHECK(ToRegister(instr->context()).is(cp));
// Assert that the arguments are in the registers expected by InstanceofStub.
DCHECK(ToRegister(instr->left()).Is(InstanceofStub::left()));
DCHECK(ToRegister(instr->right()).Is(InstanceofStub::right()));
InstanceofStub stub(isolate(), InstanceofStub::kArgsInRegisters);
DCHECK(ToRegister(instr->left()).is(InstanceOfDescriptor::LeftRegister()));
DCHECK(ToRegister(instr->right()).is(InstanceOfDescriptor::RightRegister()));
DCHECK(ToRegister(instr->result()).is(x0));
InstanceOfStub stub(isolate());
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
// InstanceofStub returns a result in x0:
// 0 => not an instance
// smi 1 => instance.
__ Cmp(x0, 0);
__ LoadTrueFalseRoots(x0, x1);
__ Csel(x0, x0, x1, eq);
}
void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
class DeferredInstanceOfKnownGlobal: public LDeferredCode {
public:
DeferredInstanceOfKnownGlobal(LCodeGen* codegen,
LInstanceOfKnownGlobal* instr)
: LDeferredCode(codegen), instr_(instr) { }
virtual void Generate() {
codegen()->DoDeferredInstanceOfKnownGlobal(instr_);
}
virtual LInstruction* instr() { return instr_; }
private:
LInstanceOfKnownGlobal* instr_;
};
void LCodeGen::DoHasInPrototypeChainAndBranch(
LHasInPrototypeChainAndBranch* instr) {
Register const object = ToRegister(instr->object());
Register const object_map = ToRegister(instr->scratch());
Register const object_prototype = object_map;
Register const prototype = ToRegister(instr->prototype());
DeferredInstanceOfKnownGlobal* deferred =
new(zone()) DeferredInstanceOfKnownGlobal(this, instr);
Label map_check, return_false, cache_miss, done;
Register object = ToRegister(instr->value());
Register result = ToRegister(instr->result());
// x4 is expected in the associated deferred code and stub.
Register map_check_site = x4;
Register map = x5;
// This instruction is marked as call. We can clobber any register.
DCHECK(instr->IsMarkedAsCall());
// We must take into account that object is in x11.
DCHECK(object.Is(x11));
Register scratch = x10;
// A Smi is not instance of anything.
__ JumpIfSmi(object, &return_false);
// This is the inlined call site instanceof cache. The two occurrences of the
// hole value will be patched to the last map/result pair generated by the
// instanceof stub.
__ Ldr(map, FieldMemOperand(object, HeapObject::kMapOffset));
{
// Below we use Factory::the_hole_value() on purpose instead of loading from
// the root array to force relocation and later be able to patch with a
// custom value.
InstructionAccurateScope scope(masm(), 5);
__ bind(&map_check);
// Will be patched with the cached map.
Handle<Cell> cell = factory()->NewCell(factory()->the_hole_value());
__ ldr(scratch, Immediate(cell));
__ ldr(scratch, FieldMemOperand(scratch, Cell::kValueOffset));
__ cmp(map, scratch);
__ b(&cache_miss, ne);
// The address of this instruction is computed relative to the map check
// above, so check the size of the code generated.
DCHECK(masm()->InstructionsGeneratedSince(&map_check) == 4);
// Will be patched with the cached result.
__ ldr(result, Immediate(factory()->the_hole_value()));
// The {object} must be a spec object. It's sufficient to know that {object}
// is not a smi, since all other non-spec objects have {null} prototypes and
// will be ruled out below.
if (instr->hydrogen()->ObjectNeedsSmiCheck()) {
__ JumpIfSmi(object, instr->FalseLabel(chunk_));
}
__ B(&done);
// The inlined call site cache did not match.
// Check null and string before calling the deferred code.
__ Bind(&cache_miss);
// Compute the address of the map check. It must not be clobbered until the
// InstanceOfStub has used it.
__ Adr(map_check_site, &map_check);
// Null is not instance of anything.
__ JumpIfRoot(object, Heap::kNullValueRootIndex, &return_false);
// String values are not instances of anything.
// Return false if the object is a string. Otherwise, jump to the deferred
// code.
// Note that we can't jump directly to deferred code from
// IsObjectJSStringType, because it uses tbz for the jump and the deferred
// code can be out of range.
__ IsObjectJSStringType(object, scratch, NULL, &return_false);
__ B(deferred->entry());
__ Bind(&return_false);
__ LoadRoot(result, Heap::kFalseValueRootIndex);
// Here result is either true or false.
__ Bind(deferred->exit());
__ Bind(&done);
}
void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
Register result = ToRegister(instr->result());
DCHECK(result.Is(x0)); // InstanceofStub returns its result in x0.
InstanceofStub::Flags flags = InstanceofStub::kNoFlags;
flags = static_cast<InstanceofStub::Flags>(
flags | InstanceofStub::kArgsInRegisters);
flags = static_cast<InstanceofStub::Flags>(
flags | InstanceofStub::kReturnTrueFalseObject);
flags = static_cast<InstanceofStub::Flags>(
flags | InstanceofStub::kCallSiteInlineCheck);
PushSafepointRegistersScope scope(this);
LoadContextFromDeferred(instr->context());
// Prepare InstanceofStub arguments.
DCHECK(ToRegister(instr->value()).Is(InstanceofStub::left()));
__ LoadObject(InstanceofStub::right(), instr->function());
InstanceofStub stub(isolate(), flags);
CallCodeGeneric(stub.GetCode(),
RelocInfo::CODE_TARGET,
instr,
RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
LEnvironment* env = instr->GetDeferredLazyDeoptimizationEnvironment();
safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
// Put the result value into the result register slot.
__ StoreToSafepointRegisterSlot(result, result);
// Loop through the {object}'s prototype chain looking for the {prototype}.
__ Ldr(object_map, FieldMemOperand(object, HeapObject::kMapOffset));
Label loop;
__ Bind(&loop);
__ Ldr(object_prototype, FieldMemOperand(object_map, Map::kPrototypeOffset));
__ Cmp(object_prototype, prototype);
__ B(eq, instr->TrueLabel(chunk_));
__ CompareRoot(object_prototype, Heap::kNullValueRootIndex);
__ B(eq, instr->FalseLabel(chunk_));
__ Ldr(object_map, FieldMemOperand(object_prototype, HeapObject::kMapOffset));
__ B(&loop);
}


@@ -132,7 +132,6 @@ class LCodeGen: public LCodeGenBase {
LOperand* temp1,
LOperand* temp2);
void DoDeferredAllocate(LAllocate* instr);
void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr);
void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);
void DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr,
Register result,


@@ -3587,35 +3587,10 @@ void MacroAssembler::GetMapConstructor(Register result, Register map,
}
void MacroAssembler::TryGetFunctionPrototype(Register function,
Register result,
Register scratch,
Label* miss,
BoundFunctionAction action) {
void MacroAssembler::TryGetFunctionPrototype(Register function, Register result,
Register scratch, Label* miss) {
DCHECK(!AreAliased(function, result, scratch));
Label non_instance;
if (action == kMissOnBoundFunction) {
// Check that the receiver isn't a smi.
JumpIfSmi(function, miss);
// Check that the function really is a function. Load map into result reg.
JumpIfNotObjectType(function, result, scratch, JS_FUNCTION_TYPE, miss);
Register scratch_w = scratch.W();
Ldr(scratch,
FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset));
// On 64-bit platforms, compiler hints field is not a smi. See definition of
// kCompilerHintsOffset in src/objects.h.
Ldr(scratch_w,
FieldMemOperand(scratch, SharedFunctionInfo::kCompilerHintsOffset));
Tbnz(scratch, SharedFunctionInfo::kBoundFunction, miss);
// Make sure that the function has an instance prototype.
Ldrb(scratch, FieldMemOperand(result, Map::kBitFieldOffset));
Tbnz(scratch, Map::kHasNonInstancePrototype, &non_instance);
}
// Get the prototype or initial map from the function.
Ldr(result,
FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
@@ -3632,15 +3607,6 @@ void MacroAssembler::TryGetFunctionPrototype(Register function,
// Get the prototype from the initial map.
Ldr(result, FieldMemOperand(result, Map::kPrototypeOffset));
if (action == kMissOnBoundFunction) {
B(&done);
// Non-instance prototype: fetch prototype from constructor field in initial
// map.
Bind(&non_instance);
GetMapConstructor(result, result, scratch, scratch);
}
// All done.
Bind(&done);
}
@@ -4441,24 +4407,6 @@ void MacroAssembler::JumpIfDictionaryInPrototypeChain(
}
void MacroAssembler::GetRelocatedValueLocation(Register ldr_location,
Register result) {
DCHECK(!result.Is(ldr_location));
const uint32_t kLdrLitOffset_lsb = 5;
const uint32_t kLdrLitOffset_width = 19;
Ldr(result, MemOperand(ldr_location));
if (emit_debug_code()) {
And(result, result, LoadLiteralFMask);
Cmp(result, LoadLiteralFixed);
Check(eq, kTheInstructionToPatchShouldBeAnLdrLiteral);
// The instruction was clobbered. Reload it.
Ldr(result, MemOperand(ldr_location));
}
Sbfx(result, result, kLdrLitOffset_lsb, kLdrLitOffset_width);
Add(result, ldr_location, Operand(result, LSL, kWordSizeInBytesLog2));
}
void MacroAssembler::EnsureNotWhite(
Register value,
Register bitmap_scratch,


@@ -1364,26 +1364,13 @@ class MacroAssembler : public Assembler {
// ---------------------------------------------------------------------------
// Support functions.
// Try to get function prototype of a function and puts the value in the
// result register. Checks that the function really is a function and jumps
// to the miss label if the fast checks fail. The function register will be
// untouched; the other registers may be clobbered.
enum BoundFunctionAction {
kMissOnBoundFunction,
kDontMissOnBoundFunction
};
// Machine code version of Map::GetConstructor().
// |temp| holds |result|'s map when done, and |temp2| its instance type.
void GetMapConstructor(Register result, Register map, Register temp,
Register temp2);
void TryGetFunctionPrototype(Register function,
Register result,
Register scratch,
Label* miss,
BoundFunctionAction action =
kDontMissOnBoundFunction);
void TryGetFunctionPrototype(Register function, Register result,
Register scratch, Label* miss);
// Compare object type for heap object. heap_object contains a non-Smi
// whose object type should be compared with the given type. This both
@@ -1883,12 +1870,6 @@ class MacroAssembler : public Assembler {
Label* on_black);
// Get the location of a relocated constant (its address in the constant pool)
// from its load site.
void GetRelocatedValueLocation(Register ldr_location,
Register result);
// ---------------------------------------------------------------------------
// Debugging.
@ -197,8 +197,6 @@ namespace internal {
V(kTheInstructionShouldBeAnOris, "The instruction should be an oris") \
V(kTheInstructionShouldBeALi, "The instruction should be a li") \
V(kTheInstructionShouldBeASldi, "The instruction should be a sldi") \
V(kTheInstructionToPatchShouldBeALoadFromConstantPool, \
"The instruction to patch should be a load from the constant pool") \
V(kTheInstructionToPatchShouldBeAnLdrLiteral, \
"The instruction to patch should be a ldr literal") \
V(kTheInstructionToPatchShouldBeALis, \
@ -177,7 +177,6 @@ enum BuiltinExtraArguments {
V(SHR, 1) \
V(SHR_STRONG, 1) \
V(IN, 1) \
V(INSTANCE_OF, 1) \
V(CALL_NON_FUNCTION, 0) \
V(CALL_NON_FUNCTION_AS_CONSTRUCTOR, 0) \
V(CALL_FUNCTION_PROXY, 1) \
@ -153,9 +153,8 @@ Callable CodeFactory::StoreGlobalViaContext(Isolate* isolate, int depth,
// static
Callable CodeFactory::Instanceof(Isolate* isolate,
InstanceofStub::Flags flags) {
InstanceofStub stub(isolate, flags);
Callable CodeFactory::InstanceOf(Isolate* isolate) {
InstanceOfStub stub(isolate);
return Callable(stub.GetCode(), stub.GetCallInterfaceDescriptor());
}
@ -66,7 +66,7 @@ class CodeFactory final {
static Callable StoreGlobalViaContext(Isolate* isolate, int depth,
LanguageMode language_mode);
static Callable Instanceof(Isolate* isolate, InstanceofStub::Flags flags);
static Callable InstanceOf(Isolate* isolate);
static Callable ToBoolean(
Isolate* isolate, ToBooleanStub::ResultMode mode,
@ -594,14 +594,6 @@ void CallICStub::PrintState(std::ostream& os) const { // NOLINT
}
void InstanceofStub::PrintName(std::ostream& os) const { // NOLINT
os << "InstanceofStub";
if (HasArgsInRegisters()) os << "_REGS";
if (HasCallSiteInlineCheck()) os << "_INLINE";
if (ReturnTrueFalseObject()) os << "_TRUEFALSE";
}
void JSEntryStub::FinishCode(Handle<Code> code) {
Handle<FixedArray> handler_table =
code->GetIsolate()->factory()->NewFixedArray(1, TENURED);
@ -34,7 +34,7 @@ namespace internal {
V(CompareIC) \
V(DoubleToI) \
V(FunctionPrototype) \
V(Instanceof) \
V(InstanceOf) \
V(InternalArrayConstructor) \
V(JSEntry) \
V(KeyedLoadICTrampoline) \
@ -876,47 +876,14 @@ class GrowArrayElementsStub : public HydrogenCodeStub {
DEFINE_HYDROGEN_CODE_STUB(GrowArrayElements, HydrogenCodeStub);
};
class InstanceofStub: public PlatformCodeStub {
class InstanceOfStub final : public PlatformCodeStub {
public:
enum Flags {
kNoFlags = 0,
kArgsInRegisters = 1 << 0,
kCallSiteInlineCheck = 1 << 1,
kReturnTrueFalseObject = 1 << 2
};
InstanceofStub(Isolate* isolate, Flags flags) : PlatformCodeStub(isolate) {
minor_key_ = FlagBits::encode(flags);
}
static Register left() { return InstanceofDescriptor::left(); }
static Register right() { return InstanceofDescriptor::right(); }
CallInterfaceDescriptor GetCallInterfaceDescriptor() const override {
if (HasArgsInRegisters()) {
return InstanceofDescriptor(isolate());
}
return ContextOnlyDescriptor(isolate());
}
explicit InstanceOfStub(Isolate* isolate) : PlatformCodeStub(isolate) {}
private:
Flags flags() const { return FlagBits::decode(minor_key_); }
bool HasArgsInRegisters() const { return (flags() & kArgsInRegisters) != 0; }
bool HasCallSiteInlineCheck() const {
return (flags() & kCallSiteInlineCheck) != 0;
}
bool ReturnTrueFalseObject() const {
return (flags() & kReturnTrueFalseObject) != 0;
}
void PrintName(std::ostream& os) const override; // NOLINT
class FlagBits : public BitField<Flags, 0, 3> {};
DEFINE_PLATFORM_CODE_STUB(Instanceof, PlatformCodeStub);
DEFINE_CALL_INTERFACE_DESCRIPTOR(InstanceOf);
DEFINE_PLATFORM_CODE_STUB(InstanceOf, PlatformCodeStub);
};
@ -443,10 +443,7 @@ void JSGenericLowering::LowerJSHasProperty(Node* node) {
void JSGenericLowering::LowerJSInstanceOf(Node* node) {
CallDescriptor::Flags flags = AdjustFrameStatesForCall(node);
InstanceofStub::Flags stub_flags = static_cast<InstanceofStub::Flags>(
InstanceofStub::kReturnTrueFalseObject |
InstanceofStub::kArgsInRegisters);
Callable callable = CodeFactory::Instanceof(isolate(), stub_flags);
Callable callable = CodeFactory::InstanceOf(isolate());
ReplaceWithStubCall(node, callable, flags);
}
@ -5088,18 +5088,17 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
VisitForStackValue(expr->right());
__ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
__ LoadRoot(ip, Heap::kTrueValueRootIndex);
__ cmp(r0, ip);
__ CompareRoot(r0, Heap::kTrueValueRootIndex);
Split(eq, if_true, if_false, fall_through);
break;
case Token::INSTANCEOF: {
VisitForStackValue(expr->right());
InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
VisitForAccumulatorValue(expr->right());
__ pop(r1);
InstanceOfStub stub(isolate());
__ CallStub(&stub);
PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
// The stub returns 0 for true.
__ tst(r0, r0);
PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
__ CompareRoot(r0, Heap::kTrueValueRootIndex);
Split(eq, if_true, if_false, fall_through);
break;
}
@ -4798,12 +4798,13 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
break;
case Token::INSTANCEOF: {
VisitForStackValue(expr->right());
InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
VisitForAccumulatorValue(expr->right());
__ Pop(x1);
InstanceOfStub stub(isolate());
__ CallStub(&stub);
PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
// The stub returns 0 for true.
__ CompareAndSplit(x0, 0, eq, if_true, if_false, fall_through);
PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
__ CompareRoot(x0, Heap::kTrueValueRootIndex);
Split(eq, if_true, if_false, fall_through);
break;
}
@ -5032,13 +5032,13 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
break;
case Token::INSTANCEOF: {
VisitForStackValue(expr->right());
InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
VisitForAccumulatorValue(expr->right());
__ Pop(edx);
InstanceOfStub stub(isolate());
__ CallStub(&stub);
PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
__ test(eax, eax);
// The stub returns 0 for true.
Split(zero, if_true, if_false, fall_through);
PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
__ cmp(eax, isolate()->factory()->true_value());
Split(equal, if_true, if_false, fall_through);
break;
}
@ -5118,12 +5118,14 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
break;
case Token::INSTANCEOF: {
VisitForStackValue(expr->right());
InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
VisitForAccumulatorValue(expr->right());
__ mov(a0, result_register());
__ pop(a1);
InstanceOfStub stub(isolate());
__ CallStub(&stub);
PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
// The stub returns 0 for true.
Split(eq, v0, Operand(zero_reg), if_true, if_false, fall_through);
PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
__ LoadRoot(at, Heap::kTrueValueRootIndex);
Split(eq, v0, Operand(at), if_true, if_false, fall_through);
break;
}
@ -5120,12 +5120,14 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
break;
case Token::INSTANCEOF: {
VisitForStackValue(expr->right());
InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
VisitForAccumulatorValue(expr->right());
__ mov(a0, result_register());
__ pop(a1);
InstanceOfStub stub(isolate());
__ CallStub(&stub);
PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
// The stub returns 0 for true.
Split(eq, v0, Operand(zero_reg), if_true, if_false, fall_through);
PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
__ LoadRoot(a4, Heap::kTrueValueRootIndex);
Split(eq, v0, Operand(a4), if_true, if_false, fall_through);
break;
}
@ -5041,13 +5041,13 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
break;
case Token::INSTANCEOF: {
VisitForStackValue(expr->right());
InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
VisitForAccumulatorValue(expr->right());
__ Pop(rdx);
InstanceOfStub stub(isolate());
__ CallStub(&stub);
PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
__ testp(rax, rax);
// The stub returns 0 for true.
Split(zero, if_true, if_false, fall_through);
PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
__ CompareRoot(rax, Heap::kTrueValueRootIndex);
Split(equal, if_true, if_false, fall_through);
break;
}
@ -811,8 +811,8 @@ bool HInstruction::CanDeoptimize() {
case HValue::kHasInstanceTypeAndBranch:
case HValue::kInnerAllocatedObject:
case HValue::kInstanceOf:
case HValue::kInstanceOfKnownGlobal:
case HValue::kIsConstructCallAndBranch:
case HValue::kHasInPrototypeChainAndBranch:
case HValue::kIsObjectAndBranch:
case HValue::kIsSmiAndBranch:
case HValue::kIsStringAndBranch:
@ -105,9 +105,9 @@ class LChunkBuilder;
V(HasInstanceTypeAndBranch) \
V(InnerAllocatedObject) \
V(InstanceOf) \
V(InstanceOfKnownGlobal) \
V(InvokeFunction) \
V(IsConstructCallAndBranch) \
V(HasInPrototypeChainAndBranch) \
V(IsObjectAndBranch) \
V(IsStringAndBranch) \
V(IsSmiAndBranch) \
@ -4760,34 +4760,32 @@ class HInstanceOf final : public HBinaryOperation {
};
class HInstanceOfKnownGlobal final : public HTemplateInstruction<2> {
class HHasInPrototypeChainAndBranch final
: public HTemplateControlInstruction<2, 2> {
public:
DECLARE_INSTRUCTION_WITH_CONTEXT_FACTORY_P2(HInstanceOfKnownGlobal,
HValue*,
Handle<JSFunction>);
DECLARE_INSTRUCTION_FACTORY_P2(HHasInPrototypeChainAndBranch, HValue*,
HValue*);
HValue* context() { return OperandAt(0); }
HValue* left() { return OperandAt(1); }
Handle<JSFunction> function() { return function_; }
HValue* object() const { return OperandAt(0); }
HValue* prototype() const { return OperandAt(1); }
Representation RequiredInputRepresentation(int index) override {
return Representation::Tagged();
}
DECLARE_CONCRETE_INSTRUCTION(InstanceOfKnownGlobal)
private:
HInstanceOfKnownGlobal(HValue* context,
HValue* left,
Handle<JSFunction> right)
: HTemplateInstruction<2>(HType::Boolean()), function_(right) {
SetOperandAt(0, context);
SetOperandAt(1, left);
set_representation(Representation::Tagged());
SetAllSideEffects();
bool ObjectNeedsSmiCheck() const {
return !object()->type().IsHeapObject() &&
!object()->representation().IsHeapObject();
}
Handle<JSFunction> function_;
DECLARE_CONCRETE_INSTRUCTION(HasInPrototypeChainAndBranch)
private:
HHasInPrototypeChainAndBranch(HValue* object, HValue* prototype) {
SetOperandAt(0, object);
SetOperandAt(1, prototype);
SetDependsOnFlag(kCalls);
}
};
@ -11347,11 +11347,19 @@ void HOptimizedGraphBuilder::VisitCompareOperation(CompareOperation* expr) {
// Check to see if the rhs of the instanceof is a known function.
if (right->IsConstant() &&
HConstant::cast(right)->handle(isolate())->IsJSFunction()) {
Handle<Object> function = HConstant::cast(right)->handle(isolate());
Handle<JSFunction> target = Handle<JSFunction>::cast(function);
HInstanceOfKnownGlobal* result =
New<HInstanceOfKnownGlobal>(left, target);
return ast_context()->ReturnInstruction(result, expr->id());
Handle<JSFunction> constructor =
Handle<JSFunction>::cast(HConstant::cast(right)->handle(isolate()));
if (!constructor->map()->has_non_instance_prototype()) {
JSFunction::EnsureHasInitialMap(constructor);
DCHECK(constructor->has_initial_map());
Handle<Map> initial_map(constructor->initial_map(), isolate());
top_info()->dependencies()->AssumeInitialMapCantChange(initial_map);
HInstruction* prototype =
Add<HConstant>(handle(initial_map->prototype(), isolate()));
HHasInPrototypeChainAndBranch* result =
New<HHasInPrototypeChainAndBranch>(left, prototype);
return ast_context()->ReturnControl(result, expr->id());
}
}
HInstanceOf* result = New<HInstanceOf>(left, right);
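For reference, a minimal sketch of the decision this hunk adds, written with hypothetical stand-in types (MapModel, ConstructorModel) rather than the real V8 classes: a known constructor whose map does not have a non-instance prototype lets the builder bake the prototype from the constructor's initial map into the graph, guarded by a dependency on that initial map; anything else falls back to the generic HInstanceOf path.
// Illustrative sketch only; names are hypothetical, not the V8 API.
struct MapModel { const void* prototype; };
struct ConstructorModel {
  bool has_non_instance_prototype;
  const MapModel* initial_map;  // assume EnsureHasInitialMap already ran
};
// Returns the prototype to specialize on, or nullptr to use the generic path.
const void* PrototypeForSpecialization(const ConstructorModel& c) {
  if (c.has_non_instance_prototype) return nullptr;
  return c.initial_map->prototype;  // valid while the initial map stays unchanged
}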
@ -12552,6 +12560,18 @@ void HOptimizedGraphBuilder::GenerateUnlikely(CallRuntime* call) {
}
void HOptimizedGraphBuilder::GenerateHasInPrototypeChain(CallRuntime* call) {
DCHECK_EQ(2, call->arguments()->length());
CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
HValue* prototype = Pop();
HValue* object = Pop();
HHasInPrototypeChainAndBranch* result =
New<HHasInPrototypeChainAndBranch>(object, prototype);
return ast_context()->ReturnControl(result, call->id());
}
void HOptimizedGraphBuilder::GenerateFixedArrayGet(CallRuntime* call) {
DCHECK(call->arguments()->length() == 2);
CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
@ -2217,6 +2217,7 @@ class HOptimizedGraphBuilder : public HGraphBuilder, public AstVisitor {
F(DebugIsActive) \
F(Likely) \
F(Unlikely) \
F(HasInPrototypeChain) \
/* Typed Arrays */ \
F(TypedArrayInitialize) \
F(DataViewInitialize) \
@ -2718,233 +2718,108 @@ void JSEntryStub::Generate(MacroAssembler* masm) {
}
// Generate stub code for instanceof.
// This code can patch a call site inlined cache of the instance of check,
// which looks like this.
//
// 81 ff XX XX XX XX cmp edi, <the hole, patched to a map>
// 75 0a jne <some near label>
// b8 XX XX XX XX mov eax, <the hole, patched to either true or false>
//
// If call site patching is requested the stack will have the delta from the
// return address to the cmp instruction just below the return address. This
// also means that call site patching can only take place with arguments in
// registers. TOS looks like this when call site patching is requested
//
// esp[0] : return address
// esp[4] : delta from return address to cmp instruction
//
void InstanceofStub::Generate(MacroAssembler* masm) {
// Call site inlining and patching implies arguments in registers.
DCHECK(HasArgsInRegisters() || !HasCallSiteInlineCheck());
void InstanceOfStub::Generate(MacroAssembler* masm) {
Register const object = edx; // Object (lhs).
Register const function = eax; // Function (rhs).
Register const object_map = ecx; // Map of {object}.
Register const function_map = ebx; // Map of {function}.
Register const function_prototype = function_map; // Prototype of {function}.
Register const scratch = edi;
// Fixed register usage throughout the stub.
Register object = eax; // Object (lhs).
Register map = ebx; // Map of the object.
Register function = edx; // Function (rhs).
Register prototype = edi; // Prototype of the function.
Register scratch = ecx;
DCHECK(object.is(InstanceOfDescriptor::LeftRegister()));
DCHECK(function.is(InstanceOfDescriptor::RightRegister()));
// Constants describing the call site code to patch.
static const int kDeltaToCmpImmediate = 2;
static const int kDeltaToMov = 8;
static const int kDeltaToMovImmediate = 9;
static const int8_t kCmpEdiOperandByte1 = bit_cast<int8_t, uint8_t>(0x3b);
static const int8_t kCmpEdiOperandByte2 = bit_cast<int8_t, uint8_t>(0x3d);
static const int8_t kMovEaxImmediateByte = bit_cast<int8_t, uint8_t>(0xb8);
// Check if {object} is a smi.
Label object_is_smi;
__ JumpIfSmi(object, &object_is_smi, Label::kNear);
DCHECK_EQ(object.code(), InstanceofStub::left().code());
DCHECK_EQ(function.code(), InstanceofStub::right().code());
// Lookup the {function} and the {object} map in the global instanceof cache.
// Note: This is safe because we clear the global instanceof cache whenever
// we change the prototype of any object.
Label fast_case, slow_case;
__ mov(object_map, FieldOperand(object, HeapObject::kMapOffset));
__ CompareRoot(function, scratch, Heap::kInstanceofCacheFunctionRootIndex);
__ j(not_equal, &fast_case, Label::kNear);
__ CompareRoot(object_map, scratch, Heap::kInstanceofCacheMapRootIndex);
__ j(not_equal, &fast_case, Label::kNear);
__ LoadRoot(eax, Heap::kInstanceofCacheAnswerRootIndex);
__ ret(0);
// Get the object and function - they are always both needed.
Label slow, not_js_object;
if (!HasArgsInRegisters()) {
__ mov(object, Operand(esp, 2 * kPointerSize));
__ mov(function, Operand(esp, 1 * kPointerSize));
}
// If {object} is a smi we can safely return false if {function} is a JS
// function, otherwise we have to miss to the runtime and throw an exception.
__ bind(&object_is_smi);
__ JumpIfSmi(function, &slow_case);
__ CmpObjectType(function, JS_FUNCTION_TYPE, function_map);
__ j(not_equal, &slow_case);
__ LoadRoot(eax, Heap::kFalseValueRootIndex);
__ ret(0);
// Check that the left hand is a JS object.
__ JumpIfSmi(object, &not_js_object);
__ IsObjectJSObjectType(object, map, scratch, &not_js_object);
// Fast-case: The {function} must be a valid JSFunction.
__ bind(&fast_case);
__ JumpIfSmi(function, &slow_case);
__ CmpObjectType(function, JS_FUNCTION_TYPE, function_map);
__ j(not_equal, &slow_case);
// If there is a call site cache don't look in the global cache, but do the
// real lookup and update the call site cache.
if (!HasCallSiteInlineCheck() && !ReturnTrueFalseObject()) {
// Look up the function and the map in the instanceof cache.
Label miss;
__ CompareRoot(function, scratch, Heap::kInstanceofCacheFunctionRootIndex);
__ j(not_equal, &miss, Label::kNear);
__ CompareRoot(map, scratch, Heap::kInstanceofCacheMapRootIndex);
__ j(not_equal, &miss, Label::kNear);
__ LoadRoot(eax, Heap::kInstanceofCacheAnswerRootIndex);
__ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
__ bind(&miss);
}
// Ensure that {function} has an instance prototype.
__ test_b(FieldOperand(function_map, Map::kBitFieldOffset),
static_cast<uint8_t>(1 << Map::kHasNonInstancePrototype));
__ j(not_zero, &slow_case);
// Get the prototype of the function.
__ TryGetFunctionPrototype(function, prototype, scratch, &slow, true);
// Ensure that {function} is not bound.
Register const shared_info = scratch;
__ mov(shared_info,
FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
__ BooleanBitTest(shared_info, SharedFunctionInfo::kCompilerHintsOffset,
SharedFunctionInfo::kBoundFunction);
__ j(not_zero, &slow_case);
// Check that the function prototype is a JS object.
__ JumpIfSmi(prototype, &slow);
__ IsObjectJSObjectType(prototype, scratch, scratch, &slow);
// Get the "prototype" (or initial map) of the {function}.
__ mov(function_prototype,
FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
__ AssertNotSmi(function_prototype);
// Update the global instanceof or call site inlined cache with the current
// map and function. The cached answer will be set when it is known below.
if (!HasCallSiteInlineCheck()) {
__ StoreRoot(map, scratch, Heap::kInstanceofCacheMapRootIndex);
__ StoreRoot(function, scratch, Heap::kInstanceofCacheFunctionRootIndex);
} else {
// The constants for the code patching are based on no push instructions
// at the call site.
DCHECK(HasArgsInRegisters());
// Get return address and delta to inlined map check.
__ mov(scratch, Operand(esp, 0 * kPointerSize));
__ sub(scratch, Operand(esp, 1 * kPointerSize));
if (FLAG_debug_code) {
__ cmpb(Operand(scratch, 0), kCmpEdiOperandByte1);
__ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheCmp1);
__ cmpb(Operand(scratch, 1), kCmpEdiOperandByte2);
__ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheCmp2);
}
__ mov(scratch, Operand(scratch, kDeltaToCmpImmediate));
__ mov(Operand(scratch, 0), map);
__ push(map);
// Scratch points at the cell payload. Calculate the start of the object.
__ sub(scratch, Immediate(Cell::kValueOffset - 1));
__ RecordWriteField(scratch, Cell::kValueOffset, map, function,
kDontSaveFPRegs, OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
__ pop(map);
}
// Resolve the prototype if the {function} has an initial map. Afterwards the
// {function_prototype} will be either the JSReceiver prototype object or the
// hole value, which means that no instances of the {function} were created so
// far and hence we should return false.
Label function_prototype_valid;
Register const function_prototype_map = scratch;
__ CmpObjectType(function_prototype, MAP_TYPE, function_prototype_map);
__ j(not_equal, &function_prototype_valid, Label::kNear);
__ mov(function_prototype,
FieldOperand(function_prototype, Map::kPrototypeOffset));
__ bind(&function_prototype_valid);
__ AssertNotSmi(function_prototype);
// Loop through the prototype chain of the object looking for the function
// prototype.
__ mov(scratch, FieldOperand(map, Map::kPrototypeOffset));
Label loop, is_instance, is_not_instance;
// Update the global instanceof cache with the current {object} map and
// {function}. The cached answer will be set when it is known below.
__ StoreRoot(function, scratch, Heap::kInstanceofCacheFunctionRootIndex);
__ StoreRoot(object_map, scratch, Heap::kInstanceofCacheMapRootIndex);
// Loop through the prototype chain looking for the {function} prototype.
// Assume true, and change to false if not found.
Register const object_prototype = object_map;
Label done, loop;
__ mov(eax, isolate()->factory()->true_value());
__ bind(&loop);
__ cmp(scratch, prototype);
__ j(equal, &is_instance, Label::kNear);
Factory* factory = isolate()->factory();
__ cmp(scratch, Immediate(factory->null_value()));
__ j(equal, &is_not_instance, Label::kNear);
__ mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
__ mov(scratch, FieldOperand(scratch, Map::kPrototypeOffset));
__ jmp(&loop);
__ mov(object_prototype, FieldOperand(object_map, Map::kPrototypeOffset));
__ cmp(object_prototype, function_prototype);
__ j(equal, &done, Label::kNear);
__ cmp(object_prototype, isolate()->factory()->null_value());
__ mov(object_map, FieldOperand(object_prototype, HeapObject::kMapOffset));
__ j(not_equal, &loop);
__ mov(eax, isolate()->factory()->false_value());
__ bind(&done);
__ StoreRoot(eax, scratch, Heap::kInstanceofCacheAnswerRootIndex);
__ ret(0);
__ bind(&is_instance);
if (!HasCallSiteInlineCheck()) {
__ mov(eax, Immediate(0));
__ StoreRoot(eax, scratch, Heap::kInstanceofCacheAnswerRootIndex);
if (ReturnTrueFalseObject()) {
__ mov(eax, factory->true_value());
}
} else {
// Get return address and delta to inlined map check.
__ mov(eax, factory->true_value());
__ mov(scratch, Operand(esp, 0 * kPointerSize));
__ sub(scratch, Operand(esp, 1 * kPointerSize));
if (FLAG_debug_code) {
__ cmpb(Operand(scratch, kDeltaToMov), kMovEaxImmediateByte);
__ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheMov);
}
__ mov(Operand(scratch, kDeltaToMovImmediate), eax);
if (!ReturnTrueFalseObject()) {
__ Move(eax, Immediate(0));
}
}
__ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
__ bind(&is_not_instance);
if (!HasCallSiteInlineCheck()) {
__ mov(eax, Immediate(Smi::FromInt(1)));
__ StoreRoot(eax, scratch, Heap::kInstanceofCacheAnswerRootIndex);
if (ReturnTrueFalseObject()) {
__ mov(eax, factory->false_value());
}
} else {
// Get return address and delta to inlined map check.
__ mov(eax, factory->false_value());
__ mov(scratch, Operand(esp, 0 * kPointerSize));
__ sub(scratch, Operand(esp, 1 * kPointerSize));
if (FLAG_debug_code) {
__ cmpb(Operand(scratch, kDeltaToMov), kMovEaxImmediateByte);
__ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheMov);
}
__ mov(Operand(scratch, kDeltaToMovImmediate), eax);
if (!ReturnTrueFalseObject()) {
__ Move(eax, Immediate(Smi::FromInt(1)));
}
}
__ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
Label object_not_null, object_not_null_or_smi;
__ bind(&not_js_object);
// Before null, smi and string value checks, check that the rhs is a function
// as for a non-function rhs an exception needs to be thrown.
__ JumpIfSmi(function, &slow, Label::kNear);
__ CmpObjectType(function, JS_FUNCTION_TYPE, scratch);
__ j(not_equal, &slow, Label::kNear);
// Null is not an instance of anything.
__ cmp(object, factory->null_value());
__ j(not_equal, &object_not_null, Label::kNear);
if (ReturnTrueFalseObject()) {
__ mov(eax, factory->false_value());
} else {
__ Move(eax, Immediate(Smi::FromInt(1)));
}
__ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
__ bind(&object_not_null);
// Smi values are not instances of anything.
__ JumpIfNotSmi(object, &object_not_null_or_smi, Label::kNear);
if (ReturnTrueFalseObject()) {
__ mov(eax, factory->false_value());
} else {
__ Move(eax, Immediate(Smi::FromInt(1)));
}
__ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
__ bind(&object_not_null_or_smi);
// String values are not instances of anything.
Condition is_string = masm->IsObjectStringType(object, scratch, scratch);
__ j(NegateCondition(is_string), &slow, Label::kNear);
if (ReturnTrueFalseObject()) {
__ mov(eax, factory->false_value());
} else {
__ Move(eax, Immediate(Smi::FromInt(1)));
}
__ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
// Slow-case: Go through the JavaScript implementation.
__ bind(&slow);
if (!ReturnTrueFalseObject()) {
// Tail call the builtin which returns 0 or 1.
if (HasArgsInRegisters()) {
// Push arguments below return address.
__ pop(scratch);
__ push(object);
__ push(function);
__ push(scratch);
}
__ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION);
} else {
// Call the builtin and convert 0/1 to true/false.
{
FrameScope scope(masm, StackFrame::INTERNAL);
__ push(object);
__ push(function);
__ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_FUNCTION);
}
Label true_value, done;
__ test(eax, eax);
__ j(zero, &true_value, Label::kNear);
__ mov(eax, factory->false_value());
__ jmp(&done, Label::kNear);
__ bind(&true_value);
__ mov(eax, factory->true_value());
__ bind(&done);
__ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
}
// Slow-case: Call the runtime function.
__ bind(&slow_case);
__ pop(scratch); // Pop return address.
__ push(object); // Push {object}.
__ push(function); // Push {function}.
__ push(scratch); // Push return address.
__ TailCallRuntime(Runtime::kInstanceOf, 2, 1);
}
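As a rough guide to what the rewritten stub computes, here is a self-contained sketch under heavily simplified assumptions (the cache is keyed on the object itself rather than its map, and the object/map distinction is collapsed into one hypothetical type): a one-entry cache in front of a plain prototype-chain walk, which is safe only because V8 clears the cache whenever any object's prototype changes.
// Illustrative model only; not the real V8 data structures.
struct ObjectModel { const ObjectModel* prototype; };  // nullptr plays the role of null
struct InstanceOfCache {
  const ObjectModel* object;
  const ObjectModel* function_prototype;
  bool answer;
};
static InstanceOfCache cache = {nullptr, nullptr, false};
bool InstanceOfFastPath(const ObjectModel* object,
                        const ObjectModel* function_prototype) {
  if (object == cache.object && function_prototype == cache.function_prototype) {
    return cache.answer;  // cache hit
  }
  // Walk the prototype chain of {object} looking for {function_prototype}.
  bool answer = false;
  for (const ObjectModel* p = object->prototype; p != nullptr; p = p->prototype) {
    if (p == function_prototype) { answer = true; break; }
  }
  cache.object = object;
  cache.function_prototype = function_prototype;
  cache.answer = answer;
  return answer;
}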
@ -53,8 +53,8 @@ const Register StoreGlobalViaContextDescriptor::SlotRegister() { return ebx; }
const Register StoreGlobalViaContextDescriptor::ValueRegister() { return eax; }
const Register InstanceofDescriptor::left() { return eax; }
const Register InstanceofDescriptor::right() { return edx; }
const Register InstanceOfDescriptor::LeftRegister() { return edx; }
const Register InstanceOfDescriptor::RightRegister() { return eax; }
const Register ArgumentsAccessReadDescriptor::index() { return edx; }
@ -2055,6 +2055,17 @@ void LCodeGen::EmitBranch(InstrType instr, Condition cc) {
}
template <class InstrType>
void LCodeGen::EmitTrueBranch(InstrType instr, Condition cc) {
int true_block = instr->TrueDestination(chunk_);
if (cc == no_condition) {
__ jmp(chunk_->GetAssemblyLabel(true_block));
} else {
__ j(cc, chunk_->GetAssemblyLabel(true_block));
}
}
template<class InstrType>
void LCodeGen::EmitFalseBranch(InstrType instr, Condition cc) {
int false_block = instr->FalseDestination(chunk_);
@ -2614,120 +2625,41 @@ void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) {
void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
// Object and function are in fixed registers defined by the stub.
DCHECK(ToRegister(instr->context()).is(esi));
InstanceofStub stub(isolate(), InstanceofStub::kArgsInRegisters);
DCHECK(ToRegister(instr->left()).is(InstanceOfDescriptor::LeftRegister()));
DCHECK(ToRegister(instr->right()).is(InstanceOfDescriptor::RightRegister()));
DCHECK(ToRegister(instr->result()).is(eax));
InstanceOfStub stub(isolate());
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
Label true_value, done;
__ test(eax, Operand(eax));
__ j(zero, &true_value, Label::kNear);
__ mov(ToRegister(instr->result()), factory()->false_value());
__ jmp(&done, Label::kNear);
__ bind(&true_value);
__ mov(ToRegister(instr->result()), factory()->true_value());
__ bind(&done);
}
void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
class DeferredInstanceOfKnownGlobal final : public LDeferredCode {
public:
DeferredInstanceOfKnownGlobal(LCodeGen* codegen,
LInstanceOfKnownGlobal* instr)
: LDeferredCode(codegen), instr_(instr) { }
void Generate() override {
codegen()->DoDeferredInstanceOfKnownGlobal(instr_, &map_check_);
}
LInstruction* instr() override { return instr_; }
Label* map_check() { return &map_check_; }
private:
LInstanceOfKnownGlobal* instr_;
Label map_check_;
};
void LCodeGen::DoHasInPrototypeChainAndBranch(
LHasInPrototypeChainAndBranch* instr) {
Register const object = ToRegister(instr->object());
Register const object_map = ToRegister(instr->scratch());
Register const object_prototype = object_map;
Register const prototype = ToRegister(instr->prototype());
DeferredInstanceOfKnownGlobal* deferred;
deferred = new(zone()) DeferredInstanceOfKnownGlobal(this, instr);
// The {object} must be a spec object. It's sufficient to know that {object}
// is not a smi, since all other non-spec objects have {null} prototypes and
// will be ruled out below.
if (instr->hydrogen()->ObjectNeedsSmiCheck()) {
__ test(object, Immediate(kSmiTagMask));
EmitFalseBranch(instr, zero);
}
Label done, false_result;
Register object = ToRegister(instr->value());
Register temp = ToRegister(instr->temp());
// A Smi is not an instance of anything.
__ JumpIfSmi(object, &false_result, Label::kNear);
// This is the inlined call site instanceof cache. The two occurrences of the
// hole value will be patched to the last map/result pair generated by the
// instanceof stub.
Label cache_miss;
Register map = ToRegister(instr->temp());
__ mov(map, FieldOperand(object, HeapObject::kMapOffset));
__ bind(deferred->map_check()); // Label for calculating code patching.
Handle<Cell> cache_cell = factory()->NewCell(factory()->the_hole_value());
__ cmp(map, Operand::ForCell(cache_cell)); // Patched to cached map.
__ j(not_equal, &cache_miss, Label::kNear);
__ mov(eax, factory()->the_hole_value()); // Patched to either true or false.
__ jmp(&done, Label::kNear);
// The inlined call site cache did not match. Check for null and string
// before calling the deferred code.
__ bind(&cache_miss);
// Null is not an instance of anything.
__ cmp(object, factory()->null_value());
__ j(equal, &false_result, Label::kNear);
// String values are not instances of anything.
Condition is_string = masm_->IsObjectStringType(object, temp, temp);
__ j(is_string, &false_result, Label::kNear);
// Go to the deferred code.
__ jmp(deferred->entry());
__ bind(&false_result);
__ mov(ToRegister(instr->result()), factory()->false_value());
// Here result has either true or false. Deferred code also produces true or
// false object.
__ bind(deferred->exit());
__ bind(&done);
}
void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
Label* map_check) {
PushSafepointRegistersScope scope(this);
InstanceofStub::Flags flags = InstanceofStub::kNoFlags;
flags = static_cast<InstanceofStub::Flags>(
flags | InstanceofStub::kArgsInRegisters);
flags = static_cast<InstanceofStub::Flags>(
flags | InstanceofStub::kCallSiteInlineCheck);
flags = static_cast<InstanceofStub::Flags>(
flags | InstanceofStub::kReturnTrueFalseObject);
InstanceofStub stub(isolate(), flags);
// Get the temp register reserved by the instruction. This needs to be a
// register which is pushed last by PushSafepointRegisters as top of the
// stack is used to pass the offset to the location of the map check to
// the stub.
Register temp = ToRegister(instr->temp());
DCHECK(MacroAssembler::SafepointRegisterStackIndex(temp) == 0);
__ LoadHeapObject(InstanceofStub::right(), instr->function());
static const int kAdditionalDelta = 13;
int delta = masm_->SizeOfCodeGeneratedSince(map_check) + kAdditionalDelta;
__ mov(temp, Immediate(delta));
__ StoreToSafepointRegisterSlot(temp, temp);
CallCodeGeneric(stub.GetCode(),
RelocInfo::CODE_TARGET,
instr,
RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
// Get the deoptimization index of the LLazyBailout-environment that
// corresponds to this instruction.
LEnvironment* env = instr->GetDeferredLazyDeoptimizationEnvironment();
safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
// Put the result value into the eax slot and restore all registers.
__ StoreToSafepointRegisterSlot(eax, eax);
// Loop through the {object}'s prototype chain looking for the {prototype}.
__ mov(object_map, FieldOperand(object, HeapObject::kMapOffset));
Label loop;
__ bind(&loop);
__ mov(object_prototype, FieldOperand(object_map, Map::kPrototypeOffset));
__ cmp(object_prototype, prototype);
EmitTrueBranch(instr, equal);
__ cmp(object_prototype, factory()->null_value());
EmitFalseBranch(instr, equal);
__ mov(object_map, FieldOperand(object_prototype, HeapObject::kMapOffset));
__ jmp(&loop);
}
@ -104,8 +104,6 @@ class LCodeGen: public LCodeGenBase {
void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr);
void DoDeferredStringCharFromCode(LStringCharFromCode* instr);
void DoDeferredAllocate(LAllocate* instr);
void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
Label* map_check);
void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);
void DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr,
Register object,
@ -260,7 +258,9 @@ class LCodeGen: public LCodeGenBase {
// EmitBranch expects to be the last instruction of a block.
template<class InstrType>
void EmitBranch(InstrType instr, Condition cc);
template<class InstrType>
template <class InstrType>
void EmitTrueBranch(InstrType instr, Condition cc);
template <class InstrType>
void EmitFalseBranch(InstrType instr, Condition cc);
void EmitNumberUntagD(LNumberUntagD* instr, Register input, Register temp,
XMMRegister result, NumberUntagDMode mode);
@ -964,22 +964,14 @@ void LChunkBuilder::AddInstruction(LInstruction* instr,
if (instr->IsCall()) {
HValue* hydrogen_value_for_lazy_bailout = hydrogen_val;
LInstruction* instruction_needing_environment = NULL;
if (hydrogen_val->HasObservableSideEffects()) {
HSimulate* sim = HSimulate::cast(hydrogen_val->next());
instruction_needing_environment = instr;
sim->ReplayEnvironment(current_block_->last_environment());
hydrogen_value_for_lazy_bailout = sim;
}
LInstruction* bailout = AssignEnvironment(new(zone()) LLazyBailout());
bailout->set_hydrogen_value(hydrogen_value_for_lazy_bailout);
chunk_->AddInstruction(bailout, current_block_);
if (instruction_needing_environment != NULL) {
// Store the lazy deopt environment with the instruction if needed.
// Right now it is only used for LInstanceOfKnownGlobal.
instruction_needing_environment->
SetDeferredLazyDeoptimizationEnvironment(bailout->environment());
}
}
}
@ -1034,22 +1026,22 @@ LInstruction* LChunkBuilder::DoArgumentsElements(HArgumentsElements* elems) {
LInstruction* LChunkBuilder::DoInstanceOf(HInstanceOf* instr) {
LOperand* left = UseFixed(instr->left(), InstanceofStub::left());
LOperand* right = UseFixed(instr->right(), InstanceofStub::right());
LOperand* left =
UseFixed(instr->left(), InstanceOfDescriptor::LeftRegister());
LOperand* right =
UseFixed(instr->right(), InstanceOfDescriptor::RightRegister());
LOperand* context = UseFixed(instr->context(), esi);
LInstanceOf* result = new(zone()) LInstanceOf(context, left, right);
LInstanceOf* result = new (zone()) LInstanceOf(context, left, right);
return MarkAsCall(DefineFixed(result, eax), instr);
}
LInstruction* LChunkBuilder::DoInstanceOfKnownGlobal(
HInstanceOfKnownGlobal* instr) {
LInstanceOfKnownGlobal* result =
new(zone()) LInstanceOfKnownGlobal(
UseFixed(instr->context(), esi),
UseFixed(instr->left(), InstanceofStub::left()),
FixedTemp(edi));
return MarkAsCall(DefineFixed(result, eax), instr);
LInstruction* LChunkBuilder::DoHasInPrototypeChainAndBranch(
HHasInPrototypeChainAndBranch* instr) {
LOperand* object = UseRegister(instr->object());
LOperand* prototype = UseRegister(instr->prototype());
LOperand* temp = TempRegister();
return new (zone()) LHasInPrototypeChainAndBranch(object, prototype, temp);
}
@ -87,10 +87,10 @@ class LCodeGen;
V(GetCachedArrayIndex) \
V(Goto) \
V(HasCachedArrayIndexAndBranch) \
V(HasInPrototypeChainAndBranch) \
V(HasInstanceTypeAndBranch) \
V(InnerAllocatedObject) \
V(InstanceOf) \
V(InstanceOfKnownGlobal) \
V(InstructionGap) \
V(Integer32ToDouble) \
V(InvokeFunction) \
@ -236,8 +236,6 @@ class LInstruction : public ZoneObject {
void set_hydrogen_value(HValue* value) { hydrogen_value_ = value; }
HValue* hydrogen_value() const { return hydrogen_value_; }
virtual void SetDeferredLazyDeoptimizationEnvironment(LEnvironment* env) { }
void MarkAsCall() { bit_field_ = IsCallBits::update(bit_field_, true); }
bool IsCall() const { return IsCallBits::decode(bit_field_); }
@ -1187,39 +1185,30 @@ class LInstanceOf final : public LTemplateInstruction<1, 3, 0> {
inputs_[2] = right;
}
LOperand* context() { return inputs_[0]; }
LOperand* context() const { return inputs_[0]; }
LOperand* left() const { return inputs_[1]; }
LOperand* right() const { return inputs_[2]; }
DECLARE_CONCRETE_INSTRUCTION(InstanceOf, "instance-of")
};
class LInstanceOfKnownGlobal final : public LTemplateInstruction<1, 2, 1> {
class LHasInPrototypeChainAndBranch final : public LControlInstruction<2, 1> {
public:
LInstanceOfKnownGlobal(LOperand* context, LOperand* value, LOperand* temp) {
inputs_[0] = context;
inputs_[1] = value;
temps_[0] = temp;
LHasInPrototypeChainAndBranch(LOperand* object, LOperand* prototype,
LOperand* scratch) {
inputs_[0] = object;
inputs_[1] = prototype;
temps_[0] = scratch;
}
LOperand* context() { return inputs_[0]; }
LOperand* value() { return inputs_[1]; }
LOperand* temp() { return temps_[0]; }
LOperand* object() const { return inputs_[0]; }
LOperand* prototype() const { return inputs_[1]; }
LOperand* scratch() const { return temps_[0]; }
DECLARE_CONCRETE_INSTRUCTION(InstanceOfKnownGlobal,
"instance-of-known-global")
DECLARE_HYDROGEN_ACCESSOR(InstanceOfKnownGlobal)
Handle<JSFunction> function() const { return hydrogen()->function(); }
LEnvironment* GetDeferredLazyDeoptimizationEnvironment() {
return lazy_deopt_env_;
}
virtual void SetDeferredLazyDeoptimizationEnvironment(
LEnvironment* env) override {
lazy_deopt_env_ = env;
}
private:
LEnvironment* lazy_deopt_env_;
DECLARE_CONCRETE_INSTRUCTION(HasInPrototypeChainAndBranch,
"has-in-prototype-chain-and-branch")
DECLARE_HYDROGEN_ACCESSOR(HasInPrototypeChainAndBranch)
};
@ -1795,33 +1795,8 @@ void MacroAssembler::GetMapConstructor(Register result, Register map,
}
void MacroAssembler::TryGetFunctionPrototype(Register function,
Register result,
Register scratch,
Label* miss,
bool miss_on_bound_function) {
Label non_instance;
if (miss_on_bound_function) {
// Check that the receiver isn't a smi.
JumpIfSmi(function, miss);
// Check that the function really is a function.
CmpObjectType(function, JS_FUNCTION_TYPE, result);
j(not_equal, miss);
// If a bound function, go to miss label.
mov(scratch,
FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
BooleanBitTest(scratch, SharedFunctionInfo::kCompilerHintsOffset,
SharedFunctionInfo::kBoundFunction);
j(not_zero, miss);
// Make sure that the function has an instance prototype.
movzx_b(scratch, FieldOperand(result, Map::kBitFieldOffset));
test(scratch, Immediate(1 << Map::kHasNonInstancePrototype));
j(not_zero, &non_instance);
}
void MacroAssembler::TryGetFunctionPrototype(Register function, Register result,
Register scratch, Label* miss) {
// Get the prototype or initial map from the function.
mov(result,
FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
@ -1835,20 +1810,11 @@ void MacroAssembler::TryGetFunctionPrototype(Register function,
// If the function does not have an initial map, we're done.
Label done;
CmpObjectType(result, MAP_TYPE, scratch);
j(not_equal, &done);
j(not_equal, &done, Label::kNear);
// Get the prototype from the initial map.
mov(result, FieldOperand(result, Map::kPrototypeOffset));
if (miss_on_bound_function) {
jmp(&done);
// Non-instance prototype: Fetch prototype from constructor field
// in initial map.
bind(&non_instance);
GetMapConstructor(result, result, scratch);
}
// All done.
bind(&done);
}
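A minimal sketch, with hypothetical types, of the resolution the trimmed-down TryGetFunctionPrototype performs now that the bound-function handling is gone: the prototype-or-initial-map slot either already holds the prototype, or holds the initial map, in which case the map's prototype field is the answer.
// Hypothetical stand-in types; the real code reads tagged heap fields.
struct FunctionMapModel { const void* prototype; };
struct FunctionModel {
  const FunctionMapModel* initial_map;  // non-null when the slot holds an initial map
  const void* resolved_prototype;       // the slot value otherwise
};
const void* GetFunctionPrototype(const FunctionModel& f) {
  return f.initial_map != nullptr ? f.initial_map->prototype : f.resolved_prototype;
}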
@ -731,11 +731,8 @@ class MacroAssembler: public Assembler {
// function and jumps to the miss label if the fast checks fail. The
// function register will be untouched; the other registers may be
// clobbered.
void TryGetFunctionPrototype(Register function,
Register result,
Register scratch,
Label* miss,
bool miss_on_bound_function = false);
void TryGetFunctionPrototype(Register function, Register result,
Register scratch, Label* miss);
// Picks out an array index from the hash field.
// Register use:
@ -155,9 +155,9 @@ void StoreGlobalViaContextDescriptor::InitializePlatformSpecific(
}
void InstanceofDescriptor::InitializePlatformSpecific(
void InstanceOfDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
Register registers[] = {left(), right()};
Register registers[] = {LeftRegister(), RightRegister()};
data->InitializePlatformSpecific(arraysize(registers), registers);
}
@ -20,7 +20,7 @@ class PlatformInterfaceDescriptor;
V(VectorStoreTransition) \
V(VectorStoreICTrampoline) \
V(VectorStoreIC) \
V(Instanceof) \
V(InstanceOf) \
V(LoadWithVector) \
V(FastNewClosure) \
V(FastNewContext) \
@ -291,13 +291,13 @@ class VectorStoreTransitionDescriptor : public StoreDescriptor {
};
class InstanceofDescriptor : public CallInterfaceDescriptor {
class InstanceOfDescriptor final : public CallInterfaceDescriptor {
public:
DECLARE_DESCRIPTOR(InstanceofDescriptor, CallInterfaceDescriptor)
DECLARE_DESCRIPTOR(InstanceOfDescriptor, CallInterfaceDescriptor)
enum ParameterIndices { kLeftIndex, kRightIndex, kParameterCount };
static const Register left();
static const Register right();
static const Register LeftRegister();
static const Register RightRegister();
};
@ -1413,202 +1413,105 @@ void LoadIndexedStringStub::Generate(MacroAssembler* masm) {
}
// Uses registers a0 to t0.
// Expected input (depending on whether args are in registers or on the stack):
// * object: a0 or at sp + 1 * kPointerSize.
// * function: a1 or at sp.
//
// An inlined call site may have been generated before calling this stub.
// In this case the offset to the inline site to patch is passed on the stack,
// in the safepoint slot for register t0.
void InstanceofStub::Generate(MacroAssembler* masm) {
// Call site inlining and patching implies arguments in registers.
DCHECK(HasArgsInRegisters() || !HasCallSiteInlineCheck());
void InstanceOfStub::Generate(MacroAssembler* masm) {
Register const object = a1; // Object (lhs).
Register const function = a0; // Function (rhs).
Register const object_map = a2; // Map of {object}.
Register const function_map = a3; // Map of {function}.
Register const function_prototype = t0; // Prototype of {function}.
Register const scratch = t1;
// Fixed register usage throughout the stub:
const Register object = a0; // Object (lhs).
Register map = a3; // Map of the object.
const Register function = a1; // Function (rhs).
const Register prototype = t0; // Prototype of the function.
const Register inline_site = t5;
const Register scratch = a2;
DCHECK(object.is(InstanceOfDescriptor::LeftRegister()));
DCHECK(function.is(InstanceOfDescriptor::RightRegister()));
const int32_t kDeltaToLoadBoolResult = 5 * kPointerSize;
// Check if {object} is a smi.
Label object_is_smi;
__ JumpIfSmi(object, &object_is_smi);
Label slow, loop, is_instance, is_not_instance, not_js_object;
// Lookup the {function} and the {object} map in the global instanceof cache.
// Note: This is safe because we clear the global instanceof cache whenever
// we change the prototype of any object.
Label fast_case, slow_case;
__ lw(object_map, FieldMemOperand(object, HeapObject::kMapOffset));
__ LoadRoot(at, Heap::kInstanceofCacheFunctionRootIndex);
__ Branch(&fast_case, ne, function, Operand(at));
__ LoadRoot(at, Heap::kInstanceofCacheMapRootIndex);
__ Branch(&fast_case, ne, object_map, Operand(at));
__ Ret(USE_DELAY_SLOT);
__ LoadRoot(v0, Heap::kInstanceofCacheAnswerRootIndex); // In delay slot.
if (!HasArgsInRegisters()) {
__ lw(object, MemOperand(sp, 1 * kPointerSize));
__ lw(function, MemOperand(sp, 0));
}
// If {object} is a smi we can safely return false if {function} is a JS
// function, otherwise we have to miss to the runtime and throw an exception.
__ bind(&object_is_smi);
__ JumpIfSmi(function, &slow_case);
__ GetObjectType(function, function_map, scratch);
__ Branch(&slow_case, ne, scratch, Operand(JS_FUNCTION_TYPE));
__ Ret(USE_DELAY_SLOT);
__ LoadRoot(v0, Heap::kFalseValueRootIndex); // In delay slot.
// Check that the left hand is a JS object and load map.
__ JumpIfSmi(object, &not_js_object);
__ IsObjectJSObjectType(object, map, scratch, &not_js_object);
// Fast-case: The {function} must be a valid JSFunction.
__ bind(&fast_case);
__ JumpIfSmi(function, &slow_case);
__ GetObjectType(function, function_map, scratch);
__ Branch(&slow_case, ne, scratch, Operand(JS_FUNCTION_TYPE));
// If there is a call site cache don't look in the global cache, but do the
// real lookup and update the call site cache.
if (!HasCallSiteInlineCheck() && !ReturnTrueFalseObject()) {
Label miss;
__ LoadRoot(at, Heap::kInstanceofCacheFunctionRootIndex);
__ Branch(&miss, ne, function, Operand(at));
__ LoadRoot(at, Heap::kInstanceofCacheMapRootIndex);
__ Branch(&miss, ne, map, Operand(at));
__ LoadRoot(v0, Heap::kInstanceofCacheAnswerRootIndex);
__ DropAndRet(HasArgsInRegisters() ? 0 : 2);
// Ensure that {function} has an instance prototype.
__ lbu(scratch, FieldMemOperand(function_map, Map::kBitFieldOffset));
__ And(at, scratch, Operand(1 << Map::kHasNonInstancePrototype));
__ Branch(&slow_case, ne, at, Operand(zero_reg));
__ bind(&miss);
}
// Ensure that {function} is not bound.
Register const shared_info = scratch;
__ lw(shared_info,
FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset));
__ lbu(scratch,
FieldMemOperand(shared_info, SharedFunctionInfo::kBoundByteOffset));
__ And(at, scratch, Operand(1 << SharedFunctionInfo::kBoundBitWithinByte));
__ Branch(&slow_case, ne, at, Operand(zero_reg));
// Get the prototype of the function.
__ TryGetFunctionPrototype(function, prototype, scratch, &slow, true);
// Get the "prototype" (or initial map) of the {function}.
__ lw(function_prototype,
FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
__ AssertNotSmi(function_prototype);
// Check that the function prototype is a JS object.
__ JumpIfSmi(prototype, &slow);
__ IsObjectJSObjectType(prototype, scratch, scratch, &slow);
// Resolve the prototype if the {function} has an initial map. Afterwards the
// {function_prototype} will be either the JSReceiver prototype object or the
// hole value, which means that no instances of the {function} were created so
// far and hence we should return false.
Label function_prototype_valid;
__ GetObjectType(function_prototype, scratch, scratch);
__ Branch(&function_prototype_valid, ne, scratch, Operand(MAP_TYPE));
__ lw(function_prototype,
FieldMemOperand(function_prototype, Map::kPrototypeOffset));
__ bind(&function_prototype_valid);
__ AssertNotSmi(function_prototype);
// Update the global instanceof or call site inlined cache with the current
// map and function. The cached answer will be set when it is known below.
if (!HasCallSiteInlineCheck()) {
__ StoreRoot(function, Heap::kInstanceofCacheFunctionRootIndex);
__ StoreRoot(map, Heap::kInstanceofCacheMapRootIndex);
} else {
DCHECK(HasArgsInRegisters());
// Patch the (relocated) inlined map check.
// Update the global instanceof cache with the current {object} map and
// {function}. The cached answer will be set when it is known below.
__ StoreRoot(function, Heap::kInstanceofCacheFunctionRootIndex);
__ StoreRoot(object_map, Heap::kInstanceofCacheMapRootIndex);
// The offset was stored in t0 safepoint slot.
// (See LCodeGen::DoDeferredLInstanceOfKnownGlobal).
__ LoadFromSafepointRegisterSlot(scratch, t0);
__ Subu(inline_site, ra, scratch);
// Get the map location in scratch and patch it.
__ GetRelocatedValue(inline_site, scratch, v1); // v1 used as scratch.
__ sw(map, FieldMemOperand(scratch, Cell::kValueOffset));
__ mov(t4, map);
// |scratch| points at the beginning of the cell. Calculate the field
// containing the map.
__ Addu(function, scratch, Operand(Cell::kValueOffset - 1));
__ RecordWriteField(scratch, Cell::kValueOffset, t4, function,
kRAHasNotBeenSaved, kDontSaveFPRegs,
OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
}
// Register mapping: a3 is object map and t0 is function prototype.
// Get prototype of object into a2.
__ lw(scratch, FieldMemOperand(map, Map::kPrototypeOffset));
// We don't need map any more. Use it as a scratch register.
Register scratch2 = map;
map = no_reg;
// Loop through the prototype chain looking for the function prototype.
__ LoadRoot(scratch2, Heap::kNullValueRootIndex);
// Loop through the prototype chain looking for the {function} prototype.
// Assume true, and change to false if not found.
Register const object_prototype = object_map;
Register const null = scratch;
Label done, loop;
__ LoadRoot(v0, Heap::kTrueValueRootIndex);
__ LoadRoot(null, Heap::kNullValueRootIndex);
__ bind(&loop);
__ Branch(&is_instance, eq, scratch, Operand(prototype));
__ Branch(&is_not_instance, eq, scratch, Operand(scratch2));
__ lw(scratch, FieldMemOperand(scratch, HeapObject::kMapOffset));
__ lw(scratch, FieldMemOperand(scratch, Map::kPrototypeOffset));
__ Branch(&loop);
__ lw(object_prototype, FieldMemOperand(object_map, Map::kPrototypeOffset));
__ Branch(&done, eq, object_prototype, Operand(function_prototype));
__ Branch(USE_DELAY_SLOT, &loop, ne, object_prototype, Operand(null));
__ lw(object_map, FieldMemOperand(object_prototype, HeapObject::kMapOffset));
__ LoadRoot(v0, Heap::kFalseValueRootIndex);
__ bind(&done);
__ Ret(USE_DELAY_SLOT);
__ StoreRoot(v0, Heap::kInstanceofCacheAnswerRootIndex); // In delay slot.
__ bind(&is_instance);
DCHECK_EQ(static_cast<Smi*>(0), Smi::FromInt(0));
if (!HasCallSiteInlineCheck()) {
__ mov(v0, zero_reg);
__ StoreRoot(v0, Heap::kInstanceofCacheAnswerRootIndex);
if (ReturnTrueFalseObject()) {
__ LoadRoot(v0, Heap::kTrueValueRootIndex);
}
} else {
// Patch the call site to return true.
__ LoadRoot(v0, Heap::kTrueValueRootIndex);
__ Addu(inline_site, inline_site, Operand(kDeltaToLoadBoolResult));
// Get the boolean result location in scratch and patch it.
__ PatchRelocatedValue(inline_site, scratch, v0);
if (!ReturnTrueFalseObject()) {
__ mov(v0, zero_reg);
}
}
__ DropAndRet(HasArgsInRegisters() ? 0 : 2);
__ bind(&is_not_instance);
if (!HasCallSiteInlineCheck()) {
__ li(v0, Operand(Smi::FromInt(1)));
__ StoreRoot(v0, Heap::kInstanceofCacheAnswerRootIndex);
if (ReturnTrueFalseObject()) {
__ LoadRoot(v0, Heap::kFalseValueRootIndex);
}
} else {
// Patch the call site to return false.
__ LoadRoot(v0, Heap::kFalseValueRootIndex);
__ Addu(inline_site, inline_site, Operand(kDeltaToLoadBoolResult));
// Get the boolean result location in scratch and patch it.
__ PatchRelocatedValue(inline_site, scratch, v0);
if (!ReturnTrueFalseObject()) {
__ li(v0, Operand(Smi::FromInt(1)));
}
}
__ DropAndRet(HasArgsInRegisters() ? 0 : 2);
Label object_not_null, object_not_null_or_smi;
__ bind(&not_js_object);
// Before null, smi and string value checks, check that the rhs is a function
// as for a non-function rhs an exception needs to be thrown.
__ JumpIfSmi(function, &slow);
__ GetObjectType(function, scratch2, scratch);
__ Branch(&slow, ne, scratch, Operand(JS_FUNCTION_TYPE));
// Null is not an instance of anything.
__ Branch(&object_not_null, ne, object,
Operand(isolate()->factory()->null_value()));
if (ReturnTrueFalseObject()) {
__ LoadRoot(v0, Heap::kFalseValueRootIndex);
} else {
__ li(v0, Operand(Smi::FromInt(1)));
}
__ DropAndRet(HasArgsInRegisters() ? 0 : 2);
__ bind(&object_not_null);
// Smi values are not instances of anything.
__ JumpIfNotSmi(object, &object_not_null_or_smi);
if (ReturnTrueFalseObject()) {
__ LoadRoot(v0, Heap::kFalseValueRootIndex);
} else {
__ li(v0, Operand(Smi::FromInt(1)));
}
__ DropAndRet(HasArgsInRegisters() ? 0 : 2);
__ bind(&object_not_null_or_smi);
// String values are not instances of anything.
__ IsObjectJSStringType(object, scratch, &slow);
if (ReturnTrueFalseObject()) {
__ LoadRoot(v0, Heap::kFalseValueRootIndex);
} else {
__ li(v0, Operand(Smi::FromInt(1)));
}
__ DropAndRet(HasArgsInRegisters() ? 0 : 2);
// Slow-case. Tail call builtin.
__ bind(&slow);
if (!ReturnTrueFalseObject()) {
if (HasArgsInRegisters()) {
__ Push(a0, a1);
}
__ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION);
} else {
{
FrameScope scope(masm, StackFrame::INTERNAL);
__ Push(a0, a1);
__ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_FUNCTION);
}
__ mov(a0, v0);
__ LoadRoot(v0, Heap::kTrueValueRootIndex);
__ DropAndRet(HasArgsInRegisters() ? 0 : 2, eq, a0, Operand(zero_reg));
__ LoadRoot(v0, Heap::kFalseValueRootIndex);
__ DropAndRet(HasArgsInRegisters() ? 0 : 2);
}
// Slow-case: Call the runtime function.
__ bind(&slow_case);
__ Push(object, function);
__ TailCallRuntime(Runtime::kInstanceOf, 2, 1);
}
@ -46,8 +46,8 @@ const Register StoreGlobalViaContextDescriptor::SlotRegister() { return a2; }
const Register StoreGlobalViaContextDescriptor::ValueRegister() { return a0; }
const Register InstanceofDescriptor::left() { return a0; }
const Register InstanceofDescriptor::right() { return a1; }
const Register InstanceOfDescriptor::LeftRegister() { return a1; }
const Register InstanceOfDescriptor::RightRegister() { return a0; }
const Register ArgumentsAccessReadDescriptor::index() { return a1; }
@ -2057,11 +2057,17 @@ void LCodeGen::EmitBranchF(InstrType instr,
}
template<class InstrType>
void LCodeGen::EmitFalseBranch(InstrType instr,
Condition condition,
Register src1,
const Operand& src2) {
template <class InstrType>
void LCodeGen::EmitTrueBranch(InstrType instr, Condition condition,
Register src1, const Operand& src2) {
int true_block = instr->TrueDestination(chunk_);
__ Branch(chunk_->GetAssemblyLabel(true_block), condition, src1, src2);
}
template <class InstrType>
void LCodeGen::EmitFalseBranch(InstrType instr, Condition condition,
Register src1, const Operand& src2) {
int false_block = instr->FalseDestination(chunk_);
__ Branch(chunk_->GetAssemblyLabel(false_block), condition, src1, src2);
}
@ -2651,142 +2657,38 @@ void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) {
void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
DCHECK(ToRegister(instr->context()).is(cp));
Label true_label, done;
DCHECK(ToRegister(instr->left()).is(a0)); // Object is in a0.
DCHECK(ToRegister(instr->right()).is(a1)); // Function is in a1.
Register result = ToRegister(instr->result());
DCHECK(result.is(v0));
InstanceofStub stub(isolate(), InstanceofStub::kArgsInRegisters);
DCHECK(ToRegister(instr->left()).is(InstanceOfDescriptor::LeftRegister()));
DCHECK(ToRegister(instr->right()).is(InstanceOfDescriptor::RightRegister()));
DCHECK(ToRegister(instr->result()).is(v0));
InstanceOfStub stub(isolate());
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
__ Branch(&true_label, eq, result, Operand(zero_reg));
__ li(result, Operand(factory()->false_value()));
__ Branch(&done);
__ bind(&true_label);
__ li(result, Operand(factory()->true_value()));
__ bind(&done);
}
void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
class DeferredInstanceOfKnownGlobal final : public LDeferredCode {
public:
DeferredInstanceOfKnownGlobal(LCodeGen* codegen,
LInstanceOfKnownGlobal* instr)
: LDeferredCode(codegen), instr_(instr) { }
void Generate() override {
codegen()->DoDeferredInstanceOfKnownGlobal(instr_, &map_check_);
}
LInstruction* instr() override { return instr_; }
Label* map_check() { return &map_check_; }
void LCodeGen::DoHasInPrototypeChainAndBranch(
LHasInPrototypeChainAndBranch* instr) {
Register const object = ToRegister(instr->object());
Register const object_map = scratch0();
Register const object_prototype = object_map;
Register const prototype = ToRegister(instr->prototype());
private:
LInstanceOfKnownGlobal* instr_;
Label map_check_;
};
DeferredInstanceOfKnownGlobal* deferred;
deferred = new(zone()) DeferredInstanceOfKnownGlobal(this, instr);
Label done, false_result;
Register object = ToRegister(instr->value());
Register temp = ToRegister(instr->temp());
Register result = ToRegister(instr->result());
DCHECK(object.is(a0));
DCHECK(result.is(v0));
// A Smi is not an instance of anything.
__ JumpIfSmi(object, &false_result);
// This is the inlined call site instanceof cache. The two occurrences of the
// hole value will be patched to the last map/result pair generated by the
// instanceof stub.
Label cache_miss;
Register map = temp;
__ lw(map, FieldMemOperand(object, HeapObject::kMapOffset));
Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
__ bind(deferred->map_check()); // Label for calculating code patching.
// We use Factory::the_hole_value() on purpose instead of loading from the
// root array to force relocation to be able to later patch with
// the cached map.
Handle<Cell> cell = factory()->NewCell(factory()->the_hole_value());
__ li(at, Operand(cell));
__ lw(at, FieldMemOperand(at, Cell::kValueOffset));
__ BranchShort(&cache_miss, ne, map, Operand(at));
// We use Factory::the_hole_value() on purpose instead of loading from the
// root array to force relocation to be able to later patch
// with true or false. The distance from map check has to be constant.
__ li(result, Operand(factory()->the_hole_value()), CONSTANT_SIZE);
__ Branch(&done);
// The inlined call site cache did not match. Check null and string before
// calling the deferred code.
__ bind(&cache_miss);
// Null is not instance of anything.
__ LoadRoot(temp, Heap::kNullValueRootIndex);
__ Branch(&false_result, eq, object, Operand(temp));
// String values are not instances of anything.
Condition cc = __ IsObjectStringType(object, temp, temp);
__ Branch(&false_result, cc, temp, Operand(zero_reg));
// Go to the deferred code.
__ Branch(deferred->entry());
__ bind(&false_result);
__ LoadRoot(result, Heap::kFalseValueRootIndex);
// Here result has either true or false. Deferred code also produces true or
// false object.
__ bind(deferred->exit());
__ bind(&done);
}
void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
Label* map_check) {
Register result = ToRegister(instr->result());
DCHECK(result.is(v0));
InstanceofStub::Flags flags = InstanceofStub::kNoFlags;
flags = static_cast<InstanceofStub::Flags>(
flags | InstanceofStub::kArgsInRegisters);
flags = static_cast<InstanceofStub::Flags>(
flags | InstanceofStub::kCallSiteInlineCheck);
flags = static_cast<InstanceofStub::Flags>(
flags | InstanceofStub::kReturnTrueFalseObject);
InstanceofStub stub(isolate(), flags);
PushSafepointRegistersScope scope(this);
LoadContextFromDeferred(instr->context());
// Get the temp register reserved by the instruction. This needs to be t0 as
// its slot of the pushing of safepoint registers is used to communicate the
// offset to the location of the map check.
Register temp = ToRegister(instr->temp());
DCHECK(temp.is(t0));
__ li(InstanceofStub::right(), instr->function());
static const int kAdditionalDelta = 7;
int delta = masm_->InstructionsGeneratedSince(map_check) + kAdditionalDelta;
Label before_push_delta;
__ bind(&before_push_delta);
{
Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
__ li(temp, Operand(delta * kPointerSize), CONSTANT_SIZE);
__ StoreToSafepointRegisterSlot(temp, temp);
// The {object} must be a spec object. It's sufficient to know that {object}
// is not a smi, since all other non-spec objects have {null} prototypes and
// will be ruled out below.
if (instr->hydrogen()->ObjectNeedsSmiCheck()) {
__ SmiTst(object, at);
EmitFalseBranch(instr, eq, at, Operand(zero_reg));
}
CallCodeGeneric(stub.GetCode(),
RelocInfo::CODE_TARGET,
instr,
RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
LEnvironment* env = instr->GetDeferredLazyDeoptimizationEnvironment();
safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
// Put the result value into the result register slot and
// restore all registers.
__ StoreToSafepointRegisterSlot(result, result);
// Loop through the {object}'s prototype chain looking for the {prototype}.
__ lw(object_map, FieldMemOperand(object, HeapObject::kMapOffset));
Label loop;
__ bind(&loop);
__ lw(object_prototype, FieldMemOperand(object_map, Map::kPrototypeOffset));
EmitTrueBranch(instr, eq, object_prototype, Operand(prototype));
__ LoadRoot(at, Heap::kNullValueRootIndex);
EmitFalseBranch(instr, eq, object_prototype, Operand(at));
__ Branch(USE_DELAY_SLOT, &loop);
__ lw(object_map, FieldMemOperand(object_prototype, HeapObject::kMapOffset));
}
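With the deferred known-global path gone, the whole specialization is the short loop above: load the map of {object}, follow Map::kPrototypeOffset, branch true on a match with {prototype} and false on null. As a rough JS model of what LHasInPrototypeChainAndBranch decides (an illustrative sketch, not code from this patch):

function hasInPrototypeChain(object, prototype) {
  // Same walk as the emitted loop: true on a match, false once null is reached.
  // Assumes {object} is already known to be a spec object (see the smi check above).
  for (var p = Object.getPrototypeOf(object); p !== null;
       p = Object.getPrototypeOf(p)) {
    if (p === prototype) return true;
  }
  return false;
}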

@ -113,9 +113,6 @@ class LCodeGen: public LCodeGenBase {
void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr);
void DoDeferredStringCharFromCode(LStringCharFromCode* instr);
void DoDeferredAllocate(LAllocate* instr);
void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
Label* map_check);
void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);
void DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr,
Register result,
@ -278,10 +275,11 @@ class LCodeGen: public LCodeGenBase {
Condition condition,
FPURegister src1,
FPURegister src2);
template<class InstrType>
void EmitFalseBranch(InstrType instr,
Condition condition,
Register src1,
template <class InstrType>
void EmitTrueBranch(InstrType instr, Condition condition, Register src1,
const Operand& src2);
template <class InstrType>
void EmitFalseBranch(InstrType instr, Condition condition, Register src1,
const Operand& src2);
template<class InstrType>
void EmitFalseBranchF(InstrType instr,

@ -933,22 +933,14 @@ void LChunkBuilder::AddInstruction(LInstruction* instr,
if (instr->IsCall()) {
HValue* hydrogen_value_for_lazy_bailout = hydrogen_val;
LInstruction* instruction_needing_environment = NULL;
if (hydrogen_val->HasObservableSideEffects()) {
HSimulate* sim = HSimulate::cast(hydrogen_val->next());
instruction_needing_environment = instr;
sim->ReplayEnvironment(current_block_->last_environment());
hydrogen_value_for_lazy_bailout = sim;
}
LInstruction* bailout = AssignEnvironment(new(zone()) LLazyBailout());
bailout->set_hydrogen_value(hydrogen_value_for_lazy_bailout);
chunk_->AddInstruction(bailout, current_block_);
if (instruction_needing_environment != NULL) {
// Store the lazy deopt environment with the instruction if needed.
// Right now it is only used for LInstanceOfKnownGlobal.
instruction_needing_environment->
SetDeferredLazyDeoptimizationEnvironment(bailout->environment());
}
}
}
@ -999,22 +991,21 @@ LInstruction* LChunkBuilder::DoArgumentsElements(HArgumentsElements* elems) {
LInstruction* LChunkBuilder::DoInstanceOf(HInstanceOf* instr) {
LOperand* left =
UseFixed(instr->left(), InstanceOfDescriptor::LeftRegister());
LOperand* right =
UseFixed(instr->right(), InstanceOfDescriptor::RightRegister());
LOperand* context = UseFixed(instr->context(), cp);
LInstanceOf* result =
new(zone()) LInstanceOf(context, UseFixed(instr->left(), a0),
UseFixed(instr->right(), a1));
LInstanceOf* result = new (zone()) LInstanceOf(context, left, right);
return MarkAsCall(DefineFixed(result, v0), instr);
}
LInstruction* LChunkBuilder::DoInstanceOfKnownGlobal(
HInstanceOfKnownGlobal* instr) {
LInstanceOfKnownGlobal* result =
new(zone()) LInstanceOfKnownGlobal(
UseFixed(instr->context(), cp),
UseFixed(instr->left(), a0),
FixedTemp(t0));
return MarkAsCall(DefineFixed(result, v0), instr);
LInstruction* LChunkBuilder::DoHasInPrototypeChainAndBranch(
HHasInPrototypeChainAndBranch* instr) {
LOperand* object = UseRegister(instr->object());
LOperand* prototype = UseRegister(instr->prototype());
return new (zone()) LHasInPrototypeChainAndBranch(object, prototype);
}

@ -83,10 +83,10 @@ class LCodeGen;
V(GetCachedArrayIndex) \
V(Goto) \
V(HasCachedArrayIndexAndBranch) \
V(HasInPrototypeChainAndBranch) \
V(HasInstanceTypeAndBranch) \
V(InnerAllocatedObject) \
V(InstanceOf) \
V(InstanceOfKnownGlobal) \
V(InstructionGap) \
V(Integer32ToDouble) \
V(InvokeFunction) \
@ -232,8 +232,6 @@ class LInstruction : public ZoneObject {
void set_hydrogen_value(HValue* value) { hydrogen_value_ = value; }
HValue* hydrogen_value() const { return hydrogen_value_; }
virtual void SetDeferredLazyDeoptimizationEnvironment(LEnvironment* env) { }
void MarkAsCall() { bit_field_ = IsCallBits::update(bit_field_, true); }
bool IsCall() const { return IsCallBits::decode(bit_field_); }
@ -1170,41 +1168,27 @@ class LInstanceOf final : public LTemplateInstruction<1, 3, 0> {
inputs_[2] = right;
}
LOperand* context() { return inputs_[0]; }
LOperand* left() { return inputs_[1]; }
LOperand* right() { return inputs_[2]; }
LOperand* context() const { return inputs_[0]; }
LOperand* left() const { return inputs_[1]; }
LOperand* right() const { return inputs_[2]; }
DECLARE_CONCRETE_INSTRUCTION(InstanceOf, "instance-of")
};
class LInstanceOfKnownGlobal final : public LTemplateInstruction<1, 2, 1> {
class LHasInPrototypeChainAndBranch final : public LControlInstruction<2, 0> {
public:
LInstanceOfKnownGlobal(LOperand* context, LOperand* value, LOperand* temp) {
inputs_[0] = context;
inputs_[1] = value;
temps_[0] = temp;
LHasInPrototypeChainAndBranch(LOperand* object, LOperand* prototype) {
inputs_[0] = object;
inputs_[1] = prototype;
}
LOperand* context() { return inputs_[0]; }
LOperand* value() { return inputs_[1]; }
LOperand* temp() { return temps_[0]; }
LOperand* object() const { return inputs_[0]; }
LOperand* prototype() const { return inputs_[1]; }
DECLARE_CONCRETE_INSTRUCTION(InstanceOfKnownGlobal,
"instance-of-known-global")
DECLARE_HYDROGEN_ACCESSOR(InstanceOfKnownGlobal)
Handle<JSFunction> function() const { return hydrogen()->function(); }
LEnvironment* GetDeferredLazyDeoptimizationEnvironment() {
return lazy_deopt_env_;
}
virtual void SetDeferredLazyDeoptimizationEnvironment(
LEnvironment* env) override {
lazy_deopt_env_ = env;
}
private:
LEnvironment* lazy_deopt_env_;
DECLARE_CONCRETE_INSTRUCTION(HasInPrototypeChainAndBranch,
"has-in-prototype-chain-and-branch")
DECLARE_HYDROGEN_ACCESSOR(HasInPrototypeChainAndBranch)
};

@ -4270,34 +4270,8 @@ void MacroAssembler::GetMapConstructor(Register result, Register map,
}
void MacroAssembler::TryGetFunctionPrototype(Register function,
Register result,
Register scratch,
Label* miss,
bool miss_on_bound_function) {
Label non_instance;
if (miss_on_bound_function) {
// Check that the receiver isn't a smi.
JumpIfSmi(function, miss);
// Check that the function really is a function. Load map into result reg.
GetObjectType(function, result, scratch);
Branch(miss, ne, scratch, Operand(JS_FUNCTION_TYPE));
lw(scratch,
FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset));
lw(scratch,
FieldMemOperand(scratch, SharedFunctionInfo::kCompilerHintsOffset));
And(scratch, scratch,
Operand(Smi::FromInt(1 << SharedFunctionInfo::kBoundFunction)));
Branch(miss, ne, scratch, Operand(zero_reg));
// Make sure that the function has an instance prototype.
lbu(scratch, FieldMemOperand(result, Map::kBitFieldOffset));
And(scratch, scratch, Operand(1 << Map::kHasNonInstancePrototype));
Branch(&non_instance, ne, scratch, Operand(zero_reg));
}
void MacroAssembler::TryGetFunctionPrototype(Register function, Register result,
Register scratch, Label* miss) {
// Get the prototype or initial map from the function.
lw(result,
FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
@ -4316,15 +4290,6 @@ void MacroAssembler::TryGetFunctionPrototype(Register function,
// Get the prototype from the initial map.
lw(result, FieldMemOperand(result, Map::kPrototypeOffset));
if (miss_on_bound_function) {
jmp(&done);
// Non-instance prototype: Fetch prototype from constructor field
// in initial map.
bind(&non_instance);
GetMapConstructor(result, result, scratch, scratch);
}
// All done.
bind(&done);
}
@ -5556,65 +5521,6 @@ void MacroAssembler::CallCFunctionHelper(Register function,
#undef BRANCH_ARGS_CHECK
void MacroAssembler::PatchRelocatedValue(Register li_location,
Register scratch,
Register new_value) {
lw(scratch, MemOperand(li_location));
// At this point scratch is a lui(at, ...) instruction.
if (emit_debug_code()) {
And(scratch, scratch, kOpcodeMask);
Check(eq, kTheInstructionToPatchShouldBeALui,
scratch, Operand(LUI));
lw(scratch, MemOperand(li_location));
}
srl(t9, new_value, kImm16Bits);
Ins(scratch, t9, 0, kImm16Bits);
sw(scratch, MemOperand(li_location));
lw(scratch, MemOperand(li_location, kInstrSize));
// scratch is now ori(at, ...).
if (emit_debug_code()) {
And(scratch, scratch, kOpcodeMask);
Check(eq, kTheInstructionToPatchShouldBeAnOri,
scratch, Operand(ORI));
lw(scratch, MemOperand(li_location, kInstrSize));
}
Ins(scratch, new_value, 0, kImm16Bits);
sw(scratch, MemOperand(li_location, kInstrSize));
// Update the I-cache so the new lui and ori can be executed.
FlushICache(li_location, 2);
}
void MacroAssembler::GetRelocatedValue(Register li_location,
Register value,
Register scratch) {
lw(value, MemOperand(li_location));
if (emit_debug_code()) {
And(value, value, kOpcodeMask);
Check(eq, kTheInstructionShouldBeALui,
value, Operand(LUI));
lw(value, MemOperand(li_location));
}
// value now holds a lui instruction. Extract the immediate.
sll(value, value, kImm16Bits);
lw(scratch, MemOperand(li_location, kInstrSize));
if (emit_debug_code()) {
And(scratch, scratch, kOpcodeMask);
Check(eq, kTheInstructionShouldBeAnOri,
scratch, Operand(ORI));
lw(scratch, MemOperand(li_location, kInstrSize));
}
// "scratch" now holds an ori instruction. Extract the immediate.
andi(scratch, scratch, kImm16Mask);
// Merge the results.
or_(value, value, scratch);
}
void MacroAssembler::CheckPageFlag(
Register object,
Register scratch,

@ -1041,11 +1041,8 @@ class MacroAssembler: public Assembler {
// function and jumps to the miss label if the fast checks fail. The
// function register will be untouched; the other registers may be
// clobbered.
void TryGetFunctionPrototype(Register function,
Register result,
Register scratch,
Label* miss,
bool miss_on_bound_function = false);
void TryGetFunctionPrototype(Register function, Register result,
Register scratch, Label* miss);
void GetObjectType(Register function,
Register map,
@ -1598,15 +1595,6 @@ const Operand& rt = Operand(zero_reg), BranchDelaySlot bd = PROTECT
void EnterFrame(StackFrame::Type type, bool load_constant_pool_pointer_reg);
void LeaveFrame(StackFrame::Type type);
// Patch the relocated value (lui/ori pair).
void PatchRelocatedValue(Register li_location,
Register scratch,
Register new_value);
// Get the relocated value (loaded data) from the lui/ori pair.
void GetRelocatedValue(Register li_location,
Register value,
Register scratch);
// Expects object in a0 and returns map with validated enum cache
// in a0. Assumes that any other register can be used as a scratch.
void CheckEnumCache(Register null_value, Label* call_runtime);

@ -1413,202 +1413,105 @@ void LoadIndexedStringStub::Generate(MacroAssembler* masm) {
}
// Uses registers a0 to a4.
// Expected input (depending on whether args are in registers or on the stack):
// * object: a0 or at sp + 1 * kPointerSize.
// * function: a1 or at sp.
//
// An inlined call site may have been generated before calling this stub.
// In this case the offset to the inline site to patch is passed on the stack,
// in the safepoint slot for register a4.
void InstanceofStub::Generate(MacroAssembler* masm) {
// Call site inlining and patching implies arguments in registers.
DCHECK(HasArgsInRegisters() || !HasCallSiteInlineCheck());
void InstanceOfStub::Generate(MacroAssembler* masm) {
Register const object = a1; // Object (lhs).
Register const function = a0; // Function (rhs).
Register const object_map = a2; // Map of {object}.
Register const function_map = a3; // Map of {function}.
Register const function_prototype = a4; // Prototype of {function}.
Register const scratch = a5;
// Fixed register usage throughout the stub:
const Register object = a0; // Object (lhs).
Register map = a3; // Map of the object.
const Register function = a1; // Function (rhs).
const Register prototype = a4; // Prototype of the function.
const Register inline_site = t1;
const Register scratch = a2;
DCHECK(object.is(InstanceOfDescriptor::LeftRegister()));
DCHECK(function.is(InstanceOfDescriptor::RightRegister()));
const int32_t kDeltaToLoadBoolResult = 7 * Assembler::kInstrSize;
// Check if {object} is a smi.
Label object_is_smi;
__ JumpIfSmi(object, &object_is_smi);
Label slow, loop, is_instance, is_not_instance, not_js_object;
// Lookup the {function} and the {object} map in the global instanceof cache.
// Note: This is safe because we clear the global instanceof cache whenever
// we change the prototype of any object.
Label fast_case, slow_case;
__ ld(object_map, FieldMemOperand(object, HeapObject::kMapOffset));
__ LoadRoot(at, Heap::kInstanceofCacheFunctionRootIndex);
__ Branch(&fast_case, ne, function, Operand(at));
__ LoadRoot(at, Heap::kInstanceofCacheMapRootIndex);
__ Branch(&fast_case, ne, object_map, Operand(at));
__ Ret(USE_DELAY_SLOT);
__ LoadRoot(v0, Heap::kInstanceofCacheAnswerRootIndex); // In delay slot.
if (!HasArgsInRegisters()) {
__ ld(object, MemOperand(sp, 1 * kPointerSize));
__ ld(function, MemOperand(sp, 0));
}
// If {object} is a smi we can safely return false if {function} is a JS
// function, otherwise we have to miss to the runtime and throw an exception.
__ bind(&object_is_smi);
__ JumpIfSmi(function, &slow_case);
__ GetObjectType(function, function_map, scratch);
__ Branch(&slow_case, ne, scratch, Operand(JS_FUNCTION_TYPE));
__ Ret(USE_DELAY_SLOT);
__ LoadRoot(v0, Heap::kFalseValueRootIndex); // In delay slot.
// Check that the left hand is a JS object and load map.
__ JumpIfSmi(object, &not_js_object);
__ IsObjectJSObjectType(object, map, scratch, &not_js_object);
// Fast-case: The {function} must be a valid JSFunction.
__ bind(&fast_case);
__ JumpIfSmi(function, &slow_case);
__ GetObjectType(function, function_map, scratch);
__ Branch(&slow_case, ne, scratch, Operand(JS_FUNCTION_TYPE));
// If there is a call site cache don't look in the global cache, but do the
// real lookup and update the call site cache.
if (!HasCallSiteInlineCheck() && !ReturnTrueFalseObject()) {
Label miss;
__ LoadRoot(at, Heap::kInstanceofCacheFunctionRootIndex);
__ Branch(&miss, ne, function, Operand(at));
__ LoadRoot(at, Heap::kInstanceofCacheMapRootIndex);
__ Branch(&miss, ne, map, Operand(at));
__ LoadRoot(v0, Heap::kInstanceofCacheAnswerRootIndex);
__ DropAndRet(HasArgsInRegisters() ? 0 : 2);
// Ensure that {function} has an instance prototype.
__ lbu(scratch, FieldMemOperand(function_map, Map::kBitFieldOffset));
__ And(at, scratch, Operand(1 << Map::kHasNonInstancePrototype));
__ Branch(&slow_case, ne, at, Operand(zero_reg));
__ bind(&miss);
}
// Ensure that {function} is not bound.
Register const shared_info = scratch;
__ ld(shared_info,
FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset));
__ lbu(scratch,
FieldMemOperand(shared_info, SharedFunctionInfo::kBoundByteOffset));
__ And(at, scratch, Operand(1 << SharedFunctionInfo::kBoundBitWithinByte));
__ Branch(&slow_case, ne, at, Operand(zero_reg));
// Get the prototype of the function.
__ TryGetFunctionPrototype(function, prototype, scratch, &slow, true);
// Get the "prototype" (or initial map) of the {function}.
__ ld(function_prototype,
FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
__ AssertNotSmi(function_prototype);
// Check that the function prototype is a JS object.
__ JumpIfSmi(prototype, &slow);
__ IsObjectJSObjectType(prototype, scratch, scratch, &slow);
// Resolve the prototype if the {function} has an initial map. Afterwards the
// {function_prototype} will be either the JSReceiver prototype object or the
// hole value, which means that no instances of the {function} were created so
// far and hence we should return false.
Label function_prototype_valid;
__ GetObjectType(function_prototype, scratch, scratch);
__ Branch(&function_prototype_valid, ne, scratch, Operand(MAP_TYPE));
__ ld(function_prototype,
FieldMemOperand(function_prototype, Map::kPrototypeOffset));
__ bind(&function_prototype_valid);
__ AssertNotSmi(function_prototype);
// Update the global instanceof or call site inlined cache with the current
// map and function. The cached answer will be set when it is known below.
if (!HasCallSiteInlineCheck()) {
__ StoreRoot(function, Heap::kInstanceofCacheFunctionRootIndex);
__ StoreRoot(map, Heap::kInstanceofCacheMapRootIndex);
} else {
DCHECK(HasArgsInRegisters());
// Patch the (relocated) inlined map check.
// Update the global instanceof cache with the current {object} map and
// {function}. The cached answer will be set when it is known below.
__ StoreRoot(function, Heap::kInstanceofCacheFunctionRootIndex);
__ StoreRoot(object_map, Heap::kInstanceofCacheMapRootIndex);
// The offset was stored in a4 safepoint slot.
// (See LCodeGen::DoDeferredLInstanceOfKnownGlobal).
__ LoadFromSafepointRegisterSlot(scratch, a4);
__ Dsubu(inline_site, ra, scratch);
// Get the map location in scratch and patch it.
__ GetRelocatedValue(inline_site, scratch, v1); // v1 used as scratch.
__ sd(map, FieldMemOperand(scratch, Cell::kValueOffset));
__ mov(t0, map);
// |scratch| points at the beginning of the cell. Calculate the
// field containing the map.
__ Daddu(function, scratch, Operand(Cell::kValueOffset - 1));
__ RecordWriteField(scratch, Cell::kValueOffset, t0, function,
kRAHasNotBeenSaved, kDontSaveFPRegs,
OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
}
// Register mapping: a3 is object map and a4 is function prototype.
// Get prototype of object into a2.
__ ld(scratch, FieldMemOperand(map, Map::kPrototypeOffset));
// We don't need map any more. Use it as a scratch register.
Register scratch2 = map;
map = no_reg;
// Loop through the prototype chain looking for the function prototype.
__ LoadRoot(scratch2, Heap::kNullValueRootIndex);
// Loop through the prototype chain looking for the {function} prototype.
// Assume true, and change to false if not found.
Register const object_prototype = object_map;
Register const null = scratch;
Label done, loop;
__ LoadRoot(v0, Heap::kTrueValueRootIndex);
__ LoadRoot(null, Heap::kNullValueRootIndex);
__ bind(&loop);
__ Branch(&is_instance, eq, scratch, Operand(prototype));
__ Branch(&is_not_instance, eq, scratch, Operand(scratch2));
__ ld(scratch, FieldMemOperand(scratch, HeapObject::kMapOffset));
__ ld(scratch, FieldMemOperand(scratch, Map::kPrototypeOffset));
__ Branch(&loop);
__ ld(object_prototype, FieldMemOperand(object_map, Map::kPrototypeOffset));
__ Branch(&done, eq, object_prototype, Operand(function_prototype));
__ Branch(USE_DELAY_SLOT, &loop, ne, object_prototype, Operand(null));
__ ld(object_map, FieldMemOperand(object_prototype, HeapObject::kMapOffset));
__ LoadRoot(v0, Heap::kFalseValueRootIndex);
__ bind(&done);
__ Ret(USE_DELAY_SLOT);
__ StoreRoot(v0, Heap::kInstanceofCacheAnswerRootIndex); // In delay slot.
__ bind(&is_instance);
DCHECK_EQ(static_cast<Smi*>(0), Smi::FromInt(0));
if (!HasCallSiteInlineCheck()) {
__ mov(v0, zero_reg);
__ StoreRoot(v0, Heap::kInstanceofCacheAnswerRootIndex);
if (ReturnTrueFalseObject()) {
__ LoadRoot(v0, Heap::kTrueValueRootIndex);
}
} else {
// Patch the call site to return true.
__ LoadRoot(v0, Heap::kTrueValueRootIndex);
__ Daddu(inline_site, inline_site, Operand(kDeltaToLoadBoolResult));
// Get the boolean result location in scratch and patch it.
__ PatchRelocatedValue(inline_site, scratch, v0);
if (!ReturnTrueFalseObject()) {
__ mov(v0, zero_reg);
}
}
__ DropAndRet(HasArgsInRegisters() ? 0 : 2);
__ bind(&is_not_instance);
if (!HasCallSiteInlineCheck()) {
__ li(v0, Operand(Smi::FromInt(1)));
__ StoreRoot(v0, Heap::kInstanceofCacheAnswerRootIndex);
if (ReturnTrueFalseObject()) {
__ LoadRoot(v0, Heap::kFalseValueRootIndex);
}
} else {
// Patch the call site to return false.
__ LoadRoot(v0, Heap::kFalseValueRootIndex);
__ Daddu(inline_site, inline_site, Operand(kDeltaToLoadBoolResult));
// Get the boolean result location in scratch and patch it.
__ PatchRelocatedValue(inline_site, scratch, v0);
if (!ReturnTrueFalseObject()) {
__ li(v0, Operand(Smi::FromInt(1)));
}
}
__ DropAndRet(HasArgsInRegisters() ? 0 : 2);
Label object_not_null, object_not_null_or_smi;
__ bind(&not_js_object);
// Before null, smi and string value checks, check that the rhs is a function
// as for a non-function rhs an exception needs to be thrown.
__ JumpIfSmi(function, &slow);
__ GetObjectType(function, scratch2, scratch);
__ Branch(&slow, ne, scratch, Operand(JS_FUNCTION_TYPE));
// Null is not instance of anything.
__ Branch(&object_not_null, ne, object,
Operand(isolate()->factory()->null_value()));
if (ReturnTrueFalseObject()) {
__ LoadRoot(v0, Heap::kFalseValueRootIndex);
} else {
__ li(v0, Operand(Smi::FromInt(1)));
}
__ DropAndRet(HasArgsInRegisters() ? 0 : 2);
__ bind(&object_not_null);
// Smi values are not instances of anything.
__ JumpIfNotSmi(object, &object_not_null_or_smi);
if (ReturnTrueFalseObject()) {
__ LoadRoot(v0, Heap::kFalseValueRootIndex);
} else {
__ li(v0, Operand(Smi::FromInt(1)));
}
__ DropAndRet(HasArgsInRegisters() ? 0 : 2);
__ bind(&object_not_null_or_smi);
// String values are not instances of anything.
__ IsObjectJSStringType(object, scratch, &slow);
if (ReturnTrueFalseObject()) {
__ LoadRoot(v0, Heap::kFalseValueRootIndex);
} else {
__ li(v0, Operand(Smi::FromInt(1)));
}
__ DropAndRet(HasArgsInRegisters() ? 0 : 2);
// Slow-case. Tail call builtin.
__ bind(&slow);
if (!ReturnTrueFalseObject()) {
if (HasArgsInRegisters()) {
__ Push(a0, a1);
}
__ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION);
} else {
{
FrameScope scope(masm, StackFrame::INTERNAL);
__ Push(a0, a1);
__ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_FUNCTION);
}
__ mov(a0, v0);
__ LoadRoot(v0, Heap::kTrueValueRootIndex);
__ DropAndRet(HasArgsInRegisters() ? 0 : 2, eq, a0, Operand(zero_reg));
__ LoadRoot(v0, Heap::kFalseValueRootIndex);
__ DropAndRet(HasArgsInRegisters() ? 0 : 2);
}
// Slow-case: Call the runtime function.
__ bind(&slow_case);
__ Push(object, function);
__ TailCallRuntime(Runtime::kInstanceOf, 2, 1);
}
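The fast path above is guarded by the one-entry global instanceof cache, keyed on the {function} and the map of {object}; a hit is safe precisely because the cache is cleared whenever any prototype changes. An illustrative model of that cache (mapOf stands in for the hidden-class lookup and is not a real API):

var instanceofCache = { fn: undefined, map: undefined, answer: undefined };

function cachedInstanceOf(object, fn, compute) {
  if (instanceofCache.fn === fn && instanceofCache.map === mapOf(object)) {
    return instanceofCache.answer;        // cache hit: skip the chain walk
  }
  var answer = compute(object, fn);       // the stub's prototype-chain walk
  instanceofCache.fn = fn;                // remember map, function and answer
  instanceofCache.map = mapOf(object);    // for the next lookup
  instanceofCache.answer = answer;
  return answer;
}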

@ -46,8 +46,8 @@ const Register StoreGlobalViaContextDescriptor::SlotRegister() { return a2; }
const Register StoreGlobalViaContextDescriptor::ValueRegister() { return a0; }
const Register InstanceofDescriptor::left() { return a0; }
const Register InstanceofDescriptor::right() { return a1; }
const Register InstanceOfDescriptor::LeftRegister() { return a1; }
const Register InstanceOfDescriptor::RightRegister() { return a0; }
const Register ArgumentsAccessReadDescriptor::index() { return a1; }

@ -2158,11 +2158,17 @@ void LCodeGen::EmitBranchF(InstrType instr,
}
template<class InstrType>
void LCodeGen::EmitFalseBranch(InstrType instr,
Condition condition,
Register src1,
const Operand& src2) {
template <class InstrType>
void LCodeGen::EmitTrueBranch(InstrType instr, Condition condition,
Register src1, const Operand& src2) {
int true_block = instr->TrueDestination(chunk_);
__ Branch(chunk_->GetAssemblyLabel(true_block), condition, src1, src2);
}
template <class InstrType>
void LCodeGen::EmitFalseBranch(InstrType instr, Condition condition,
Register src1, const Operand& src2) {
int false_block = instr->FalseDestination(chunk_);
__ Branch(chunk_->GetAssemblyLabel(false_block), condition, src1, src2);
}
@ -2756,141 +2762,41 @@ void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) {
void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
DCHECK(ToRegister(instr->context()).is(cp));
Label true_label, done;
DCHECK(ToRegister(instr->left()).is(a0)); // Object is in a0.
DCHECK(ToRegister(instr->right()).is(a1)); // Function is in a1.
Register result = ToRegister(instr->result());
DCHECK(result.is(v0));
DCHECK(ToRegister(instr->left()).is(InstanceOfDescriptor::LeftRegister()));
DCHECK(ToRegister(instr->right()).is(InstanceOfDescriptor::RightRegister()));
DCHECK(ToRegister(instr->result()).is(v0));
InstanceofStub stub(isolate(), InstanceofStub::kArgsInRegisters);
InstanceOfStub stub(isolate());
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
__ Branch(&true_label, eq, result, Operand(zero_reg));
__ li(result, Operand(factory()->false_value()));
__ Branch(&done);
__ bind(&true_label);
__ li(result, Operand(factory()->true_value()));
__ bind(&done);
}
void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
class DeferredInstanceOfKnownGlobal final : public LDeferredCode {
public:
DeferredInstanceOfKnownGlobal(LCodeGen* codegen,
LInstanceOfKnownGlobal* instr)
: LDeferredCode(codegen), instr_(instr) { }
void Generate() override {
codegen()->DoDeferredInstanceOfKnownGlobal(instr_, &map_check_);
}
LInstruction* instr() override { return instr_; }
Label* map_check() { return &map_check_; }
void LCodeGen::DoHasInPrototypeChainAndBranch(
LHasInPrototypeChainAndBranch* instr) {
Register const object = ToRegister(instr->object());
Register const object_map = scratch0();
Register const object_prototype = object_map;
Register const prototype = ToRegister(instr->prototype());
private:
LInstanceOfKnownGlobal* instr_;
Label map_check_;
};
DeferredInstanceOfKnownGlobal* deferred;
deferred = new(zone()) DeferredInstanceOfKnownGlobal(this, instr);
Label done, false_result;
Register object = ToRegister(instr->value());
Register temp = ToRegister(instr->temp());
Register result = ToRegister(instr->result());
DCHECK(object.is(a0));
DCHECK(result.is(v0));
// A Smi is not instance of anything.
__ JumpIfSmi(object, &false_result);
// This is the inlined call site instanceof cache. The two occurrences of the
// hole value will be patched to the last map/result pair generated by the
// instanceof stub.
Label cache_miss;
Register map = temp;
__ ld(map, FieldMemOperand(object, HeapObject::kMapOffset));
Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
__ bind(deferred->map_check()); // Label for calculating code patching.
// We use Factory::the_hole_value() on purpose instead of loading from the
// root array to force relocation to be able to later patch with
// the cached map.
Handle<Cell> cell = factory()->NewCell(factory()->the_hole_value());
__ li(at, Operand(cell));
__ ld(at, FieldMemOperand(at, Cell::kValueOffset));
__ BranchShort(&cache_miss, ne, map, Operand(at));
// We use Factory::the_hole_value() on purpose instead of loading from the
// root array to force relocation to be able to later patch
// with true or false. The distance from map check has to be constant.
__ li(result, Operand(factory()->the_hole_value()));
__ Branch(&done);
// The inlined call site cache did not match. Check null and string before
// calling the deferred code.
__ bind(&cache_miss);
// Null is not instance of anything.
__ LoadRoot(temp, Heap::kNullValueRootIndex);
__ Branch(&false_result, eq, object, Operand(temp));
// String values are not instances of anything.
Condition cc = __ IsObjectStringType(object, temp, temp);
__ Branch(&false_result, cc, temp, Operand(zero_reg));
// Go to the deferred code.
__ Branch(deferred->entry());
__ bind(&false_result);
__ LoadRoot(result, Heap::kFalseValueRootIndex);
// Here result has either true or false. Deferred code also produces true or
// false object.
__ bind(deferred->exit());
__ bind(&done);
}
void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
Label* map_check) {
Register result = ToRegister(instr->result());
DCHECK(result.is(v0));
InstanceofStub::Flags flags = InstanceofStub::kNoFlags;
flags = static_cast<InstanceofStub::Flags>(
flags | InstanceofStub::kArgsInRegisters);
flags = static_cast<InstanceofStub::Flags>(
flags | InstanceofStub::kCallSiteInlineCheck);
flags = static_cast<InstanceofStub::Flags>(
flags | InstanceofStub::kReturnTrueFalseObject);
InstanceofStub stub(isolate(), flags);
PushSafepointRegistersScope scope(this);
LoadContextFromDeferred(instr->context());
// Get the temp register reserved by the instruction. This needs to be a4 as
// its slot of the pushing of safepoint registers is used to communicate the
// offset to the location of the map check.
Register temp = ToRegister(instr->temp());
DCHECK(temp.is(a4));
__ li(InstanceofStub::right(), instr->function());
static const int kAdditionalDelta = 13;
int delta = masm_->InstructionsGeneratedSince(map_check) + kAdditionalDelta;
Label before_push_delta;
__ bind(&before_push_delta);
{
Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
__ li(temp, Operand(delta * kIntSize), CONSTANT_SIZE);
__ StoreToSafepointRegisterSlot(temp, temp);
// The {object} must be a spec object. It's sufficient to know that {object}
// is not a smi, since all other non-spec objects have {null} prototypes and
// will be ruled out below.
if (instr->hydrogen()->ObjectNeedsSmiCheck()) {
__ SmiTst(object, at);
EmitFalseBranch(instr, eq, at, Operand(zero_reg));
}
CallCodeGeneric(stub.GetCode(),
RelocInfo::CODE_TARGET,
instr,
RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
LEnvironment* env = instr->GetDeferredLazyDeoptimizationEnvironment();
safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
// Put the result value into the result register slot and
// restore all registers.
__ StoreToSafepointRegisterSlot(result, result);
// Loop through the {object}'s prototype chain looking for the {prototype}.
__ ld(object_map, FieldMemOperand(object, HeapObject::kMapOffset));
Label loop;
__ bind(&loop);
__ ld(object_prototype, FieldMemOperand(object_map, Map::kPrototypeOffset));
EmitTrueBranch(instr, eq, object_prototype, Operand(prototype));
__ LoadRoot(at, Heap::kNullValueRootIndex);
EmitFalseBranch(instr, eq, object_prototype, Operand(at));
__ Branch(&loop, USE_DELAY_SLOT);
__ ld(object_map, FieldMemOperand(object_prototype,
HeapObject::kMapOffset)); // In delay slot.
}

@ -114,8 +114,6 @@ class LCodeGen: public LCodeGenBase {
void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr);
void DoDeferredStringCharFromCode(LStringCharFromCode* instr);
void DoDeferredAllocate(LAllocate* instr);
void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
Label* map_check);
void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);
void DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr,
@ -280,10 +278,11 @@ class LCodeGen: public LCodeGenBase {
Condition condition,
FPURegister src1,
FPURegister src2);
template<class InstrType>
void EmitFalseBranch(InstrType instr,
Condition condition,
Register src1,
template <class InstrType>
void EmitTrueBranch(InstrType instr, Condition condition, Register src1,
const Operand& src2);
template <class InstrType>
void EmitFalseBranch(InstrType instr, Condition condition, Register src1,
const Operand& src2);
template<class InstrType>
void EmitFalseBranchF(InstrType instr,

@ -933,22 +933,14 @@ void LChunkBuilder::AddInstruction(LInstruction* instr,
if (instr->IsCall()) {
HValue* hydrogen_value_for_lazy_bailout = hydrogen_val;
LInstruction* instruction_needing_environment = NULL;
if (hydrogen_val->HasObservableSideEffects()) {
HSimulate* sim = HSimulate::cast(hydrogen_val->next());
instruction_needing_environment = instr;
sim->ReplayEnvironment(current_block_->last_environment());
hydrogen_value_for_lazy_bailout = sim;
}
LInstruction* bailout = AssignEnvironment(new(zone()) LLazyBailout());
bailout->set_hydrogen_value(hydrogen_value_for_lazy_bailout);
chunk_->AddInstruction(bailout, current_block_);
if (instruction_needing_environment != NULL) {
// Store the lazy deopt environment with the instruction if needed.
// Right now it is only used for LInstanceOfKnownGlobal.
instruction_needing_environment->
SetDeferredLazyDeoptimizationEnvironment(bailout->environment());
}
}
}
@ -999,22 +991,21 @@ LInstruction* LChunkBuilder::DoArgumentsElements(HArgumentsElements* elems) {
LInstruction* LChunkBuilder::DoInstanceOf(HInstanceOf* instr) {
LOperand* left =
UseFixed(instr->left(), InstanceOfDescriptor::LeftRegister());
LOperand* right =
UseFixed(instr->right(), InstanceOfDescriptor::RightRegister());
LOperand* context = UseFixed(instr->context(), cp);
LInstanceOf* result =
new(zone()) LInstanceOf(context, UseFixed(instr->left(), a0),
UseFixed(instr->right(), a1));
LInstanceOf* result = new (zone()) LInstanceOf(context, left, right);
return MarkAsCall(DefineFixed(result, v0), instr);
}
LInstruction* LChunkBuilder::DoInstanceOfKnownGlobal(
HInstanceOfKnownGlobal* instr) {
LInstanceOfKnownGlobal* result =
new(zone()) LInstanceOfKnownGlobal(
UseFixed(instr->context(), cp),
UseFixed(instr->left(), a0),
FixedTemp(a4));
return MarkAsCall(DefineFixed(result, v0), instr);
LInstruction* LChunkBuilder::DoHasInPrototypeChainAndBranch(
HHasInPrototypeChainAndBranch* instr) {
LOperand* object = UseRegister(instr->object());
LOperand* prototype = UseRegister(instr->prototype());
return new (zone()) LHasInPrototypeChainAndBranch(object, prototype);
}

@ -85,10 +85,10 @@ class LCodeGen;
V(GetCachedArrayIndex) \
V(Goto) \
V(HasCachedArrayIndexAndBranch) \
V(HasInPrototypeChainAndBranch) \
V(HasInstanceTypeAndBranch) \
V(InnerAllocatedObject) \
V(InstanceOf) \
V(InstanceOfKnownGlobal) \
V(InstructionGap) \
V(Integer32ToDouble) \
V(InvokeFunction) \
@ -235,8 +235,6 @@ class LInstruction : public ZoneObject {
void set_hydrogen_value(HValue* value) { hydrogen_value_ = value; }
HValue* hydrogen_value() const { return hydrogen_value_; }
virtual void SetDeferredLazyDeoptimizationEnvironment(LEnvironment* env) { }
void MarkAsCall() { bit_field_ = IsCallBits::update(bit_field_, true); }
bool IsCall() const { return IsCallBits::decode(bit_field_); }
@ -1188,41 +1186,27 @@ class LInstanceOf final : public LTemplateInstruction<1, 3, 0> {
inputs_[2] = right;
}
LOperand* context() { return inputs_[0]; }
LOperand* left() { return inputs_[1]; }
LOperand* right() { return inputs_[2]; }
LOperand* context() const { return inputs_[0]; }
LOperand* left() const { return inputs_[1]; }
LOperand* right() const { return inputs_[2]; }
DECLARE_CONCRETE_INSTRUCTION(InstanceOf, "instance-of")
};
class LInstanceOfKnownGlobal final : public LTemplateInstruction<1, 2, 1> {
class LHasInPrototypeChainAndBranch final : public LControlInstruction<2, 0> {
public:
LInstanceOfKnownGlobal(LOperand* context, LOperand* value, LOperand* temp) {
inputs_[0] = context;
inputs_[1] = value;
temps_[0] = temp;
LHasInPrototypeChainAndBranch(LOperand* object, LOperand* prototype) {
inputs_[0] = object;
inputs_[1] = prototype;
}
LOperand* context() { return inputs_[0]; }
LOperand* value() { return inputs_[1]; }
LOperand* temp() { return temps_[0]; }
LOperand* object() const { return inputs_[0]; }
LOperand* prototype() const { return inputs_[1]; }
DECLARE_CONCRETE_INSTRUCTION(InstanceOfKnownGlobal,
"instance-of-known-global")
DECLARE_HYDROGEN_ACCESSOR(InstanceOfKnownGlobal)
Handle<JSFunction> function() const { return hydrogen()->function(); }
LEnvironment* GetDeferredLazyDeoptimizationEnvironment() {
return lazy_deopt_env_;
}
virtual void SetDeferredLazyDeoptimizationEnvironment(
LEnvironment* env) override {
lazy_deopt_env_ = env;
}
private:
LEnvironment* lazy_deopt_env_;
DECLARE_CONCRETE_INSTRUCTION(HasInPrototypeChainAndBranch,
"has-in-prototype-chain-and-branch")
DECLARE_HYDROGEN_ACCESSOR(HasInPrototypeChainAndBranch)
};

@ -4273,34 +4273,8 @@ void MacroAssembler::GetMapConstructor(Register result, Register map,
}
void MacroAssembler::TryGetFunctionPrototype(Register function,
Register result,
Register scratch,
Label* miss,
bool miss_on_bound_function) {
Label non_instance;
if (miss_on_bound_function) {
// Check that the receiver isn't a smi.
JumpIfSmi(function, miss);
// Check that the function really is a function. Load map into result reg.
GetObjectType(function, result, scratch);
Branch(miss, ne, scratch, Operand(JS_FUNCTION_TYPE));
ld(scratch,
FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset));
lwu(scratch,
FieldMemOperand(scratch, SharedFunctionInfo::kCompilerHintsOffset));
And(scratch, scratch,
Operand(1 << SharedFunctionInfo::kBoundFunction));
Branch(miss, ne, scratch, Operand(zero_reg));
// Make sure that the function has an instance prototype.
lbu(scratch, FieldMemOperand(result, Map::kBitFieldOffset));
And(scratch, scratch, Operand(1 << Map::kHasNonInstancePrototype));
Branch(&non_instance, ne, scratch, Operand(zero_reg));
}
void MacroAssembler::TryGetFunctionPrototype(Register function, Register result,
Register scratch, Label* miss) {
// Get the prototype or initial map from the function.
ld(result,
FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
@ -4319,15 +4293,6 @@ void MacroAssembler::TryGetFunctionPrototype(Register function,
// Get the prototype from the initial map.
ld(result, FieldMemOperand(result, Map::kPrototypeOffset));
if (miss_on_bound_function) {
jmp(&done);
// Non-instance prototype: Fetch prototype from constructor field
// in initial map.
bind(&non_instance);
GetMapConstructor(result, result, scratch, scratch);
}
// All done.
bind(&done);
}
@ -5750,94 +5715,6 @@ void MacroAssembler::CallCFunctionHelper(Register function,
#undef BRANCH_ARGS_CHECK
void MacroAssembler::PatchRelocatedValue(Register li_location,
Register scratch,
Register new_value) {
lwu(scratch, MemOperand(li_location));
// At this point scratch is a lui(at, ...) instruction.
if (emit_debug_code()) {
And(scratch, scratch, kOpcodeMask);
Check(eq, kTheInstructionToPatchShouldBeALui,
scratch, Operand(LUI));
lwu(scratch, MemOperand(li_location));
}
dsrl32(t9, new_value, 0);
Ins(scratch, t9, 0, kImm16Bits);
sw(scratch, MemOperand(li_location));
lwu(scratch, MemOperand(li_location, kInstrSize));
// scratch is now ori(at, ...).
if (emit_debug_code()) {
And(scratch, scratch, kOpcodeMask);
Check(eq, kTheInstructionToPatchShouldBeAnOri,
scratch, Operand(ORI));
lwu(scratch, MemOperand(li_location, kInstrSize));
}
dsrl(t9, new_value, kImm16Bits);
Ins(scratch, t9, 0, kImm16Bits);
sw(scratch, MemOperand(li_location, kInstrSize));
lwu(scratch, MemOperand(li_location, kInstrSize * 3));
// scratch is now ori(at, ...).
if (emit_debug_code()) {
And(scratch, scratch, kOpcodeMask);
Check(eq, kTheInstructionToPatchShouldBeAnOri,
scratch, Operand(ORI));
lwu(scratch, MemOperand(li_location, kInstrSize * 3));
}
Ins(scratch, new_value, 0, kImm16Bits);
sw(scratch, MemOperand(li_location, kInstrSize * 3));
// Update the I-cache so the new lui and ori can be executed.
FlushICache(li_location, 4);
}
void MacroAssembler::GetRelocatedValue(Register li_location,
Register value,
Register scratch) {
lwu(value, MemOperand(li_location));
if (emit_debug_code()) {
And(value, value, kOpcodeMask);
Check(eq, kTheInstructionShouldBeALui,
value, Operand(LUI));
lwu(value, MemOperand(li_location));
}
// value now holds a lui instruction. Extract the immediate.
andi(value, value, kImm16Mask);
dsll32(value, value, kImm16Bits);
lwu(scratch, MemOperand(li_location, kInstrSize));
if (emit_debug_code()) {
And(scratch, scratch, kOpcodeMask);
Check(eq, kTheInstructionShouldBeAnOri,
scratch, Operand(ORI));
lwu(scratch, MemOperand(li_location, kInstrSize));
}
// "scratch" now holds an ori instruction. Extract the immediate.
andi(scratch, scratch, kImm16Mask);
dsll32(scratch, scratch, 0);
or_(value, value, scratch);
lwu(scratch, MemOperand(li_location, kInstrSize * 3));
if (emit_debug_code()) {
And(scratch, scratch, kOpcodeMask);
Check(eq, kTheInstructionShouldBeAnOri,
scratch, Operand(ORI));
lwu(scratch, MemOperand(li_location, kInstrSize * 3));
}
// "scratch" now holds an ori instruction. Extract the immediate.
andi(scratch, scratch, kImm16Mask);
dsll(scratch, scratch, kImm16Bits);
or_(value, value, scratch);
// Sign extend extracted address.
dsra(value, value, kImm16Bits);
}
void MacroAssembler::CheckPageFlag(
Register object,
Register scratch,

@ -1071,11 +1071,8 @@ class MacroAssembler: public Assembler {
// function and jumps to the miss label if the fast checks fail. The
// function register will be untouched; the other registers may be
// clobbered.
void TryGetFunctionPrototype(Register function,
Register result,
Register scratch,
Label* miss,
bool miss_on_bound_function = false);
void TryGetFunctionPrototype(Register function, Register result,
Register scratch, Label* miss);
void GetObjectType(Register function,
Register map,
@ -1681,15 +1678,6 @@ const Operand& rt = Operand(zero_reg), BranchDelaySlot bd = PROTECT
void EnterFrame(StackFrame::Type type, bool load_constant_pool_pointer_reg);
void LeaveFrame(StackFrame::Type type);
// Patch the relocated value (lui/ori pair).
void PatchRelocatedValue(Register li_location,
Register scratch,
Register new_value);
// Get the relocated value (loaded data) from the lui/ori pair.
void GetRelocatedValue(Register li_location,
Register value,
Register scratch);
// Expects object in a0 and returns map with validated enum cache
// in a0. Assumes that any other register can be used as a scratch.
void CheckEnumCache(Register null_value, Label* call_runtime);

@ -6700,6 +6700,9 @@ class SharedFunctionInfo: public HeapObject {
static const int kNativeBitWithinByte =
(kNative + kCompilerHintsSmiTagSize) % kBitsPerByte;
static const int kBoundBitWithinByte =
(kBoundFunction + kCompilerHintsSmiTagSize) % kBitsPerByte;
#if defined(V8_TARGET_LITTLE_ENDIAN)
static const int kStrictModeByteOffset = kCompilerHintsOffset +
(kStrictModeFunction + kCompilerHintsSmiTagSize) / kBitsPerByte;
@ -6708,6 +6711,9 @@ class SharedFunctionInfo: public HeapObject {
(kStrongModeFunction + kCompilerHintsSmiTagSize) / kBitsPerByte;
static const int kNativeByteOffset = kCompilerHintsOffset +
(kNative + kCompilerHintsSmiTagSize) / kBitsPerByte;
static const int kBoundByteOffset =
kCompilerHintsOffset +
(kBoundFunction + kCompilerHintsSmiTagSize) / kBitsPerByte;
#elif defined(V8_TARGET_BIG_ENDIAN)
static const int kStrictModeByteOffset = kCompilerHintsOffset +
(kCompilerHintsSize - 1) -
@ -6718,6 +6724,9 @@ class SharedFunctionInfo: public HeapObject {
static const int kNativeByteOffset = kCompilerHintsOffset +
(kCompilerHintsSize - 1) -
((kNative + kCompilerHintsSmiTagSize) / kBitsPerByte);
static const int kBoundByteOffset =
kCompilerHintsOffset + (kCompilerHintsSize - 1) -
((kBoundFunction + kCompilerHintsSmiTagSize) / kBitsPerByte);
#else
#error Unknown byte ordering
#endif

@ -435,38 +435,6 @@ function IN(x) {
}
// ECMA-262, section 11.8.6, page 54. To make the implementation more
// efficient, the return value should be zero if the 'this' is an
// instance of F, and non-zero if not. This makes it possible to avoid
// an expensive ToBoolean conversion in the generated code.
function INSTANCE_OF(F) {
var V = this;
if (!IS_SPEC_FUNCTION(F)) {
throw %MakeTypeError(kInstanceofFunctionExpected, F);
}
// If V is not an object, return false.
if (!IS_SPEC_OBJECT(V)) {
return 1;
}
// Check if function is bound, if so, get [[BoundFunction]] from it
// and use that instead of F.
var bindings = %BoundFunctionGetBindings(F);
if (bindings) {
F = bindings[kBoundFunctionIndex]; // Always a non-bound function.
}
// Get the prototype of F; if it is not an object, throw an error.
var O = F.prototype;
if (!IS_SPEC_OBJECT(O)) {
throw %MakeTypeError(kInstanceofNonobjectProto, O);
}
// Return whether or not O is in the prototype chain of V.
return %IsInPrototypeChain(O, V) ? 0 : 1;
}
function CALL_NON_FUNCTION() {
var delegate = %GetFunctionDelegate(this);
if (!IS_FUNCTION(delegate)) {
@ -899,7 +867,6 @@ $toString = ToString;
SHR,
SHR_STRONG,
IN,
INSTANCE_OF,
CALL_NON_FUNCTION,
CALL_NON_FUNCTION_AS_CONSTRUCTOR,
CALL_FUNCTION_PROXY,

@ -262,16 +262,6 @@ RUNTIME_FUNCTION(Runtime_SetPrototype) {
}
RUNTIME_FUNCTION(Runtime_IsInPrototypeChain) {
SealHandleScope shs(isolate);
DCHECK(args.length() == 2);
// See ECMA-262, section 15.3.5.3, page 88 (steps 5 - 8).
CONVERT_ARG_CHECKED(Object, O, 0);
CONVERT_ARG_CHECKED(Object, V, 1);
return isolate->heap()->ToBoolean(V->HasInPrototypeChain(isolate, O));
}
// Enumerator used as indices into the array returned from GetOwnProperty
enum PropertyDescriptorIndices {
IS_ACCESSOR_INDEX,
@ -1468,5 +1458,59 @@ RUNTIME_FUNCTION(Runtime_StrictEquals) {
return Smi::FromInt(x->StrictEquals(y) ? EQUAL : NOT_EQUAL);
}
RUNTIME_FUNCTION(Runtime_InstanceOf) {
// ECMA-262, section 11.8.6, page 54.
HandleScope shs(isolate);
DCHECK_EQ(2, args.length());
DCHECK(args.length() == 2);
CONVERT_ARG_HANDLE_CHECKED(Object, object, 0);
CONVERT_ARG_HANDLE_CHECKED(Object, callable, 1);
// {callable} must have a [[Call]] internal method.
if (!callable->IsCallable()) {
THROW_NEW_ERROR_RETURN_FAILURE(
isolate,
NewTypeError(MessageTemplate::kInstanceofFunctionExpected, callable));
}
// If {object} is not a receiver, return false.
if (!object->IsJSReceiver()) {
return isolate->heap()->false_value();
}
// Check if {callable} is bound, if so, get [[BoundFunction]] from it and use
// that instead of {callable}.
if (callable->IsJSFunction()) {
Handle<JSFunction> function = Handle<JSFunction>::cast(callable);
if (function->shared()->bound()) {
Handle<FixedArray> bindings(function->function_bindings(), isolate);
callable =
handle(bindings->get(JSFunction::kBoundFunctionIndex), isolate);
}
}
DCHECK(callable->IsCallable());
// Get the "prototype" of {callable}; raise an error if it's not a receiver.
Handle<Object> prototype;
ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
isolate, prototype,
Object::GetProperty(callable, isolate->factory()->prototype_string()));
if (!prototype->IsJSReceiver()) {
THROW_NEW_ERROR_RETURN_FAILURE(
isolate,
NewTypeError(MessageTemplate::kInstanceofNonobjectProto, prototype));
}
// Return whether or not {prototype} is in the prototype chain of {object}.
return isolate->heap()->ToBoolean(
object->HasInPrototypeChain(isolate, *prototype));
}
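In JS terms, Runtime_InstanceOf computes roughly the following (a sketch of the semantics only; isBoundFunction and boundTargetOf stand in for the shared()->bound() check and the [[BoundFunction]] binding, and the error messages are paraphrased):

function runtimeInstanceOf(object, callable) {
  if (typeof callable !== 'function') {
    throw new TypeError('instanceof expects a callable right-hand side');
  }
  var isReceiver = object !== null &&
      (typeof object === 'object' || typeof object === 'function');
  if (!isReceiver) return false;                 // smis and other primitives
  if (isBoundFunction(callable)) {
    callable = boundTargetOf(callable);          // always a non-bound function
  }
  var prototype = callable.prototype;
  var protoIsReceiver = prototype !== null &&
      (typeof prototype === 'object' || typeof prototype === 'function');
  if (!protoIsReceiver) {
    throw new TypeError('Function has a non-object prototype in instanceof check');
  }
  // Same prototype-chain walk as Runtime_HasInPrototypeChain below.
  for (var p = Object.getPrototypeOf(object); p !== null;
       p = Object.getPrototypeOf(p)) {
    if (p === prototype) return true;
  }
  return false;
}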
RUNTIME_FUNCTION(Runtime_HasInPrototypeChain) {
SealHandleScope scope(isolate);
DCHECK_EQ(2, args.length());
CONVERT_ARG_CHECKED(Object, object, 0);
CONVERT_ARG_CHECKED(Object, prototype, 1);
return isolate->heap()->ToBoolean(
object->HasInPrototypeChain(isolate, prototype));
}
} // namespace internal
} // namespace v8

@ -436,7 +436,6 @@ namespace internal {
F(GetPrototype, 1, 1) \
F(InternalSetPrototype, 2, 1) \
F(SetPrototype, 2, 1) \
F(IsInPrototypeChain, 2, 1) \
F(GetOwnProperty, 2, 1) \
F(PreventExtensions, 1, 1) \
F(IsExtensible, 1, 1) \
@ -495,7 +494,9 @@ namespace internal {
F(DefineGetterPropertyUnchecked, 4, 1) \
F(DefineSetterPropertyUnchecked, 4, 1) \
F(ToObject, 1, 1) \
F(StrictEquals, 2, 1)
F(StrictEquals, 2, 1) \
F(InstanceOf, 2, 1) \
F(HasInPrototypeChain, 2, 1)
#define FOR_EACH_INTRINSIC_OBSERVE(F) \

@ -193,8 +193,8 @@ function ObjectHasOwnProperty(value) {
// ECMA-262 - 15.2.4.6
function ObjectIsPrototypeOf(V) {
if (!IS_SPEC_OBJECT(V)) return false;
CHECK_OBJECT_COERCIBLE(this, "Object.prototype.isPrototypeOf");
return %IsInPrototypeChain(this, V);
var O = TO_OBJECT(this);
return %_HasInPrototypeChain(V, O);
}
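With TO_OBJECT on the receiver and the (V, O) argument order of %_HasInPrototypeChain, the builtin now answers whether the receiver O appears in V's prototype chain, as the spec requires. For example:

function A() {}
var a = new A();
A.prototype.isPrototypeOf(a);            // true
Object.prototype.isPrototypeOf(a);       // true: the whole chain is walked
A.prototype.isPrototypeOf(A.prototype);  // false: an object is not its own prototype
Object.prototype.isPrototypeOf(null);    // false: non-object argument short-circuits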

@ -2684,220 +2684,108 @@ void JSEntryStub::Generate(MacroAssembler* masm) {
}
void InstanceofStub::Generate(MacroAssembler* masm) {
// Implements "value instanceof function" operator.
// Expected input state with no inline cache:
// rsp[0] : return address
// rsp[8] : function pointer
// rsp[16] : value
// Expected input state with an inline one-element cache:
// rsp[0] : return address
// rsp[8] : offset from return address to location of inline cache
// rsp[16] : function pointer
// rsp[24] : value
// Returns a bitwise zero to indicate that the value
// is an instance of the function and anything else to
// indicate that the value is not an instance.
void InstanceOfStub::Generate(MacroAssembler* masm) {
Register const object = rdx; // Object (lhs).
Register const function = rax; // Function (rhs).
Register const object_map = rcx; // Map of {object}.
Register const function_map = r8; // Map of {function}.
Register const function_prototype = rdi; // Prototype of {function}.
// Fixed register usage throughout the stub.
Register object = rax; // Object (lhs).
Register map = rbx; // Map of the object.
Register function = rdx; // Function (rhs).
Register prototype = rdi; // Prototype of the function.
Register scratch = rcx;
DCHECK(object.is(InstanceOfDescriptor::LeftRegister()));
DCHECK(function.is(InstanceOfDescriptor::RightRegister()));
static const int kOffsetToMapCheckValue = 2;
static const int kOffsetToResultValue = kPointerSize == kInt64Size ? 18 : 14;
// The last 4 bytes of the instruction sequence
// movp(rdi, FieldOperand(rax, HeapObject::kMapOffset))
// Move(kScratchRegister, Factory::the_hole_value())
// in front of the hole value address.
static const unsigned int kWordBeforeMapCheckValue =
kPointerSize == kInt64Size ? 0xBA49FF78 : 0xBA41FF78;
// The last 4 bytes of the instruction sequence
// __ j(not_equal, &cache_miss);
// __ LoadRoot(ToRegister(instr->result()), Heap::kTheHoleValueRootIndex);
// before the offset of the hole value in the root array.
static const unsigned int kWordBeforeResultValue =
kPointerSize == kInt64Size ? 0x458B4906 : 0x458B4106;
// Check if {object} is a smi.
Label object_is_smi;
__ JumpIfSmi(object, &object_is_smi, Label::kNear);
int extra_argument_offset = HasCallSiteInlineCheck() ? 1 : 0;
// Lookup the {function} and the {object} map in the global instanceof cache.
// Note: This is safe because we clear the global instanceof cache whenever
// we change the prototype of any object.
Label fast_case, slow_case;
__ movp(object_map, FieldOperand(object, HeapObject::kMapOffset));
__ CompareRoot(function, Heap::kInstanceofCacheFunctionRootIndex);
__ j(not_equal, &fast_case, Label::kNear);
__ CompareRoot(object_map, Heap::kInstanceofCacheMapRootIndex);
__ j(not_equal, &fast_case, Label::kNear);
__ LoadRoot(rax, Heap::kInstanceofCacheAnswerRootIndex);
__ ret(0);
DCHECK_EQ(object.code(), InstanceofStub::left().code());
DCHECK_EQ(function.code(), InstanceofStub::right().code());
// If {object} is a smi we can safely return false if {function} is a JS
// function, otherwise we have to miss to the runtime and throw an exception.
__ bind(&object_is_smi);
__ JumpIfSmi(function, &slow_case);
__ CmpObjectType(function, JS_FUNCTION_TYPE, function_map);
__ j(not_equal, &slow_case);
__ LoadRoot(rax, Heap::kFalseValueRootIndex);
__ ret(0);
// Get the object and function - they are always both needed.
// Go slow case if the object is a smi.
Label slow;
StackArgumentsAccessor args(rsp, 2 + extra_argument_offset,
ARGUMENTS_DONT_CONTAIN_RECEIVER);
if (!HasArgsInRegisters()) {
__ movp(object, args.GetArgumentOperand(0));
__ movp(function, args.GetArgumentOperand(1));
}
__ JumpIfSmi(object, &slow);
// Fast-case: The {function} must be a valid JSFunction.
__ bind(&fast_case);
__ JumpIfSmi(function, &slow_case);
__ CmpObjectType(function, JS_FUNCTION_TYPE, function_map);
__ j(not_equal, &slow_case);
// Check that the left hand is a JS object. Leave its map in rax.
__ CmpObjectType(object, FIRST_SPEC_OBJECT_TYPE, map);
__ j(below, &slow);
__ CmpInstanceType(map, LAST_SPEC_OBJECT_TYPE);
__ j(above, &slow);
// Ensure that {function} has an instance prototype.
__ testb(FieldOperand(function_map, Map::kBitFieldOffset),
Immediate(1 << Map::kHasNonInstancePrototype));
__ j(not_zero, &slow_case);
// If there is a call site cache don't look in the global cache, but do the
// real lookup and update the call site cache.
if (!HasCallSiteInlineCheck() && !ReturnTrueFalseObject()) {
// Look up the function and the map in the instanceof cache.
Label miss;
__ CompareRoot(function, Heap::kInstanceofCacheFunctionRootIndex);
__ j(not_equal, &miss, Label::kNear);
__ CompareRoot(map, Heap::kInstanceofCacheMapRootIndex);
__ j(not_equal, &miss, Label::kNear);
__ LoadRoot(rax, Heap::kInstanceofCacheAnswerRootIndex);
__ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
__ bind(&miss);
}
// Ensure that {function} is not bound.
Register const shared_info = kScratchRegister;
__ movp(shared_info,
FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
__ TestBitSharedFunctionInfoSpecialField(
shared_info, SharedFunctionInfo::kCompilerHintsOffset,
SharedFunctionInfo::kBoundFunction);
__ j(not_zero, &slow_case);
// Get the prototype of the function.
__ TryGetFunctionPrototype(function, prototype, &slow, true);
// Get the "prototype" (or initial map) of the {function}.
__ movp(function_prototype,
FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
__ AssertNotSmi(function_prototype);
// Check that the function prototype is a JS object.
__ JumpIfSmi(prototype, &slow);
__ CmpObjectType(prototype, FIRST_SPEC_OBJECT_TYPE, kScratchRegister);
__ j(below, &slow);
__ CmpInstanceType(kScratchRegister, LAST_SPEC_OBJECT_TYPE);
__ j(above, &slow);
// Resolve the prototype if the {function} has an initial map. Afterwards the
// {function_prototype} will be either the JSReceiver prototype object or the
// hole value, which means that no instances of the {function} were created so
// far and hence we should return false.
Label function_prototype_valid;
Register const function_prototype_map = kScratchRegister;
__ CmpObjectType(function_prototype, MAP_TYPE, function_prototype_map);
__ j(not_equal, &function_prototype_valid, Label::kNear);
__ movp(function_prototype,
FieldOperand(function_prototype, Map::kPrototypeOffset));
__ bind(&function_prototype_valid);
__ AssertNotSmi(function_prototype);
// Update the global instanceof or call site inlined cache with the current
// map and function. The cached answer will be set when it is known below.
if (!HasCallSiteInlineCheck()) {
__ StoreRoot(function, Heap::kInstanceofCacheFunctionRootIndex);
__ StoreRoot(map, Heap::kInstanceofCacheMapRootIndex);
} else {
// The constants for the code patching are based on push instructions
// at the call site.
DCHECK(!HasArgsInRegisters());
// Get return address and delta to inlined map check.
__ movq(kScratchRegister, StackOperandForReturnAddress(0));
__ subp(kScratchRegister, args.GetArgumentOperand(2));
if (FLAG_debug_code) {
__ movl(scratch, Immediate(kWordBeforeMapCheckValue));
__ cmpl(Operand(kScratchRegister, kOffsetToMapCheckValue - 4), scratch);
__ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheCheck);
}
__ movp(kScratchRegister,
Operand(kScratchRegister, kOffsetToMapCheckValue));
__ movp(Operand(kScratchRegister, 0), map);
// Update the global instanceof cache with the current {object} map and
// {function}. The cached answer will be set when it is known below.
__ StoreRoot(function, Heap::kInstanceofCacheFunctionRootIndex);
__ StoreRoot(object_map, Heap::kInstanceofCacheMapRootIndex);
__ movp(r8, map);
// Scratch points at the cell payload. Calculate the start of the object.
__ subp(kScratchRegister, Immediate(Cell::kValueOffset - 1));
__ RecordWriteField(kScratchRegister, Cell::kValueOffset, r8, function,
kDontSaveFPRegs, OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
}
// Loop through the prototype chain looking for the function prototype.
__ movp(scratch, FieldOperand(map, Map::kPrototypeOffset));
Label loop, is_instance, is_not_instance;
__ LoadRoot(kScratchRegister, Heap::kNullValueRootIndex);
// Loop through the prototype chain looking for the {function} prototype.
// Assume true, and change to false if not found.
Register const object_prototype = object_map;
Label done, loop;
__ LoadRoot(rax, Heap::kTrueValueRootIndex);
__ bind(&loop);
__ cmpp(scratch, prototype);
__ j(equal, &is_instance, Label::kNear);
__ cmpp(scratch, kScratchRegister);
// The code at is_not_instance assumes that kScratchRegister contains a
// non-zero GCable value (the null object in this case).
__ j(equal, &is_not_instance, Label::kNear);
__ movp(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
__ movp(scratch, FieldOperand(scratch, Map::kPrototypeOffset));
__ jmp(&loop);
__ movp(object_prototype, FieldOperand(object_map, Map::kPrototypeOffset));
__ cmpp(object_prototype, function_prototype);
__ j(equal, &done, Label::kNear);
__ CompareRoot(object_prototype, Heap::kNullValueRootIndex);
__ movp(object_map, FieldOperand(object_prototype, HeapObject::kMapOffset));
__ j(not_equal, &loop);
__ LoadRoot(rax, Heap::kFalseValueRootIndex);
__ bind(&done);
__ StoreRoot(rax, Heap::kInstanceofCacheAnswerRootIndex);
__ ret(0);
__ bind(&is_instance);
if (!HasCallSiteInlineCheck()) {
__ xorl(rax, rax);
// Store bitwise zero in the cache. This is a Smi in GC terms.
STATIC_ASSERT(kSmiTag == 0);
__ StoreRoot(rax, Heap::kInstanceofCacheAnswerRootIndex);
if (ReturnTrueFalseObject()) {
__ LoadRoot(rax, Heap::kTrueValueRootIndex);
}
} else {
// Store offset of true in the root array at the inline check site.
int true_offset = 0x100 +
(Heap::kTrueValueRootIndex << kPointerSizeLog2) - kRootRegisterBias;
// Assert it is a 1-byte signed value.
DCHECK(true_offset >= 0 && true_offset < 0x100);
__ movl(rax, Immediate(true_offset));
__ movq(kScratchRegister, StackOperandForReturnAddress(0));
__ subp(kScratchRegister, args.GetArgumentOperand(2));
__ movb(Operand(kScratchRegister, kOffsetToResultValue), rax);
if (FLAG_debug_code) {
__ movl(rax, Immediate(kWordBeforeResultValue));
__ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax);
__ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheMov);
}
if (!ReturnTrueFalseObject()) {
__ Set(rax, 0);
}
}
__ ret(((HasArgsInRegisters() ? 0 : 2) + extra_argument_offset) *
kPointerSize);
__ bind(&is_not_instance);
if (!HasCallSiteInlineCheck()) {
// We have to store a non-zero value in the cache.
__ StoreRoot(kScratchRegister, Heap::kInstanceofCacheAnswerRootIndex);
if (ReturnTrueFalseObject()) {
__ LoadRoot(rax, Heap::kFalseValueRootIndex);
}
} else {
// Store offset of false in the root array at the inline check site.
int false_offset = 0x100 +
(Heap::kFalseValueRootIndex << kPointerSizeLog2) - kRootRegisterBias;
// Assert it is a 1-byte signed value.
DCHECK(false_offset >= 0 && false_offset < 0x100);
__ movl(rax, Immediate(false_offset));
__ movq(kScratchRegister, StackOperandForReturnAddress(0));
__ subp(kScratchRegister, args.GetArgumentOperand(2));
__ movb(Operand(kScratchRegister, kOffsetToResultValue), rax);
if (FLAG_debug_code) {
__ movl(rax, Immediate(kWordBeforeResultValue));
__ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax);
__ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheMov);
}
}
__ ret(((HasArgsInRegisters() ? 0 : 2) + extra_argument_offset) *
kPointerSize);
// Slow-case: Go through the JavaScript implementation.
__ bind(&slow);
if (!ReturnTrueFalseObject()) {
// Tail call the builtin which returns 0 or 1.
DCHECK(!HasArgsInRegisters());
if (HasCallSiteInlineCheck()) {
// Remove extra value from the stack.
__ PopReturnAddressTo(rcx);
__ Pop(rax);
__ PushReturnAddressFrom(rcx);
}
__ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION);
} else {
// Call the builtin and convert 0/1 to true/false.
{
FrameScope scope(masm, StackFrame::INTERNAL);
__ Push(object);
__ Push(function);
__ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_FUNCTION);
}
Label true_value, done;
__ testq(rax, rax);
__ j(zero, &true_value, Label::kNear);
__ LoadRoot(rax, Heap::kFalseValueRootIndex);
__ jmp(&done, Label::kNear);
__ bind(&true_value);
__ LoadRoot(rax, Heap::kTrueValueRootIndex);
__ bind(&done);
__ ret(((HasArgsInRegisters() ? 0 : 2) + extra_argument_offset) *
kPointerSize);
}
// Slow-case: Call the runtime function.
__ bind(&slow_case);
__ PopReturnAddressTo(kScratchRegister);
__ Push(object);
__ Push(function);
__ PushReturnAddressFrom(kScratchRegister);
__ TailCallRuntime(Runtime::kInstanceOf, 2, 1);
}
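For reference, the fast path assembled above behaves roughly like the JavaScript below. This is only an illustrative sketch: fastInstanceOf is a made-up name, the global (map, function) instanceof cache consulted at the top of the stub is treated as pure memoization and omitted, and bound functions or a non-callable right-hand side would miss to Runtime::kInstanceOf instead of being handled here.

// Illustrative sketch only; fastInstanceOf is not a V8 API.
function fastInstanceOf(object, callable) {
  // A non-callable right-hand side is an error (the stub misses to the runtime).
  if (typeof callable !== 'function') throw new TypeError();
  // Smis and other primitives are never instances of anything.
  if (object === null ||
      (typeof object !== 'object' && typeof object !== 'function')) {
    return false;
  }
  // Resolve the instance prototype; a non-object prototype is an error
  // (compare the spec-object checks on {function_prototype} above).
  var proto = callable.prototype;
  if (proto === null ||
      (typeof proto !== 'object' && typeof proto !== 'function')) {
    throw new TypeError();
  }
  // Walk the prototype chain of {object} until {proto} or null is found.
  for (var p = Object.getPrototypeOf(object); p !== null;
       p = Object.getPrototypeOf(p)) {
    if (p === proto) return true;
  }
  return false;
}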

View File

@ -46,8 +46,8 @@ const Register StoreGlobalViaContextDescriptor::SlotRegister() { return rbx; }
const Register StoreGlobalViaContextDescriptor::ValueRegister() { return rax; }
const Register InstanceofDescriptor::left() { return rax; }
const Register InstanceofDescriptor::right() { return rdx; }
const Register InstanceOfDescriptor::LeftRegister() { return rdx; }
const Register InstanceOfDescriptor::RightRegister() { return rax; }
const Register ArgumentsAccessReadDescriptor::index() { return rdx; }

View File

@ -2093,7 +2093,14 @@ void LCodeGen::EmitBranch(InstrType instr, Condition cc) {
}
template<class InstrType>
template <class InstrType>
void LCodeGen::EmitTrueBranch(InstrType instr, Condition cc) {
int true_block = instr->TrueDestination(chunk_);
__ j(cc, chunk_->GetAssemblyLabel(true_block));
}
template <class InstrType>
void LCodeGen::EmitFalseBranch(InstrType instr, Condition cc) {
int false_block = instr->FalseDestination(chunk_);
__ j(cc, chunk_->GetAssemblyLabel(false_block));
@ -2671,128 +2678,40 @@ void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) {
void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
DCHECK(ToRegister(instr->context()).is(rsi));
InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
__ Push(ToRegister(instr->left()));
__ Push(ToRegister(instr->right()));
DCHECK(ToRegister(instr->left()).is(InstanceOfDescriptor::LeftRegister()));
DCHECK(ToRegister(instr->right()).is(InstanceOfDescriptor::RightRegister()));
DCHECK(ToRegister(instr->result()).is(rax));
InstanceOfStub stub(isolate());
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
Label true_value, done;
__ testp(rax, rax);
__ j(zero, &true_value, Label::kNear);
__ LoadRoot(ToRegister(instr->result()), Heap::kFalseValueRootIndex);
__ jmp(&done, Label::kNear);
__ bind(&true_value);
__ LoadRoot(ToRegister(instr->result()), Heap::kTrueValueRootIndex);
__ bind(&done);
}
void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
class DeferredInstanceOfKnownGlobal final : public LDeferredCode {
public:
DeferredInstanceOfKnownGlobal(LCodeGen* codegen,
LInstanceOfKnownGlobal* instr)
: LDeferredCode(codegen), instr_(instr) { }
void Generate() override {
codegen()->DoDeferredInstanceOfKnownGlobal(instr_, &map_check_);
}
LInstruction* instr() override { return instr_; }
Label* map_check() { return &map_check_; }
private:
LInstanceOfKnownGlobal* instr_;
Label map_check_;
};
void LCodeGen::DoHasInPrototypeChainAndBranch(
LHasInPrototypeChainAndBranch* instr) {
Register const object = ToRegister(instr->object());
Register const object_map = kScratchRegister;
Register const object_prototype = object_map;
Register const prototype = ToRegister(instr->prototype());
DCHECK(ToRegister(instr->context()).is(rsi));
DeferredInstanceOfKnownGlobal* deferred;
deferred = new(zone()) DeferredInstanceOfKnownGlobal(this, instr);
Label done, false_result;
Register object = ToRegister(instr->value());
// A Smi is not an instance of anything.
__ JumpIfSmi(object, &false_result, Label::kNear);
// This is the inlined call site instanceof cache. The two occurrences of the
// hole value will be patched to the last map/result pair generated by the
// instanceof stub.
Label cache_miss;
// Use a temp register to avoid memory operands with variable lengths.
Register map = ToRegister(instr->temp());
__ movp(map, FieldOperand(object, HeapObject::kMapOffset));
__ bind(deferred->map_check()); // Label for calculating code patching.
Handle<Cell> cache_cell = factory()->NewCell(factory()->the_hole_value());
__ Move(kScratchRegister, cache_cell, RelocInfo::CELL);
__ cmpp(map, Operand(kScratchRegister, 0));
__ j(not_equal, &cache_miss, Label::kNear);
// Patched to load either true or false.
__ LoadRoot(ToRegister(instr->result()), Heap::kTheHoleValueRootIndex);
#ifdef DEBUG
// Check that the code size between patch label and patch sites is invariant.
Label end_of_patched_code;
__ bind(&end_of_patched_code);
DCHECK(true);
#endif
__ jmp(&done, Label::kNear);
// The inlined call site cache did not match. Check for null and string
// before calling the deferred code.
__ bind(&cache_miss); // Null is not an instance of anything.
__ CompareRoot(object, Heap::kNullValueRootIndex);
__ j(equal, &false_result, Label::kNear);
// String values are not instances of anything.
__ JumpIfNotString(object, kScratchRegister, deferred->entry());
__ bind(&false_result);
__ LoadRoot(ToRegister(instr->result()), Heap::kFalseValueRootIndex);
__ bind(deferred->exit());
__ bind(&done);
}
void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
Label* map_check) {
{
PushSafepointRegistersScope scope(this);
InstanceofStub::Flags flags = static_cast<InstanceofStub::Flags>(
InstanceofStub::kNoFlags | InstanceofStub::kCallSiteInlineCheck);
InstanceofStub stub(isolate(), flags);
__ Push(ToRegister(instr->value()));
__ Push(instr->function());
static const int kAdditionalDelta = kPointerSize == kInt64Size ? 10 : 16;
int delta =
masm_->SizeOfCodeGeneratedSince(map_check) + kAdditionalDelta;
DCHECK(delta >= 0);
__ PushImm32(delta);
// We are pushing three values on the stack but recording a
// safepoint with two arguments because the stub is going to
// remove the third argument from the stack before jumping
// to the instanceof builtin on the slow path.
CallCodeGeneric(stub.GetCode(),
RelocInfo::CODE_TARGET,
instr,
RECORD_SAFEPOINT_WITH_REGISTERS,
2);
DCHECK(delta == masm_->SizeOfCodeGeneratedSince(map_check));
LEnvironment* env = instr->GetDeferredLazyDeoptimizationEnvironment();
safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
// Move result to a register that survives the end of the
// PushSafepointRegistersScope.
__ movp(kScratchRegister, rax);
// The {object} must be a spec object. It's sufficient to know that {object}
// is not a smi, since all other non-spec objects have {null} prototypes and
// will be ruled out below.
if (instr->hydrogen()->ObjectNeedsSmiCheck()) {
Condition is_smi = __ CheckSmi(object);
EmitFalseBranch(instr, is_smi);
}
__ testp(kScratchRegister, kScratchRegister);
Label load_false;
Label done;
__ j(not_zero, &load_false, Label::kNear);
__ LoadRoot(rax, Heap::kTrueValueRootIndex);
__ jmp(&done, Label::kNear);
__ bind(&load_false);
__ LoadRoot(rax, Heap::kFalseValueRootIndex);
__ bind(&done);
// Loop through the {object}'s prototype chain looking for the {prototype}.
__ movp(object_map, FieldOperand(object, HeapObject::kMapOffset));
Label loop;
__ bind(&loop);
__ movp(object_prototype, FieldOperand(object_map, Map::kPrototypeOffset));
__ cmpp(object_prototype, prototype);
EmitTrueBranch(instr, equal);
__ CompareRoot(object_prototype, Heap::kNullValueRootIndex);
EmitFalseBranch(instr, equal);
__ movp(object_map, FieldOperand(object_prototype, HeapObject::kMapOffset));
__ jmp(&loop);
}
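In JavaScript terms the branch above is just the bare chain walk, with the prototype already resolved and held in a register; the smi check is only emitted when the object might actually be a smi. A minimal sketch (hasInPrototypeChain is an illustrative name, not an exposed API):

// Illustrative sketch only; hasInPrototypeChain is not a real API.
function hasInPrototypeChain(object, prototype) {
  // Smis and other primitives fail immediately.
  if (object === null ||
      (typeof object !== 'object' && typeof object !== 'function')) {
    return false;
  }
  var p = Object.getPrototypeOf(object);
  while (p !== null) {               // null terminates every prototype chain
    if (p === prototype) return true;
    p = Object.getPrototypeOf(p);
  }
  return false;
}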

View File

@ -99,8 +99,6 @@ class LCodeGen: public LCodeGenBase {
void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr);
void DoDeferredStringCharFromCode(LStringCharFromCode* instr);
void DoDeferredAllocate(LAllocate* instr);
void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
Label* map_check);
void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);
void DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr,
Register object,
@ -255,7 +253,9 @@ class LCodeGen: public LCodeGenBase {
// EmitBranch expects to be the last instruction of a block.
template<class InstrType>
void EmitBranch(InstrType instr, Condition cc);
template<class InstrType>
template <class InstrType>
void EmitTrueBranch(InstrType instr, Condition cc);
template <class InstrType>
void EmitFalseBranch(InstrType instr, Condition cc);
void EmitNumberUntagD(LNumberUntagD* instr, Register input,
XMMRegister result, NumberUntagDMode mode);

View File

@ -947,22 +947,14 @@ void LChunkBuilder::AddInstruction(LInstruction* instr,
if (instr->IsCall()) {
HValue* hydrogen_value_for_lazy_bailout = hydrogen_val;
LInstruction* instruction_needing_environment = NULL;
if (hydrogen_val->HasObservableSideEffects()) {
HSimulate* sim = HSimulate::cast(hydrogen_val->next());
instruction_needing_environment = instr;
sim->ReplayEnvironment(current_block_->last_environment());
hydrogen_value_for_lazy_bailout = sim;
}
LInstruction* bailout = AssignEnvironment(new(zone()) LLazyBailout());
bailout->set_hydrogen_value(hydrogen_value_for_lazy_bailout);
chunk_->AddInstruction(bailout, current_block_);
if (instruction_needing_environment != NULL) {
// Store the lazy deopt environment with the instruction if needed.
// Right now it is only used for LInstanceOfKnownGlobal.
instruction_needing_environment->
SetDeferredLazyDeoptimizationEnvironment(bailout->environment());
}
}
}
@ -1016,21 +1008,21 @@ LInstruction* LChunkBuilder::DoArgumentsElements(HArgumentsElements* elems) {
LInstruction* LChunkBuilder::DoInstanceOf(HInstanceOf* instr) {
LOperand* left = UseFixed(instr->left(), rax);
LOperand* right = UseFixed(instr->right(), rdx);
LOperand* left =
UseFixed(instr->left(), InstanceOfDescriptor::LeftRegister());
LOperand* right =
UseFixed(instr->right(), InstanceOfDescriptor::RightRegister());
LOperand* context = UseFixed(instr->context(), rsi);
LInstanceOf* result = new(zone()) LInstanceOf(context, left, right);
LInstanceOf* result = new (zone()) LInstanceOf(context, left, right);
return MarkAsCall(DefineFixed(result, rax), instr);
}
LInstruction* LChunkBuilder::DoInstanceOfKnownGlobal(
HInstanceOfKnownGlobal* instr) {
LInstanceOfKnownGlobal* result =
new(zone()) LInstanceOfKnownGlobal(UseFixed(instr->context(), rsi),
UseFixed(instr->left(), rax),
FixedTemp(rdi));
return MarkAsCall(DefineFixed(result, rax), instr);
LInstruction* LChunkBuilder::DoHasInPrototypeChainAndBranch(
HHasInPrototypeChainAndBranch* instr) {
LOperand* object = UseRegister(instr->object());
LOperand* prototype = UseRegister(instr->prototype());
return new (zone()) LHasInPrototypeChainAndBranch(object, prototype);
}

View File

@ -83,10 +83,10 @@ class LCodeGen;
V(GetCachedArrayIndex) \
V(Goto) \
V(HasCachedArrayIndexAndBranch) \
V(HasInPrototypeChainAndBranch) \
V(HasInstanceTypeAndBranch) \
V(InnerAllocatedObject) \
V(InstanceOf) \
V(InstanceOfKnownGlobal) \
V(InstructionGap) \
V(Integer32ToDouble) \
V(InvokeFunction) \
@ -242,8 +242,6 @@ class LInstruction : public ZoneObject {
return IsCall();
}
virtual void SetDeferredLazyDeoptimizationEnvironment(LEnvironment* env) { }
// Interface to the register allocator and iterators.
bool IsMarkedAsCall() const { return IsCall(); }
@ -1176,33 +1174,19 @@ class LInstanceOf final : public LTemplateInstruction<1, 3, 0> {
};
class LInstanceOfKnownGlobal final : public LTemplateInstruction<1, 2, 1> {
class LHasInPrototypeChainAndBranch final : public LControlInstruction<2, 0> {
public:
LInstanceOfKnownGlobal(LOperand* context, LOperand* value, LOperand* temp) {
inputs_[0] = context;
inputs_[1] = value;
temps_[0] = temp;
LHasInPrototypeChainAndBranch(LOperand* object, LOperand* prototype) {
inputs_[0] = object;
inputs_[1] = prototype;
}
LOperand* context() { return inputs_[0]; }
LOperand* value() { return inputs_[1]; }
LOperand* temp() { return temps_[0]; }
LOperand* object() const { return inputs_[0]; }
LOperand* prototype() const { return inputs_[1]; }
DECLARE_CONCRETE_INSTRUCTION(InstanceOfKnownGlobal,
"instance-of-known-global")
DECLARE_HYDROGEN_ACCESSOR(InstanceOfKnownGlobal)
Handle<JSFunction> function() const { return hydrogen()->function(); }
LEnvironment* GetDeferredLazyDeoptimizationEnvironment() {
return lazy_deopt_env_;
}
virtual void SetDeferredLazyDeoptimizationEnvironment(
LEnvironment* env) override {
lazy_deopt_env_ = env;
}
private:
LEnvironment* lazy_deopt_env_;
DECLARE_CONCRETE_INSTRUCTION(HasInPrototypeChainAndBranch,
"has-in-prototype-chain-and-branch")
DECLARE_HYDROGEN_ACCESSOR(HasInPrototypeChainAndBranch)
};

View File

@ -3456,35 +3456,8 @@ void MacroAssembler::GetMapConstructor(Register result, Register map,
}
void MacroAssembler::TryGetFunctionPrototype(Register function,
Register result,
Label* miss,
bool miss_on_bound_function) {
Label non_instance;
if (miss_on_bound_function) {
// Check that the receiver isn't a smi.
testl(function, Immediate(kSmiTagMask));
j(zero, miss);
// Check that the function really is a function.
CmpObjectType(function, JS_FUNCTION_TYPE, result);
j(not_equal, miss);
movp(kScratchRegister,
FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
// It's not smi-tagged (stored in the top half of a smi-tagged 8-byte
// field).
TestBitSharedFunctionInfoSpecialField(kScratchRegister,
SharedFunctionInfo::kCompilerHintsOffset,
SharedFunctionInfo::kBoundFunction);
j(not_zero, miss);
// Make sure that the function has an instance prototype.
testb(FieldOperand(result, Map::kBitFieldOffset),
Immediate(1 << Map::kHasNonInstancePrototype));
j(not_zero, &non_instance, Label::kNear);
}
void MacroAssembler::TryGetFunctionPrototype(Register function, Register result,
Label* miss) {
// Get the prototype or initial map from the function.
movp(result,
FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
@ -3503,15 +3476,6 @@ void MacroAssembler::TryGetFunctionPrototype(Register function,
// Get the prototype from the initial map.
movp(result, FieldOperand(result, Map::kPrototypeOffset));
if (miss_on_bound_function) {
jmp(&done, Label::kNear);
// Non-instance prototype: Fetch prototype from constructor field
// in initial map.
bind(&non_instance);
GetMapConstructor(result, result, kScratchRegister);
}
// All done.
bind(&done);
}

View File

@ -1240,10 +1240,7 @@ class MacroAssembler: public Assembler {
// function and jumps to the miss label if the fast checks fail. The
// function register will be untouched; the other register may be
// clobbered.
void TryGetFunctionPrototype(Register function,
Register result,
Label* miss,
bool miss_on_bound_function = false);
void TryGetFunctionPrototype(Register function, Register result, Label* miss);
// Picks out an array index from the hash field.
// Register use:

View File

@ -0,0 +1,12 @@
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
function Bar() { }
function Baz() { }
Baz.prototype = { __proto__: new Bar() }
var x = new Baz();
function foo(y) { return y instanceof Bar; }
assertTrue(foo(x));
Baz.prototype.__proto__ = null;
assertFalse(foo(x));

View File

@ -0,0 +1,9 @@
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
function Foo() {}
var x = new Foo();
Foo.prototype = 1;
function foo() { return x instanceof Foo; }
assertThrows(foo, TypeError);

View File

@ -0,0 +1,10 @@
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
function Foo() {}
var x = new Foo();
function foo() { return x instanceof Foo; }
assertTrue(foo());
Foo.prototype = 1;
assertThrows(foo, TypeError);

View File

@ -32,7 +32,7 @@ PASS Object.prototype.toLocaleString.call(undefined) threw exception TypeError:
PASS Object.prototype.valueOf.call(undefined) threw exception TypeError: Cannot convert undefined or null to object.
PASS Object.prototype.hasOwnProperty.call(undefined, 'hasOwnProperty') threw exception TypeError: Cannot convert undefined or null to object.
PASS Object.prototype.propertyIsEnumerable.call(undefined, 'propertyIsEnumerable') threw exception TypeError: Cannot convert undefined or null to object.
PASS Object.prototype.isPrototypeOf.call(undefined, this) threw exception TypeError: Object.prototype.isPrototypeOf called on null or undefined.
PASS Object.prototype.isPrototypeOf.call(undefined, this) threw exception TypeError: Cannot convert undefined or null to object.
PASS successfullyParsed is true
TEST COMPLETE
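The updated expectation in the last hunk is what falls out of converting the receiver with ToObject before walking the argument's prototype chain; a minimal strict-mode sketch of that order of operations (myIsPrototypeOf is a placeholder name, not the actual implementation):

// Placeholder sketch; myIsPrototypeOf only illustrates the order of checks.
function myIsPrototypeOf(V) {
  'use strict';
  // 1. A non-object argument returns false before the receiver is touched.
  if (V === null || (typeof V !== 'object' && typeof V !== 'function')) {
    return false;
  }
  // 2. ToObject(this): null/undefined raise the generic conversion TypeError.
  if (this === null || this === undefined) {
    throw new TypeError('Cannot convert undefined or null to object');
  }
  var O = Object(this);
  // 3. Walk V's prototype chain looking for the receiver object O.
  for (var p = Object.getPrototypeOf(V); p !== null;
       p = Object.getPrototypeOf(p)) {
    if (p === O) return true;
  }
  return false;
}

With that order, Object.prototype.isPrototypeOf.call(undefined, this) throws the "Cannot convert undefined or null to object" TypeError, matching the new PASS line, while isPrototypeOf.call(undefined, 1) would still return false because the non-object argument is rejected first.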