Optimize instanceof further

If instanceof is performed against what is believed to be a constant global function, inline the instanceof check and move the call to the instanceof stub into deferred code. The inlined check is patched by the instanceof stub when it is called from the deferred code. This is indicated by the lithium instruction LInstanceOfKnownGlobal.

To help the patching, the delta from the return address to the patch site is placed just below the return address, in the edi slot of the pushad/popad area. This is safe because the edi register (which is pushed last) is a temporary for the lithium instruction.

As the instanceof stub can call other JavaScript code, an additional marking for saving all double registers has been added.

Also tweaked the instanceof stub to produce true/false objects instead of 0/1 in the case with deferred code.
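
In rough outline (an editorial sketch, not part of the original change), the deferred code requests the new stub behaviour by combining the flags added below, and the inlined site it patches is the cmp/jne/mov sequence documented in the ia32 stub:

// Sketch only: mirrors DoDeferredLInstanceOfKnownGlobal in the ia32 diff below.
InstanceofStub::Flags flags = static_cast<InstanceofStub::Flags>(
    InstanceofStub::kArgsInRegisters |
    InstanceofStub::kCallSiteInlineCheck |
    InstanceofStub::kReturnTrueFalseObject);
InstanceofStub stub(flags);
// When called, the stub rewrites the inlined "cmp edi, <hole>" and
// "mov eax, <hole>" immediates with the checked map and the true/false result.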
Review URL: http://codereview.chromium.org/5990005

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@6173 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
sgjesse@chromium.org 2011-01-05 11:17:37 +00:00
parent 44adf11d8a
commit 7311e10fdb
12 changed files with 397 additions and 58 deletions

View File

@@ -327,22 +327,38 @@ class InstanceofStub: public CodeStub {
public:
enum Flags {
kNoFlags = 0,
kArgsInRegisters = 1 << 0
kArgsInRegisters = 1 << 0,
kCallSiteInlineCheck = 1 << 1,
kReturnTrueFalseObject = 1 << 2
};
explicit InstanceofStub(Flags flags) : flags_(flags) { }
explicit InstanceofStub(Flags flags) : flags_(flags), name_(NULL) { }
static Register left();
static Register right();
void Generate(MacroAssembler* masm);
private:
Major MajorKey() { return Instanceof; }
int MinorKey() { return args_in_registers() ? 1 : 0; }
int MinorKey() { return static_cast<int>(flags_); }
bool args_in_registers() {
bool HasArgsInRegisters() const {
return (flags_ & kArgsInRegisters) != 0;
}
bool HasCallSiteInlineCheck() const {
return (flags_ & kCallSiteInlineCheck) != 0;
}
bool ReturnTrueFalseObject() const {
return (flags_ & kReturnTrueFalseObject) != 0;
}
const char* GetName();
Flags flags_;
char* name_;
};
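
Editorial note, not part of the change: because MinorKey() now encodes all flag bits, each flag combination compiles to its own stub. A minimal sketch, assuming the enum values above:

// Hypothetical usage of the new flags.
InstanceofStub::Flags flags = static_cast<InstanceofStub::Flags>(
    InstanceofStub::kArgsInRegisters | InstanceofStub::kCallSiteInlineCheck);
InstanceofStub stub(flags);
// The (private) MinorKey() evaluates to 3 here, distinct from the
// kArgsInRegisters-only stub (1) and the kNoFlags stub (0).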

View File

@@ -73,6 +73,7 @@ class LChunkBuilder;
// HCompare
// HCompareJSObjectEq
// HInstanceOf
// HInstanceOfKnownGlobal
// HLoadKeyed
// HLoadKeyedFastElement
// HLoadKeyedGeneric
@@ -210,6 +211,7 @@ class LChunkBuilder;
V(GlobalReceiver) \
V(Goto) \
V(InstanceOf) \
V(InstanceOfKnownGlobal) \
V(IsNull) \
V(IsObject) \
V(IsSmi) \
@@ -2262,6 +2264,28 @@ class HInstanceOf: public HBinaryOperation {
};
class HInstanceOfKnownGlobal: public HUnaryOperation {
public:
HInstanceOfKnownGlobal(HValue* left, Handle<JSFunction> right)
: HUnaryOperation(left), function_(right) {
set_representation(Representation::Tagged());
SetFlagMask(AllSideEffects());
}
Handle<JSFunction> function() { return function_; }
virtual Representation RequiredInputRepresentation(int index) const {
return Representation::Tagged();
}
DECLARE_CONCRETE_INSTRUCTION(InstanceOfKnownGlobal,
"instance_of_known_global")
private:
Handle<JSFunction> function_;
};
class HPower: public HBinaryOperation {
public:
HPower(HValue* left, HValue* right)

View File

@@ -4862,7 +4862,40 @@ void HGraphBuilder::VisitCompareOperation(CompareOperation* expr) {
TypeInfo info = oracle()->CompareType(expr, TypeFeedbackOracle::RESULT);
HInstruction* instr = NULL;
if (op == Token::INSTANCEOF) {
instr = new HInstanceOf(left, right);
// Check to see if the rhs of the instanceof is a global function not
// residing in new space. If it is we assume that the function will stay the
// same.
Handle<JSFunction> target = Handle<JSFunction>::null();
Variable* var = expr->right()->AsVariableProxy()->AsVariable();
bool global_function = (var != NULL) && var->is_global() && !var->is_this();
CompilationInfo* info = graph()->info();
if (global_function &&
info->has_global_object() &&
!info->global_object()->IsAccessCheckNeeded()) {
Handle<String> name = var->name();
Handle<GlobalObject> global(info->global_object());
LookupResult lookup;
global->Lookup(*name, &lookup);
if (lookup.IsProperty() &&
lookup.type() == NORMAL &&
lookup.GetValue()->IsJSFunction()) {
Handle<JSFunction> candidate(JSFunction::cast(lookup.GetValue()));
// If the function is in new space we assume it's more likely to
// change and thus prefer the general IC code.
if (!Heap::InNewSpace(*candidate)) {
target = candidate;
}
}
}
// If the target is not null we have found a known global function that is
// assumed to stay the same for this instanceof.
if (target.is_null()) {
instr = new HInstanceOf(left, right);
} else {
AddInstruction(new HCheckFunction(right, target));
instr = new HInstanceOfKnownGlobal(left, target);
}
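// Editorial example (not from the original change): with a top-level
// "function F() {}", the expression "o instanceof F" can take the
// HInstanceOfKnownGlobal path once F has been tenured out of new space,
// while "o instanceof someLocalVariable" keeps using the generic HInstanceOf.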
} else if (op == Token::IN) {
BAILOUT("Unsupported comparison: in");
} else if (info.IsNonPrimitive()) {

View File

@@ -4973,7 +4973,26 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
}
// Generate stub code for instanceof.
// This code can patch a call site inlined cache of the instance of check,
// which looks like this.
//
// 81 ff XX XX XX XX cmp edi, <the hole, patched to a map>
// 75 0a jne <some near label>
// b8 XX XX XX XX mov eax, <the hole, patched to either true or false>
//
// If call site patching is requested the stack will have the delta from the
// return address to the cmp instruction just below the return address. This
// also means that call site patching can only take place with arguments in
// registers. TOS looks like this when call site patching is requested
//
// esp[0] : return address
// esp[4] : delta from return address to cmp instruction
//
void InstanceofStub::Generate(MacroAssembler* masm) {
// Call site inlining and patching implies arguments in registers.
ASSERT(HasArgsInRegisters() || !HasCallSiteInlineCheck());
// Fixed register usage throughout the stub.
Register object = eax; // Object (lhs).
Register map = ebx; // Map of the object.
@@ -4981,9 +5000,22 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
Register prototype = edi; // Prototype of the function.
Register scratch = ecx;
// Constants describing the call site code to patch.
static const int kDeltaToCmpImmediate = 2;
static const int kDeltaToMov = 8;
static const int kDeltaToMovImmediate = 9;
static const int8_t kCmpEdiImmediateByte1 = static_cast<int8_t>(0x81);
static const int8_t kCmpEdiImmediateByte2 = static_cast<int8_t>(0xff);
static const int8_t kMovEaxImmediateByte = static_cast<int8_t>(0xb8);
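// Editorial note (not part of the change): the deltas follow from the ia32
// encodings of the patch site shown in the comment above, counted from the
// start of the cmp instruction:
//   81 ff XX XX XX XX   cmp edi, imm32  - immediate at offset 2
//                                         (kDeltaToCmpImmediate)
//   75 0a               jne <near>      - 6 + 2 = 8 bytes, so the mov opcode
//                                         0xb8 is at offset 8 (kDeltaToMov)
//   b8 XX XX XX XX      mov eax, imm32  - its immediate at offset 9
//                                         (kDeltaToMovImmediate)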
ExternalReference roots_address = ExternalReference::roots_address();
ASSERT_EQ(object.code(), InstanceofStub::left().code());
ASSERT_EQ(function.code(), InstanceofStub::right().code());
// Get the object and function - they are always both needed.
Label slow, not_js_object;
if (!args_in_registers()) {
if (!HasArgsInRegisters()) {
__ mov(object, Operand(esp, 2 * kPointerSize));
__ mov(function, Operand(esp, 1 * kPointerSize));
}
@@ -4993,22 +5025,26 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
__ j(zero, &not_js_object, not_taken);
__ IsObjectJSObjectType(object, map, scratch, &not_js_object);
// Look up the function and the map in the instanceof cache.
NearLabel miss;
ExternalReference roots_address = ExternalReference::roots_address();
__ mov(scratch, Immediate(Heap::kInstanceofCacheFunctionRootIndex));
__ cmp(function,
Operand::StaticArray(scratch, times_pointer_size, roots_address));
__ j(not_equal, &miss);
__ mov(scratch, Immediate(Heap::kInstanceofCacheMapRootIndex));
__ cmp(map, Operand::StaticArray(scratch, times_pointer_size, roots_address));
__ j(not_equal, &miss);
__ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex));
__ mov(eax, Operand::StaticArray(scratch, times_pointer_size, roots_address));
__ IncrementCounter(&Counters::instance_of_cache, 1);
__ ret((args_in_registers() ? 0 : 2) * kPointerSize);
// If there is a call site cache don't look in the global cache, but do the
// real lookup and update the call site cache.
if (!HasCallSiteInlineCheck()) {
// Look up the function and the map in the instanceof cache.
NearLabel miss;
__ mov(scratch, Immediate(Heap::kInstanceofCacheFunctionRootIndex));
__ cmp(function,
Operand::StaticArray(scratch, times_pointer_size, roots_address));
__ j(not_equal, &miss);
__ mov(scratch, Immediate(Heap::kInstanceofCacheMapRootIndex));
__ cmp(map, Operand::StaticArray(
scratch, times_pointer_size, roots_address));
__ j(not_equal, &miss);
__ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex));
__ mov(eax, Operand::StaticArray(
scratch, times_pointer_size, roots_address));
__ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
__ bind(&miss);
}
__ bind(&miss);
// Get the prototype of the function.
__ TryGetFunctionPrototype(function, prototype, scratch, &slow);
@@ -5017,13 +5053,29 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
__ j(zero, &slow, not_taken);
__ IsObjectJSObjectType(prototype, scratch, scratch, &slow);
// Update the golbal instanceof cache with the current map and function. The
// cached answer will be set when it is known.
// Update the global instanceof or call site inlined cache with the current
// map and function. The cached answer will be set when it is known below.
if (!HasCallSiteInlineCheck()) {
__ mov(scratch, Immediate(Heap::kInstanceofCacheMapRootIndex));
__ mov(Operand::StaticArray(scratch, times_pointer_size, roots_address), map);
__ mov(scratch, Immediate(Heap::kInstanceofCacheFunctionRootIndex));
__ mov(Operand::StaticArray(scratch, times_pointer_size, roots_address),
function);
} else {
// The constants for the code patching are based on no push instructions
// at the call site.
ASSERT(HasArgsInRegisters());
// Get return address and delta to inlined map check.
__ mov(scratch, Operand(esp, 0 * kPointerSize));
__ sub(scratch, Operand(esp, 1 * kPointerSize));
if (FLAG_debug_code) {
__ cmpb(Operand(scratch, 0), kCmpEdiImmediateByte1);
__ Assert(equal, "InstanceofStub unexpected call site cache (cmp 1)");
__ cmpb(Operand(scratch, 1), kCmpEdiImmediateByte2);
__ Assert(equal, "InstanceofStub unexpected call site cache (cmp 2)");
}
__ mov(Operand(scratch, kDeltaToCmpImmediate), map);
}
// Loop through the prototype chain of the object looking for the function
// prototype.
@@ -5039,18 +5091,48 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
__ jmp(&loop);
__ bind(&is_instance);
__ IncrementCounter(&Counters::instance_of_stub_true, 1);
__ Set(eax, Immediate(0));
__ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex));
__ mov(Operand::StaticArray(scratch, times_pointer_size, roots_address), eax);
__ ret((args_in_registers() ? 0 : 2) * kPointerSize);
if (!HasCallSiteInlineCheck()) {
__ Set(eax, Immediate(0));
__ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex));
__ mov(Operand::StaticArray(scratch,
times_pointer_size, roots_address), eax);
} else {
// Get return address and delta to inlined map check.
__ mov(eax, Factory::true_value());
__ mov(scratch, Operand(esp, 0 * kPointerSize));
__ sub(scratch, Operand(esp, 1 * kPointerSize));
if (FLAG_debug_code) {
__ cmpb(Operand(scratch, kDeltaToMov), kMovEaxImmediateByte);
__ Assert(equal, "InstanceofStub unexpected call site cache (mov)");
}
__ mov(Operand(scratch, kDeltaToMovImmediate), eax);
if (!ReturnTrueFalseObject()) {
__ Set(eax, Immediate(0));
}
}
__ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
__ bind(&is_not_instance);
__ IncrementCounter(&Counters::instance_of_stub_false, 1);
__ Set(eax, Immediate(Smi::FromInt(1)));
__ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex));
__ mov(Operand::StaticArray(scratch, times_pointer_size, roots_address), eax);
__ ret((args_in_registers() ? 0 : 2) * kPointerSize);
if (!HasCallSiteInlineCheck()) {
__ Set(eax, Immediate(Smi::FromInt(1)));
__ mov(scratch, Immediate(Heap::kInstanceofCacheAnswerRootIndex));
__ mov(Operand::StaticArray(
scratch, times_pointer_size, roots_address), eax);
} else {
// Get return address and delta to inlined map check.
__ mov(eax, Factory::false_value());
__ mov(scratch, Operand(esp, 0 * kPointerSize));
__ sub(scratch, Operand(esp, 1 * kPointerSize));
if (FLAG_debug_code) {
__ cmpb(Operand(scratch, kDeltaToMov), kMovEaxImmediateByte);
__ Assert(equal, "InstanceofStub unexpected call site cache (mov)");
}
__ mov(Operand(scratch, kDeltaToMovImmediate), eax);
if (!ReturnTrueFalseObject()) {
__ Set(eax, Immediate(Smi::FromInt(1)));
}
}
__ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
Label object_not_null, object_not_null_or_smi;
__ bind(&not_js_object);
@@ -5064,39 +5146,72 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
// Null is not instance of anything.
__ cmp(object, Factory::null_value());
__ j(not_equal, &object_not_null);
__ IncrementCounter(&Counters::instance_of_stub_false_null, 1);
__ Set(eax, Immediate(Smi::FromInt(1)));
__ ret((args_in_registers() ? 0 : 2) * kPointerSize);
__ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
__ bind(&object_not_null);
// Smi values is not instance of anything.
__ test(object, Immediate(kSmiTagMask));
__ j(not_zero, &object_not_null_or_smi, not_taken);
__ Set(eax, Immediate(Smi::FromInt(1)));
__ ret((args_in_registers() ? 0 : 2) * kPointerSize);
__ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
__ bind(&object_not_null_or_smi);
// String values is not instance of anything.
Condition is_string = masm->IsObjectStringType(object, scratch, scratch);
__ j(NegateCondition(is_string), &slow);
__ IncrementCounter(&Counters::instance_of_stub_false_string, 1);
__ Set(eax, Immediate(Smi::FromInt(1)));
__ ret((args_in_registers() ? 0 : 2) * kPointerSize);
__ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
// Slow-case: Go through the JavaScript implementation.
__ bind(&slow);
if (args_in_registers()) {
if (HasArgsInRegisters()) {
// Push arguments below return address.
__ pop(scratch);
__ push(object);
__ push(function);
__ push(scratch);
}
__ IncrementCounter(&Counters::instance_of_slow, 1);
__ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION);
}
Register InstanceofStub::left() { return eax; }
Register InstanceofStub::right() { return edx; }
const char* InstanceofStub::GetName() {
if (name_ != NULL) return name_;
const int kMaxNameLength = 100;
name_ = Bootstrapper::AllocateAutoDeletedArray(kMaxNameLength);
if (name_ == NULL) return "OOM";
const char* args = "";
if (HasArgsInRegisters()) {
args = "_REGS";
}
const char* inline_check = "";
if (HasCallSiteInlineCheck()) {
inline_check = "_INLINE";
}
const char* return_true_false_object = "";
if (ReturnTrueFalseObject()) {
return_true_false_object = "_TRUEFALSE";
}
OS::SNPrintF(Vector<char>(name_, kMaxNameLength),
"InstanceofStub%s%s%s",
args,
inline_check,
return_true_false_object);
return name_;
}
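// Editorial example: with all three flags set, the name built above is
// "InstanceofStub_REGS_INLINE_TRUEFALSE".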
int CompareStub::MinorKey() {
// Encode the three parameters in a unique 16 bit value. To avoid duplicate
// stubs the never NaN NaN condition is only taken into account if the

View File

@@ -4042,7 +4042,6 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
case Token::INSTANCEOF: {
VisitForStackValue(expr->right());
__ IncrementCounter(&Counters::instance_of_full, 1);
InstanceofStub stub(InstanceofStub::kNoFlags);
__ CallStub(&stub);
PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);

View File

@@ -1694,7 +1694,7 @@ void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) {
void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
// Object and function are in fixed registers eax and edx.
// Object and function are in fixed registers defined by the stub.
InstanceofStub stub(InstanceofStub::kArgsInRegisters);
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
@@ -1720,6 +1720,107 @@ void LCodeGen::DoInstanceOfAndBranch(LInstanceOfAndBranch* instr) {
}
void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
class DeferredInstanceOfKnownGlobal: public LDeferredCode {
public:
DeferredInstanceOfKnownGlobal(LCodeGen* codegen,
LInstanceOfKnownGlobal* instr)
: LDeferredCode(codegen), instr_(instr) { }
virtual void Generate() {
codegen()->DoDeferredLInstanceOfKnownGlobal(instr_, &map_check_);
}
Label* map_check() { return &map_check_; }
private:
LInstanceOfKnownGlobal* instr_;
Label map_check_;
};
DeferredInstanceOfKnownGlobal* deferred;
deferred = new DeferredInstanceOfKnownGlobal(this, instr);
Label done, false_result;
Register object = ToRegister(instr->input());
Register temp = ToRegister(instr->temp());
// A Smi is not instance of anything.
__ test(object, Immediate(kSmiTagMask));
__ j(zero, &false_result, not_taken);
// This is the inlined call site instanceof cache. The two occurrences of the
// hole value will be patched to the last map/result pair generated by the
// instanceof stub.
NearLabel cache_miss;
Register map = ToRegister(instr->temp());
__ mov(map, FieldOperand(object, HeapObject::kMapOffset));
__ bind(deferred->map_check()); // Label for calculating code patching.
__ cmp(map, Factory::the_hole_value()); // Patched to cached map.
__ j(not_equal, &cache_miss, not_taken);
__ mov(eax, Factory::the_hole_value()); // Patched to either true or false.
__ jmp(&done);
// The inlined call site cache did not match. Check null and string before
// calling the deferred code.
__ bind(&cache_miss);
// Null is not instance of anything.
__ cmp(object, Factory::null_value());
__ j(equal, &false_result);
// String values is not instance of anything.
Condition is_string = masm_->IsObjectStringType(object, temp, temp);
__ j(is_string, &false_result);
// Go to the deferred code.
__ jmp(deferred->entry());
__ bind(&false_result);
__ mov(ToRegister(instr->result()), Factory::false_value());
// Here result has either true or false. Deferred code also produces true or
// false object.
__ bind(deferred->exit());
__ bind(&done);
}
void LCodeGen::DoDeferredLInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
Label* map_check) {
__ PushSafepointRegisters();
InstanceofStub::Flags flags = InstanceofStub::kNoFlags;
flags = static_cast<InstanceofStub::Flags>(
flags | InstanceofStub::kArgsInRegisters);
flags = static_cast<InstanceofStub::Flags>(
flags | InstanceofStub::kCallSiteInlineCheck);
flags = static_cast<InstanceofStub::Flags>(
flags | InstanceofStub::kReturnTrueFalseObject);
InstanceofStub stub(flags);
// Get the temp register reserved by the instruction. This needs to be edi as
// its slot in the pushed safepoint register area is used to communicate the
// offset to the location of the map check.
Register temp = ToRegister(instr->temp());
ASSERT(temp.is(edi));
__ mov(InstanceofStub::right(), Immediate(instr->function()));
static const int kAdditionalDelta = 13;
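// Editorial sketch of the 13 bytes (assuming the usual ia32 encodings and
// that edi's slot needs no displacement byte, i.e. EspIndexForPushAll(edi)
// is 0):
//   bf XX XX XX XX   mov edi, <delta>         5 bytes
//   89 3c 24         mov [esp], edi           3 bytes
//   e8 XX XX XX XX   call <InstanceofStub>    5 bytes
// That is the code emitted between before_push_delta and the return address,
// which the ASSERT_EQ below verifies.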
int delta = masm_->SizeOfCodeGeneratedSince(map_check) + kAdditionalDelta;
Label before_push_delta;
__ bind(&before_push_delta);
__ mov(temp, Immediate(delta));
__ mov(Operand(esp, EspIndexForPushAll(temp) * kPointerSize), temp);
__ call(stub.GetCode(), RelocInfo::CODE_TARGET);
ASSERT_EQ(kAdditionalDelta,
masm_->SizeOfCodeGeneratedSince(&before_push_delta));
RecordSafepointWithRegisters(
instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
// Put the result value into the eax slot and restore all registers.
__ mov(Operand(esp, EspIndexForPushAll(eax) * kPointerSize), eax);
__ PopSafepointRegisters();
}
static Condition ComputeCompareCondition(Token::Value op) {
switch (op) {
case Token::EQ_STRICT:

View File

@@ -77,6 +77,8 @@ class LCodeGen BASE_EMBEDDED {
void DoDeferredTaggedToI(LTaggedToI* instr);
void DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr);
void DoDeferredStackCheck(LGoto* instr);
void DoDeferredLInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
Label* map_check);
// Parallel move support.
void DoParallelMove(LParallelMove* move);

View File

@@ -833,6 +833,12 @@ LInstruction* LChunkBuilder::MarkAsCall(LInstruction* instr,
}
LInstruction* LChunkBuilder::MarkAsSaveDoubles(LInstruction* instr) {
allocator_->MarkAsSaveDoubles();
return instr;
}
LInstruction* LChunkBuilder::AssignPointerMap(LInstruction* instr) {
ASSERT(!instr->HasPointerMap());
instr->set_pointer_map(new LPointerMap(position_));
@@ -1257,10 +1263,11 @@ LInstruction* LChunkBuilder::DoBranch(HBranch* instr) {
} else if (v->IsInstanceOf()) {
HInstanceOf* instance_of = HInstanceOf::cast(v);
LInstruction* result =
new LInstanceOfAndBranch(UseFixed(instance_of->left(), eax),
UseFixed(instance_of->right(), edx),
first_id,
second_id);
new LInstanceOfAndBranch(
UseFixed(instance_of->left(), InstanceofStub::left()),
UseFixed(instance_of->right(), InstanceofStub::right()),
first_id,
second_id);
return MarkAsCall(result, instr);
} else if (v->IsTypeofIs()) {
HTypeofIs* typeof_is = HTypeofIs::cast(v);
@@ -1308,12 +1315,23 @@ LInstruction* LChunkBuilder::DoArgumentsElements(HArgumentsElements* elems) {
LInstruction* LChunkBuilder::DoInstanceOf(HInstanceOf* instr) {
LInstruction* result =
new LInstanceOf(UseFixed(instr->left(), eax),
UseFixed(instr->right(), edx));
new LInstanceOf(UseFixed(instr->left(), InstanceofStub::left()),
UseFixed(instr->right(), InstanceofStub::right()));
return MarkAsCall(DefineFixed(result, eax), instr);
}
LInstruction* LChunkBuilder::DoInstanceOfKnownGlobal(
HInstanceOfKnownGlobal* instr) {
LInstruction* result =
new LInstanceOfKnownGlobal(
UseFixed(instr->value(), InstanceofStub::left()),
FixedTemp(edi));
MarkAsSaveDoubles(result);
return AssignEnvironment(AssignPointerMap(DefineFixed(result, eax)));
}
LInstruction* LChunkBuilder::DoApplyArguments(HApplyArguments* instr) {
LOperand* function = UseFixed(instr->function(), edi);
LOperand* receiver = UseFixed(instr->receiver(), eax);

View File

@@ -63,6 +63,7 @@ class LGapNode;
// LDivI
// LInstanceOf
// LInstanceOfAndBranch
// LInstanceOfKnownGlobal
// LLoadKeyedFastElement
// LLoadKeyedGeneric
// LModI
@@ -207,6 +208,7 @@ class LGapNode;
V(FixedArrayLength) \
V(InstanceOf) \
V(InstanceOfAndBranch) \
V(InstanceOfKnownGlobal) \
V(Integer32ToDouble) \
V(IsNull) \
V(IsNullAndBranch) \
@@ -1008,6 +1010,23 @@ class LInstanceOfAndBranch: public LInstanceOf {
};
class LInstanceOfKnownGlobal: public LUnaryOperation {
public:
LInstanceOfKnownGlobal(LOperand* left, LOperand* temp)
: LUnaryOperation(left), temp_(temp) { }
DECLARE_CONCRETE_INSTRUCTION(InstanceOfKnownGlobal,
"instance-of-known-global")
DECLARE_HYDROGEN_ACCESSOR(InstanceOfKnownGlobal)
Handle<JSFunction> function() const { return hydrogen()->function(); }
LOperand* temp() const { return temp_; }
private:
LOperand* temp_;
};
class LBoundsCheck: public LBinaryOperation {
public:
LBoundsCheck(LOperand* index, LOperand* length)
@@ -2102,6 +2121,7 @@ class LChunkBuilder BASE_EMBEDDED {
LInstruction* instr,
HInstruction* hinstr,
CanDeoptimize can_deoptimize = CANNOT_DEOPTIMIZE_EAGERLY);
LInstruction* MarkAsSaveDoubles(LInstruction* instr);
LInstruction* SetInstructionPendingDeoptimizationEnvironment(
LInstruction* instr, int ast_id);

View File

@@ -940,6 +940,9 @@ void LAllocator::ProcessInstructions(HBasicBlock* block, BitVector* live) {
curr_position.InstructionEnd());
}
}
}
if (summary->IsCall() || summary->IsSaveDoubles()) {
for (int i = 0; i < DoubleRegister::kNumAllocatableRegisters; ++i) {
if (output == NULL || !output->IsDoubleRegister() ||
output->index() != i) {
@@ -1607,6 +1610,11 @@ void LAllocator::MarkAsCall() {
}
void LAllocator::MarkAsSaveDoubles() {
current_summary()->MarkAsSaveDoubles();
}
void LAllocator::RecordDefinition(HInstruction* instr, LUnallocated* operand) {
operand->set_virtual_register(instr->id());
current_summary()->SetOutput(operand);

View File

@@ -482,7 +482,11 @@ class LDoubleRegister: public LOperand {
class InstructionSummary: public ZoneObject {
public:
InstructionSummary()
: output_operand_(NULL), input_count_(0), operands_(4), is_call_(false) {}
: output_operand_(NULL),
input_count_(0),
operands_(4),
is_call_(false),
is_save_doubles_(false) {}
// Output operands.
LOperand* Output() const { return output_operand_; }
@@ -510,11 +514,15 @@ class InstructionSummary: public ZoneObject {
void MarkAsCall() { is_call_ = true; }
bool IsCall() const { return is_call_; }
void MarkAsSaveDoubles() { is_save_doubles_ = true; }
bool IsSaveDoubles() const { return is_save_doubles_; }
private:
LOperand* output_operand_;
int input_count_;
ZoneList<LOperand*> operands_;
bool is_call_;
bool is_save_doubles_;
};
// Representation of the non-empty interval [start,end[.
@@ -824,6 +832,9 @@ class LAllocator BASE_EMBEDDED {
// Marks the current instruction as a call.
void MarkAsCall();
// Marks the current instruction as requiring saving double registers.
void MarkAsSaveDoubles();
// Checks whether the value of a given virtual register is tagged.
bool HasTaggedValue(int virtual_register) const;

View File

@@ -249,15 +249,7 @@ namespace internal {
SC(smi_checks_removed, V8.SmiChecksRemoved) \
SC(map_checks_removed, V8.MapChecksRemoved) \
SC(quote_json_char_count, V8.QuoteJsonCharacterCount) \
SC(quote_json_char_recount, V8.QuoteJsonCharacterReCount) \
SC(instance_of, V8.InstanceOf) \
SC(instance_of_cache, V8.InstanceOfCache) \
SC(instance_of_stub_true, V8.InstanceOfStubTrue) \
SC(instance_of_stub_false, V8.InstanceOfStubFalse) \
SC(instance_of_stub_false_null, V8.InstanceOfStubFalseNull) \
SC(instance_of_stub_false_string, V8.InstanceOfStubFalseString) \
SC(instance_of_full, V8.InstanceOfFull) \
SC(instance_of_slow, V8.InstanceOfSlow)
SC(quote_json_char_recount, V8.QuoteJsonCharacterReCount)
// This file contains all the v8 counters that are in use.