Fix SmiCompare on 64-bit to distinguish between comparisons where we know
that both sides are smis and those where we don't.  Fix inlined symbol
table probes to cope with strings, undefined and null (indicating a
deleted entry).  Also includes some changes to other architectures that
were found with the new asserts.
Review URL: http://codereview.chromium.org/6682026

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@7172 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
erik.corry@gmail.com 2011-03-15 10:03:57 +00:00
parent fc38a2e509
commit 941701d0b9
15 changed files with 276 additions and 206 deletions
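For context before the per-file hunks: a minimal standalone sketch (not V8 code; the names and the main() harness are illustrative) of the x64 smi property that the new SmiCompare asserts depend on. An x64 smi keeps its 32-bit payload in the upper half of the word, so comparing a known smi against a constant smi only needs the upper dword; that shortcut is meaningless for a heap pointer, which is why call sites that may see a non-smi (such as the frame context slot) switch to the new Cmp overloads in the hunks below.

#include <cstdint>
#include <cstdio>

// Illustrative model of the x64 smi layout (kSmiShift == 32): the payload
// sits in the upper 32 bits of the word and the lower 32 bits are zero.
static int64_t MakeSmi(int32_t value) {
  return static_cast<int64_t>(value) << 32;
}

// Mirrors the idea behind the patched SmiCompare(const Operand&, Smi*):
// compare only the upper dword against the constant's payload. This is
// only meaningful when 'word' really is a smi, hence the debug asserts.
static bool SmiEqualsConstant(int64_t word, int32_t constant) {
  return static_cast<int32_t>(word >> 32) == constant;
}

int main() {
  printf("%d\n", SmiEqualsConstant(MakeSmi(2), 2));  // 1
  printf("%d\n", SmiEqualsConstant(MakeSmi(2), 3));  // 0
  // A tagged heap pointer (low bit set) is not a smi; the upper-dword
  // compare can "match" by accident, which is why values that may hold a
  // pointer must use the untyped Cmp instead of SmiCompare.
  int64_t fake_pointer = 0x00007f1234567891;
  printf("%d\n", SmiEqualsConstant(fake_pointer, 0x7f12));  // 1, but bogus
  return 0;
}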


@ -5086,7 +5086,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
__ CompareObjectType(r7, r0, r0, CODE_TYPE);
__ b(ne, &runtime);
// r3: encoding of subject string (1 if ascii, 0 if two_byte);
// r3: encoding of subject string (1 if ASCII, 0 if two_byte);
// r7: code
// subject: Subject string
// regexp_data: RegExp data (FixedArray)
@ -5096,7 +5096,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
__ mov(r1, Operand(r1, ASR, kSmiTagSize));
// r1: previous index
// r3: encoding of subject string (1 if ascii, 0 if two_byte);
// r3: encoding of subject string (1 if ASCII, 0 if two_byte);
// r7: code
// subject: Subject string
// regexp_data: RegExp data (FixedArray)
@ -5628,7 +5628,7 @@ void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
__ b(ne, &slow_case_);
__ LoadRoot(result_, Heap::kSingleCharacterStringCacheRootIndex);
// At this point code register contains smi tagged ascii char code.
// At this point code register contains smi tagged ASCII char code.
STATIC_ASSERT(kSmiTag == 0);
__ add(result_, result_, Operand(code_, LSL, kPointerSizeLog2 - kSmiTagSize));
__ ldr(result_, FieldMemOperand(result_, FixedArray::kHeaderSize));
@ -5960,7 +5960,6 @@ void StringHelper::GenerateTwoCharacterSymbolTableProbe(MacroAssembler* masm,
Register symbol_table = c2;
__ LoadRoot(symbol_table, Heap::kSymbolTableRootIndex);
// Load undefined value
Register undefined = scratch4;
__ LoadRoot(undefined, Heap::kUndefinedValueRootIndex);
@ -5981,6 +5980,7 @@ void StringHelper::GenerateTwoCharacterSymbolTableProbe(MacroAssembler* masm,
// mask: capacity mask
// first_symbol_table_element: address of the first element of
// the symbol table
// undefined: the undefined object
// scratch: -
// Perform a number of probes in the symbol table.
@ -6008,20 +6008,32 @@ void StringHelper::GenerateTwoCharacterSymbolTableProbe(MacroAssembler* masm,
kPointerSizeLog2));
// If entry is undefined no string with this hash can be found.
__ cmp(candidate, undefined);
Label is_string;
__ CompareObjectType(candidate, scratch, scratch, ODDBALL_TYPE);
__ b(ne, &is_string);
__ cmp(undefined, candidate);
__ b(eq, not_found);
// Must be null (deleted entry).
if (FLAG_debug_code) {
__ LoadRoot(ip, Heap::kNullValueRootIndex);
__ cmp(ip, candidate);
__ Assert(eq, "oddball in symbol table is not undefined or null");
}
__ jmp(&next_probe[i]);
__ bind(&is_string);
// Check that the candidate is a non-external ASCII string. The instance
// type is still in the scratch register from the CompareObjectType
// operation.
__ JumpIfInstanceTypeIsNotSequentialAscii(scratch, scratch, &next_probe[i]);
// If length is not 2 the string is not a candidate.
__ ldr(scratch, FieldMemOperand(candidate, String::kLengthOffset));
__ cmp(scratch, Operand(Smi::FromInt(2)));
__ b(ne, &next_probe[i]);
// Check that the candidate is a non-external ascii string.
__ ldr(scratch, FieldMemOperand(candidate, HeapObject::kMapOffset));
__ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
__ JumpIfInstanceTypeIsNotSequentialAscii(scratch, scratch,
&next_probe[i]);
// Check if the two characters match.
// Assumes that word load is little endian.
__ ldrh(scratch, FieldMemOperand(candidate, SeqAsciiString::kHeaderSize));
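The hunk above is the ARM half of the symbol-table-probe fix from the commit message: an undefined slot still means the two-character symbol cannot exist, a null slot now means a deleted entry and the probe just moves on, and anything else is treated as a string candidate to be checked for length, encoding and characters (the x64 version further down makes the same distinction with CmpObjectType). A hedged C++-level sketch of that decision, with purely illustrative names:

#include <cassert>

// Stand-ins for the three kinds of value a symbol-table slot can hold.
enum class Slot { kUndefined, kNull, kStringCandidate };

enum class ProbeAction {
  kNotFound,       // undefined: no symbol with this hash exists
  kNextProbe,      // null: deleted entry, the symbol may still be further on
  kCheckCandidate  // a string: verify length == 2, encoding and characters
};

ProbeAction Classify(Slot slot) {
  switch (slot) {
    case Slot::kUndefined:       return ProbeAction::kNotFound;
    case Slot::kNull:            return ProbeAction::kNextProbe;
    case Slot::kStringCandidate: return ProbeAction::kCheckCandidate;
  }
  return ProbeAction::kNotFound;  // unreachable, keeps compilers quiet
}

int main() {
  // Before this change only the undefined case was handled inline; a
  // deleted (null) entry now simply advances to the next probe.
  assert(Classify(Slot::kUndefined) == ProbeAction::kNotFound);
  assert(Classify(Slot::kNull) == ProbeAction::kNextProbe);
  assert(Classify(Slot::kStringCandidate) == ProbeAction::kCheckCandidate);
  return 0;
}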
@ -6177,7 +6189,7 @@ void SubStringStub::Generate(MacroAssembler* masm) {
// r3: from index (untaged smi)
// r5: string.
// r7 (a.k.a. from): from offset (smi)
// Check for flat ascii string.
// Check for flat ASCII string.
Label non_ascii_flat;
__ tst(r1, Operand(kStringEncodingMask));
STATIC_ASSERT(kTwoByteStringTag == 0);
@ -6353,10 +6365,10 @@ void StringCompareStub::Generate(MacroAssembler* masm) {
__ bind(&not_same);
// Check that both objects are sequential ascii strings.
// Check that both objects are sequential ASCII strings.
__ JumpIfNotBothSequentialAsciiStrings(r1, r0, r2, r3, &runtime);
// Compare flat ascii strings natively. Remove arguments from stack first.
// Compare flat ASCII strings natively. Remove arguments from stack first.
__ IncrementCounter(&Counters::string_compare_native, 1, r2, r3);
__ add(sp, sp, Operand(2 * kPointerSize));
GenerateCompareFlatAsciiStrings(masm, r1, r0, r2, r3, r4, r5);
@ -6448,12 +6460,12 @@ void StringAddStub::Generate(MacroAssembler* masm) {
// Adding two lengths can't overflow.
STATIC_ASSERT(String::kMaxLength < String::kMaxLength * 2);
__ add(r6, r2, Operand(r3));
// Use the runtime system when adding two one character strings, as it
// contains optimizations for this specific case using the symbol table.
// Use the symbol table when adding two one character strings, as it
// helps later optimizations to return a symbol here.
__ cmp(r6, Operand(2));
__ b(ne, &longer_than_two);
// Check that both strings are non-external ascii strings.
// Check that both strings are non-external ASCII strings.
if (flags_ != NO_STRING_ADD_FLAGS) {
__ ldr(r4, FieldMemOperand(r0, HeapObject::kMapOffset));
__ ldr(r5, FieldMemOperand(r1, HeapObject::kMapOffset));
@ -6501,7 +6513,7 @@ void StringAddStub::Generate(MacroAssembler* masm) {
__ b(hs, &string_add_runtime);
// If result is not supposed to be flat, allocate a cons string object.
// If both strings are ascii the result is an ascii cons string.
// If both strings are ASCII the result is an ASCII cons string.
if (flags_ != NO_STRING_ADD_FLAGS) {
__ ldr(r4, FieldMemOperand(r0, HeapObject::kMapOffset));
__ ldr(r5, FieldMemOperand(r1, HeapObject::kMapOffset));
@ -6528,7 +6540,7 @@ void StringAddStub::Generate(MacroAssembler* masm) {
__ bind(&non_ascii);
// At least one of the strings is two-byte. Check whether it happens
// to contain only ascii characters.
// to contain only ASCII characters.
// r4: first instance type.
// r5: second instance type.
__ tst(r4, Operand(kAsciiDataHintMask));


@ -5585,8 +5585,8 @@ void CodeGenerator::GenerateSwapElements(ZoneList<Expression*>* args) {
// Fetch the map and check if array is in fast case.
// Check that object doesn't require security checks and
// has no indexed interceptor.
__ CompareObjectType(object, tmp1, tmp2, FIRST_JS_OBJECT_TYPE);
deferred->Branch(lt);
__ CompareObjectType(object, tmp1, tmp2, JS_ARRAY_TYPE);
deferred->Branch(ne);
__ ldrb(tmp2, FieldMemOperand(tmp1, Map::kBitFieldOffset));
__ tst(tmp2, Operand(KeyedLoadIC::kSlowCaseBitFieldMask));
deferred->Branch(ne);
@ -7140,7 +7140,6 @@ void CodeGenerator::EmitKeyedStore(StaticType* key_type,
scratch1, scratch2);
// Load the value, key and receiver from the stack.
bool value_is_harmless = frame_->KnownSmiAt(0);
if (wb_info == NEVER_NEWSPACE) value_is_harmless = true;
@ -7188,12 +7187,6 @@ void CodeGenerator::EmitKeyedStore(StaticType* key_type,
__ CompareObjectType(receiver, scratch1, scratch1, JS_ARRAY_TYPE);
deferred->Branch(ne);
// Check that the key is within bounds. Both the key and the length of
// the JSArray are smis. Use unsigned comparison to handle negative keys.
__ ldr(scratch1, FieldMemOperand(receiver, JSArray::kLengthOffset));
__ cmp(scratch1, key);
deferred->Branch(ls); // Unsigned less equal.
// Get the elements array from the receiver.
__ ldr(scratch1, FieldMemOperand(receiver, JSObject::kElementsOffset));
if (!value_is_harmless && wb_info != LIKELY_SMI) {
@ -7208,6 +7201,7 @@ void CodeGenerator::EmitKeyedStore(StaticType* key_type,
}
// Check that the elements array is not a dictionary.
__ ldr(scratch2, FieldMemOperand(scratch1, JSObject::kMapOffset));
// The following instructions are the part of the inlined store keyed
// property code which can be patched. Therefore the exact number of
// instructions generated need to be fixed, so the constant pool is blocked
@ -7227,6 +7221,14 @@ void CodeGenerator::EmitKeyedStore(StaticType* key_type,
__ cmp(scratch2, scratch3);
deferred->Branch(ne);
// Check that the key is within bounds. Both the key and the length of
// the JSArray are smis (because the fixed array check above ensures the
// elements are in fast case). Use unsigned comparison to handle negative
// keys.
__ ldr(scratch3, FieldMemOperand(receiver, JSArray::kLengthOffset));
__ cmp(scratch3, key);
deferred->Branch(ls); // Unsigned less equal.
// Store the value.
__ add(scratch1, scratch1,
Operand(FixedArray::kHeaderSize - kHeapObjectTag));
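The hunk above moves the inlined keyed-store bounds check below the fixed-array map check: per the new comment, both key and length are only known to be smis once the elements are known to be in fast case. The check itself relies on one unsigned comparison of the tagged smis to reject both too-large and negative keys; a small standalone illustration of that trick, assuming the 32-bit smi tagging (value << 1) purely for concreteness:

#include <cstdint>
#include <cstdio>

// Hypothetical 32-bit smi tagging (value << 1, tag bit 0), used here only to
// show why one unsigned compare of tagged values covers both "key too large"
// and "key negative": a negative key tags to a word with the sign bit set,
// which is larger than any valid tagged length when compared unsigned.
static uint32_t TagSmi(int32_t value) {
  return static_cast<uint32_t>(value) << 1;
}

static bool KeyInBounds(int32_t key, int32_t length) {
  return TagSmi(key) < TagSmi(length);  // unsigned "below", as in the stubs
}

int main() {
  printf("%d\n", KeyInBounds(3, 10));   // 1: within bounds
  printf("%d\n", KeyInBounds(10, 10));  // 0: equal to length, out of bounds
  printf("%d\n", KeyInBounds(-1, 10));  // 0: negative key caught by the same compare
  return 0;
}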


@ -268,7 +268,7 @@ class CodeGenerator: public AstVisitor {
static int GetInlinedKeyedLoadInstructionsAfterPatch() {
return FLAG_debug_code ? 32 : 13;
}
static const int kInlinedKeyedStoreInstructionsAfterPatch = 5;
static const int kInlinedKeyedStoreInstructionsAfterPatch = 8;
static int GetInlinedNamedStoreInstructionsAfterPatch() {
ASSERT(inlined_write_barrier_size_ != -1);
return inlined_write_barrier_size_ + 4;


@ -3132,8 +3132,8 @@ void FullCodeGenerator::EmitSwapElements(ZoneList<Expression*>* args) {
// Fetch the map and check if array is in fast case.
// Check that object doesn't require security checks and
// has no indexed interceptor.
__ CompareObjectType(object, scratch1, scratch2, FIRST_JS_OBJECT_TYPE);
__ b(lt, &slow_case);
__ CompareObjectType(object, scratch1, scratch2, JS_ARRAY_TYPE);
__ b(ne, &slow_case);
// Map is now in scratch1.
__ ldrb(scratch2, FieldMemOperand(scratch1, Map::kBitFieldOffset));


@ -5509,8 +5509,8 @@ void StringAddStub::Generate(MacroAssembler* masm) {
STATIC_ASSERT(Smi::kMaxValue == String::kMaxLength);
// Handle exceptionally long strings in the runtime system.
__ j(overflow, &string_add_runtime);
// Use the runtime system when adding two one character strings, as it
// contains optimizations for this specific case using the symbol table.
// Use the symbol table when adding two one character strings, as it
// helps later optimizations to return a symbol here.
__ cmp(Operand(ebx), Immediate(Smi::FromInt(2)));
__ j(not_equal, &longer_than_two);
@ -5927,6 +5927,8 @@ void StringHelper::GenerateTwoCharacterSymbolTableProbe(MacroAssembler* masm,
// If entry is undefined no string with this hash can be found.
__ cmp(candidate, Factory::undefined_value());
__ j(equal, not_found);
__ cmp(candidate, Factory::null_value());
__ j(equal, &next_probe[i]);
// If length is not 2 the string is not a candidate.
__ cmp(FieldOperand(candidate, String::kLengthOffset),


@ -9944,12 +9944,6 @@ Result CodeGenerator::EmitKeyedStore(StaticType* key_type) {
__ CmpObjectType(receiver.reg(), JS_ARRAY_TYPE, tmp.reg());
deferred->Branch(not_equal);
// Check that the key is within bounds. Both the key and the length of
// the JSArray are smis. Use unsigned comparison to handle negative keys.
__ cmp(key.reg(),
FieldOperand(receiver.reg(), JSArray::kLengthOffset));
deferred->Branch(above_equal);
// Get the elements array from the receiver and check that it is not a
// dictionary.
__ mov(tmp.reg(),
@ -9975,6 +9969,14 @@ Result CodeGenerator::EmitKeyedStore(StaticType* key_type) {
Immediate(Factory::fixed_array_map()));
deferred->Branch(not_equal);
// Check that the key is within bounds. Both the key and the length of
// the JSArray are smis (because the fixed array check above ensures the
// elements are in fast case). Use unsigned comparison to handle negative
// keys.
__ cmp(key.reg(),
FieldOperand(receiver.reg(), JSArray::kLengthOffset));
deferred->Branch(above_equal);
// Store the value.
__ mov(FixedArrayElementOperand(tmp.reg(), key.reg()), result.reg());
__ IncrementCounter(&Counters::keyed_store_inline, 1);


@ -3050,8 +3050,8 @@ void FullCodeGenerator::EmitSwapElements(ZoneList<Expression*>* args) {
// Fetch the map and check if array is in fast case.
// Check that object doesn't require security checks and
// has no indexed interceptor.
__ CmpObjectType(object, FIRST_JS_OBJECT_TYPE, temp);
__ j(below, &slow_case);
__ CmpObjectType(object, JS_ARRAY_TYPE, temp);
__ j(not_equal, &slow_case);
__ test_b(FieldOperand(temp, Map::kBitFieldOffset),
KeyedLoadIC::kSlowCaseBitFieldMask);
__ j(not_zero, &slow_case);


@ -656,7 +656,7 @@ class Assembler : public Malloced {
// Move sign extended immediate to memory location.
void movq(const Operand& dst, Immediate value);
// New x64 instructions to load a 64-bit immediate into a register.
// Instructions to load a 64-bit immediate into a register.
// All 64-bit immediates must have a relocation mode.
void movq(Register dst, void* ptr, RelocInfo::Mode rmode);
void movq(Register dst, int64_t value, RelocInfo::Mode rmode);
@ -681,7 +681,7 @@ class Assembler : public Malloced {
void repmovsl();
void repmovsq();
// New x64 instruction to load from an immediate 64-bit pointer into RAX.
// Instruction to load from an immediate 64-bit pointer into RAX.
void load_rax(void* ptr, RelocInfo::Mode rmode);
void load_rax(ExternalReference ext);


@ -2244,11 +2244,14 @@ void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
Label slow;
__ JumpIfNotSmi(rdx, &slow);
// Check if the calling frame is an arguments adaptor frame.
// Check if the calling frame is an arguments adaptor frame. We look at the
// context offset, and if the frame is not a regular one, then we find a
// Smi instead of the context. We can't use SmiCompare here, because that
// only works for comparing two smis.
Label adaptor;
__ movq(rbx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
__ SmiCompare(Operand(rbx, StandardFrameConstants::kContextOffset),
Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
__ Cmp(Operand(rbx, StandardFrameConstants::kContextOffset),
Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
__ j(equal, &adaptor);
// Check index against formal parameters count limit passed in
@ -2303,8 +2306,8 @@ void ArgumentsAccessStub::GenerateNewObject(MacroAssembler* masm) {
// Check if the calling frame is an arguments adaptor frame.
Label adaptor_frame, try_allocate, runtime;
__ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
__ SmiCompare(Operand(rdx, StandardFrameConstants::kContextOffset),
Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
__ Cmp(Operand(rdx, StandardFrameConstants::kContextOffset),
Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
__ j(equal, &adaptor_frame);
// Get the length from the frame.
@ -4157,8 +4160,8 @@ void StringAddStub::Generate(MacroAssembler* masm) {
// Look at the length of the result of adding the two strings.
STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue / 2);
__ SmiAdd(rbx, rbx, rcx);
// Use the runtime system when adding two one character strings, as it
// contains optimizations for this specific case using the symbol table.
// Use the symbol table when adding two one character strings, as it
// helps later optimizations to return a symbol here.
__ SmiCompare(rbx, Smi::FromInt(2));
__ j(not_equal, &longer_than_two);
@ -4510,15 +4513,14 @@ void StringHelper::GenerateTwoCharacterSymbolTableProbe(MacroAssembler* masm,
FieldOperand(symbol_table, SymbolTable::kCapacityOffset));
__ decl(mask);
Register undefined = scratch4;
__ LoadRoot(undefined, Heap::kUndefinedValueRootIndex);
Register map = scratch4;
// Registers
// chars: two character string, char 1 in byte 0 and char 2 in byte 1.
// hash: hash of two character string (32-bit int)
// symbol_table: symbol table
// mask: capacity mask (32-bit int)
// undefined: undefined value
// map: -
// scratch: -
// Perform a number of probes in the symbol table.
@ -4533,7 +4535,7 @@ void StringHelper::GenerateTwoCharacterSymbolTableProbe(MacroAssembler* masm,
}
__ andl(scratch, mask);
// Load the entry from the symble table.
// Load the entry from the symbol table.
Register candidate = scratch; // Scratch register contains candidate.
STATIC_ASSERT(SymbolTable::kEntrySize == 1);
__ movq(candidate,
@ -4543,8 +4545,16 @@ void StringHelper::GenerateTwoCharacterSymbolTableProbe(MacroAssembler* masm,
SymbolTable::kElementsStartOffset));
// If entry is undefined no string with this hash can be found.
__ cmpq(candidate, undefined);
NearLabel is_string;
__ CmpObjectType(candidate, ODDBALL_TYPE, map);
__ j(not_equal, &is_string);
__ CompareRoot(candidate, Heap::kUndefinedValueRootIndex);
__ j(equal, not_found);
// Must be null (deleted entry).
__ jmp(&next_probe[i]);
__ bind(&is_string);
// If length is not 2 the string is not a candidate.
__ SmiCompare(FieldOperand(candidate, String::kLengthOffset),
@ -4556,8 +4566,7 @@ void StringHelper::GenerateTwoCharacterSymbolTableProbe(MacroAssembler* masm,
Register temp = kScratchRegister;
// Check that the candidate is a non-external ascii string.
__ movq(temp, FieldOperand(candidate, HeapObject::kMapOffset));
__ movzxbl(temp, FieldOperand(temp, Map::kInstanceTypeOffset));
__ movzxbl(temp, FieldOperand(map, Map::kInstanceTypeOffset));
__ JumpIfInstanceTypeIsNotSequentialAscii(
temp, temp, &next_probe[i]);


@ -766,7 +766,7 @@ void CodeGenerator::ToBoolean(ControlDestination* dest) {
__ AbortIfNotNumber(value.reg());
}
// Smi => false iff zero.
__ SmiCompare(value.reg(), Smi::FromInt(0));
__ Cmp(value.reg(), Smi::FromInt(0));
if (value.is_smi()) {
value.Unuse();
dest->Split(not_zero);
@ -794,7 +794,7 @@ void CodeGenerator::ToBoolean(ControlDestination* dest) {
dest->false_target()->Branch(equal);
// Smi => false iff zero.
__ SmiCompare(value.reg(), Smi::FromInt(0));
__ Cmp(value.reg(), Smi::FromInt(0));
dest->false_target()->Branch(equal);
Condition is_smi = masm_->CheckSmi(value.reg());
dest->true_target()->Branch(is_smi);
@ -1036,7 +1036,7 @@ void CodeGenerator::GenericBinaryOperation(BinaryOperation* expr,
true, overwrite_mode);
} else {
// Set the flags based on the operation, type and loop nesting level.
// Bit operations always assume they likely operate on Smis. Still only
// Bit operations always assume they likely operate on smis. Still only
// generate the inline Smi check code if this operation is part of a loop.
// For all other operations only inline the Smi check code for likely smis
// if the operation is part of a loop.
@ -2108,7 +2108,7 @@ void CodeGenerator::Comparison(AstNode* node,
if (cc == equal) {
Label comparison_done;
__ SmiCompare(FieldOperand(left_side.reg(), String::kLengthOffset),
Smi::FromInt(1));
Smi::FromInt(1));
__ j(not_equal, &comparison_done);
uint8_t char_value =
static_cast<uint8_t>(String::cast(*right_val)->Get(0));
@ -2294,7 +2294,7 @@ void CodeGenerator::ConstantSmiComparison(Condition cc,
// CompareStub and the inline code both support all values of cc.
}
// Implement comparison against a constant Smi, inlining the case
// where both sides are Smis.
// where both sides are smis.
left_side->ToRegister();
Register left_reg = left_side->reg();
Smi* constant_smi = Smi::cast(*right_side->handle());
@ -2304,7 +2304,6 @@ void CodeGenerator::ConstantSmiComparison(Condition cc,
__ AbortIfNotSmi(left_reg);
}
// Test smi equality and comparison by signed int comparison.
// Both sides are smis, so we can use an Immediate.
__ SmiCompare(left_reg, constant_smi);
left_side->Unuse();
right_side->Unuse();
@ -2314,7 +2313,7 @@ void CodeGenerator::ConstantSmiComparison(Condition cc,
JumpTarget is_smi;
if (cc == equal) {
// We can do the equality comparison before the smi check.
__ SmiCompare(left_reg, constant_smi);
__ Cmp(left_reg, constant_smi);
dest->true_target()->Branch(equal);
Condition left_is_smi = masm_->CheckSmi(left_reg);
dest->false_target()->Branch(left_is_smi);
@ -2575,8 +2574,8 @@ void CodeGenerator::CallApplyLazy(Expression* applicand,
// adaptor frame below it.
Label invoke, adapted;
__ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
__ SmiCompare(Operand(rdx, StandardFrameConstants::kContextOffset),
Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
__ Cmp(Operand(rdx, StandardFrameConstants::kContextOffset),
Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
__ j(equal, &adapted);
// No arguments adaptor frame. Copy fixed number of arguments.
@ -3857,7 +3856,7 @@ void CodeGenerator::VisitForInStatement(ForInStatement* node) {
__ movq(rbx, rax);
// If the property has been removed while iterating, we just skip it.
__ SmiCompare(rbx, Smi::FromInt(0));
__ Cmp(rbx, Smi::FromInt(0));
node->continue_target()->Branch(equal);
end_del_check.Bind();
@ -6199,15 +6198,15 @@ void CodeGenerator::GenerateIsConstructCall(ZoneList<Expression*>* args) {
// Skip the arguments adaptor frame if it exists.
Label check_frame_marker;
__ SmiCompare(Operand(fp.reg(), StandardFrameConstants::kContextOffset),
Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
__ Cmp(Operand(fp.reg(), StandardFrameConstants::kContextOffset),
Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
__ j(not_equal, &check_frame_marker);
__ movq(fp.reg(), Operand(fp.reg(), StandardFrameConstants::kCallerFPOffset));
// Check the marker in the calling frame.
__ bind(&check_frame_marker);
__ SmiCompare(Operand(fp.reg(), StandardFrameConstants::kMarkerOffset),
Smi::FromInt(StackFrame::CONSTRUCT));
__ Cmp(Operand(fp.reg(), StandardFrameConstants::kMarkerOffset),
Smi::FromInt(StackFrame::CONSTRUCT));
fp.Unuse();
destination()->Split(equal);
}
@ -6227,8 +6226,8 @@ void CodeGenerator::GenerateArgumentsLength(ZoneList<Expression*>* args) {
// Check if the calling frame is an arguments adaptor frame.
__ movq(fp.reg(), Operand(rbp, StandardFrameConstants::kCallerFPOffset));
__ SmiCompare(Operand(fp.reg(), StandardFrameConstants::kContextOffset),
Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
__ Cmp(Operand(fp.reg(), StandardFrameConstants::kContextOffset),
Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
__ j(not_equal, &exit);
// Arguments adaptor case: Read the arguments length from the
@ -6784,8 +6783,8 @@ void CodeGenerator::GenerateSwapElements(ZoneList<Expression*>* args) {
// Fetch the map and check if array is in fast case.
// Check that object doesn't require security checks and
// has no indexed interceptor.
__ CmpObjectType(object.reg(), FIRST_JS_OBJECT_TYPE, tmp1.reg());
deferred->Branch(below);
__ CmpObjectType(object.reg(), JS_ARRAY_TYPE, tmp1.reg());
deferred->Branch(not_equal);
__ testb(FieldOperand(tmp1.reg(), Map::kBitFieldOffset),
Immediate(KeyedLoadIC::kSlowCaseBitFieldMask));
deferred->Branch(not_zero);
@ -6827,7 +6826,7 @@ void CodeGenerator::GenerateSwapElements(ZoneList<Expression*>* args) {
Label done;
__ InNewSpace(tmp1.reg(), tmp2.reg(), equal, &done);
// Possible optimization: do a check that both values are Smis
// Possible optimization: do a check that both values are smis
// (or them and test against Smi mask.)
__ movq(tmp2.reg(), tmp1.reg());
@ -8517,12 +8516,6 @@ Result CodeGenerator::EmitKeyedStore(StaticType* key_type) {
__ CmpObjectType(receiver.reg(), JS_ARRAY_TYPE, kScratchRegister);
deferred->Branch(not_equal);
// Check that the key is within bounds. Both the key and the length of
// the JSArray are smis. Use unsigned comparison to handle negative keys.
__ SmiCompare(FieldOperand(receiver.reg(), JSArray::kLengthOffset),
key.reg());
deferred->Branch(below_equal);
// Get the elements array from the receiver and check that it is not a
// dictionary.
__ movq(tmp.reg(),
@ -8551,6 +8544,14 @@ Result CodeGenerator::EmitKeyedStore(StaticType* key_type) {
kScratchRegister);
deferred->Branch(not_equal);
// Check that the key is within bounds. Both the key and the length of
// the JSArray are smis (because the fixed array check above ensures the
// elements are in fast case). Use unsigned comparison to handle negative
// keys.
__ SmiCompare(FieldOperand(receiver.reg(), JSArray::kLengthOffset),
key.reg());
deferred->Branch(below_equal);
// Store the value.
SmiIndex index =
masm()->SmiToIndex(kScratchRegister, key.reg(), kPointerSizeLog2);


@ -549,7 +549,7 @@ void FullCodeGenerator::DoTest(Label* if_true,
__ CompareRoot(result_register(), Heap::kFalseValueRootIndex);
__ j(equal, if_false);
STATIC_ASSERT(kSmiTag == 0);
__ SmiCompare(result_register(), Smi::FromInt(0));
__ Cmp(result_register(), Smi::FromInt(0));
__ j(equal, if_false);
Condition is_smi = masm_->CheckSmi(result_register());
__ j(is_smi, if_true);
@ -995,7 +995,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
__ push(rcx); // Enumerable.
__ push(rbx); // Current entry.
__ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
__ SmiCompare(rax, Smi::FromInt(0));
__ Cmp(rax, Smi::FromInt(0));
__ j(equal, loop_statement.continue_target());
__ movq(rbx, rax);
@ -2503,15 +2503,15 @@ void FullCodeGenerator::EmitIsConstructCall(ZoneList<Expression*>* args) {
// Skip the arguments adaptor frame if it exists.
Label check_frame_marker;
__ SmiCompare(Operand(rax, StandardFrameConstants::kContextOffset),
Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
__ Cmp(Operand(rax, StandardFrameConstants::kContextOffset),
Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
__ j(not_equal, &check_frame_marker);
__ movq(rax, Operand(rax, StandardFrameConstants::kCallerFPOffset));
// Check the marker in the calling frame.
__ bind(&check_frame_marker);
__ SmiCompare(Operand(rax, StandardFrameConstants::kMarkerOffset),
Smi::FromInt(StackFrame::CONSTRUCT));
__ Cmp(Operand(rax, StandardFrameConstants::kMarkerOffset),
Smi::FromInt(StackFrame::CONSTRUCT));
PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
Split(equal, if_true, if_false, fall_through);
@ -2565,8 +2565,8 @@ void FullCodeGenerator::EmitArgumentsLength(ZoneList<Expression*>* args) {
// Check if the calling frame is an arguments adaptor frame.
__ movq(rbx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
__ SmiCompare(Operand(rbx, StandardFrameConstants::kContextOffset),
Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
__ Cmp(Operand(rbx, StandardFrameConstants::kContextOffset),
Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
__ j(not_equal, &exit);
// Arguments adaptor case: Read the arguments length from the
@ -3011,8 +3011,8 @@ void FullCodeGenerator::EmitSwapElements(ZoneList<Expression*>* args) {
// Fetch the map and check if array is in fast case.
// Check that object doesn't require security checks and
// has no indexed interceptor.
__ CmpObjectType(object, FIRST_JS_OBJECT_TYPE, temp);
__ j(below, &slow_case);
__ CmpObjectType(object, JS_ARRAY_TYPE, temp);
__ j(not_equal, &slow_case);
__ testb(FieldOperand(temp, Map::kBitFieldOffset),
Immediate(KeyedLoadIC::kSlowCaseBitFieldMask));
__ j(not_zero, &slow_case);


@ -1244,7 +1244,7 @@ void LCodeGen::DoBranch(LBranch* instr) {
__ j(equal, true_label);
__ CompareRoot(reg, Heap::kFalseValueRootIndex);
__ j(equal, false_label);
__ SmiCompare(reg, Smi::FromInt(0));
__ Cmp(reg, Smi::FromInt(0));
__ j(equal, false_label);
__ JumpIfSmi(reg, true_label);
@ -2196,8 +2196,8 @@ void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) {
// Check for arguments adapter frame.
NearLabel done, adapted;
__ movq(result, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
__ SmiCompare(Operand(result, StandardFrameConstants::kContextOffset),
Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
__ Cmp(Operand(result, StandardFrameConstants::kContextOffset),
Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
__ j(equal, &adapted);
// No arguments adaptor frame.
@ -3681,15 +3681,15 @@ void LCodeGen::EmitIsConstructCall(Register temp) {
// Skip the arguments adaptor frame if it exists.
NearLabel check_frame_marker;
__ SmiCompare(Operand(temp, StandardFrameConstants::kContextOffset),
Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
__ Cmp(Operand(temp, StandardFrameConstants::kContextOffset),
Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
__ j(not_equal, &check_frame_marker);
__ movq(temp, Operand(rax, StandardFrameConstants::kCallerFPOffset));
// Check the marker in the calling frame.
__ bind(&check_frame_marker);
__ SmiCompare(Operand(temp, StandardFrameConstants::kMarkerOffset),
Smi::FromInt(StackFrame::CONSTRUCT));
__ Cmp(Operand(temp, StandardFrameConstants::kMarkerOffset),
Smi::FromInt(StackFrame::CONSTRUCT));
}


@ -124,7 +124,7 @@ void MacroAssembler::RecordWrite(Register object,
ASSERT(!object.is(rsi) && !value.is(rsi) && !index.is(rsi));
// First, check if a write barrier is even needed. The tests below
// catch stores of Smis and stores into young gen.
// catch stores of smis and stores into the young generation.
Label done;
JumpIfSmi(value, &done);
@ -153,7 +153,7 @@ void MacroAssembler::RecordWrite(Register object,
ASSERT(!object.is(rsi) && !value.is(rsi) && !address.is(rsi));
// First, check if a write barrier is even needed. The tests below
// catch stores of Smis and stores into young gen.
// catch stores of smis and stores into the young generation.
Label done;
JumpIfSmi(value, &done);
@ -837,12 +837,24 @@ void MacroAssembler::SmiTest(Register src) {
}
void MacroAssembler::SmiCompare(Register dst, Register src) {
cmpq(dst, src);
void MacroAssembler::SmiCompare(Register smi1, Register smi2) {
if (FLAG_debug_code) {
AbortIfNotSmi(smi1);
AbortIfNotSmi(smi2);
}
cmpq(smi1, smi2);
}
void MacroAssembler::SmiCompare(Register dst, Smi* src) {
if (FLAG_debug_code) {
AbortIfNotSmi(dst);
}
Cmp(dst, src);
}
void MacroAssembler::Cmp(Register dst, Smi* src) {
ASSERT(!dst.is(kScratchRegister));
if (src->value() == 0) {
testq(dst, dst);
@ -854,20 +866,39 @@ void MacroAssembler::SmiCompare(Register dst, Smi* src) {
void MacroAssembler::SmiCompare(Register dst, const Operand& src) {
if (FLAG_debug_code) {
AbortIfNotSmi(dst);
AbortIfNotSmi(src);
}
cmpq(dst, src);
}
void MacroAssembler::SmiCompare(const Operand& dst, Register src) {
if (FLAG_debug_code) {
AbortIfNotSmi(dst);
AbortIfNotSmi(src);
}
cmpq(dst, src);
}
void MacroAssembler::SmiCompare(const Operand& dst, Smi* src) {
if (FLAG_debug_code) {
AbortIfNotSmi(dst);
}
cmpl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(src->value()));
}
void MacroAssembler::Cmp(const Operand& dst, Smi* src) {
// The Operand cannot use the smi register.
Register smi_reg = GetSmiConstant(src);
ASSERT(!dst.AddressUsesRegister(smi_reg));
cmpq(dst, smi_reg);
}
void MacroAssembler::SmiCompareInteger32(const Operand& dst, Register src) {
cmpl(Operand(dst, kSmiShift / kBitsPerByte), src);
}
@ -1352,7 +1383,7 @@ void MacroAssembler::Move(const Operand& dst, Handle<Object> source) {
void MacroAssembler::Cmp(Register dst, Handle<Object> source) {
if (source->IsSmi()) {
SmiCompare(dst, Smi::cast(*source));
Cmp(dst, Smi::cast(*source));
} else {
Move(kScratchRegister, source);
cmpq(dst, kScratchRegister);
@ -1362,7 +1393,7 @@ void MacroAssembler::Cmp(Register dst, Handle<Object> source) {
void MacroAssembler::Cmp(const Operand& dst, Handle<Object> source) {
if (source->IsSmi()) {
SmiCompare(dst, Smi::cast(*source));
Cmp(dst, Smi::cast(*source));
} else {
ASSERT(source->IsHeapObject());
movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
@ -1753,7 +1784,12 @@ void MacroAssembler::AbortIfSmi(Register object) {
void MacroAssembler::AbortIfNotSmi(Register object) {
NearLabel ok;
Condition is_smi = CheckSmi(object);
Assert(is_smi, "Operand is not a smi");
}
void MacroAssembler::AbortIfNotSmi(const Operand& object) {
Condition is_smi = CheckSmi(object);
Assert(is_smi, "Operand is not a smi");
}


@ -278,8 +278,9 @@ class MacroAssembler: public Assembler {
int power);
// Simple comparison of smis.
void SmiCompare(Register dst, Register src);
// Simple comparison of smis. Both sides must be known smis to use these,
// otherwise use Cmp.
void SmiCompare(Register smi1, Register smi2);
void SmiCompare(Register dst, Smi* src);
void SmiCompare(Register dst, const Operand& src);
void SmiCompare(const Operand& dst, Register src);
@ -609,6 +610,8 @@ class MacroAssembler: public Assembler {
void Move(const Operand& dst, Handle<Object> source);
void Cmp(Register dst, Handle<Object> source);
void Cmp(const Operand& dst, Handle<Object> source);
void Cmp(Register dst, Smi* src);
void Cmp(const Operand& dst, Smi* src);
void Push(Handle<Object> source);
// Emit code to discard a non-negative number of pointer-sized elements
@ -702,6 +705,7 @@ class MacroAssembler: public Assembler {
// Abort execution if argument is not a smi. Used in debug code.
void AbortIfNotSmi(Register object);
void AbortIfNotSmi(const Operand& object);
// Abort execution if argument is a string. Used in debug code.
void AbortIfNotString(Register object);


@ -220,7 +220,7 @@ void TestSmiCompare(MacroAssembler* masm, Label* exit, int id, int x, int y) {
__ j(less_equal, exit);
}
} else {
__ SmiCompare(rcx, rcx);
__ cmpq(rcx, rcx);
__ movl(rax, Immediate(id + 11));
__ j(not_equal, exit);
__ incq(rax);
@ -232,10 +232,11 @@ void TestSmiCompare(MacroAssembler* masm, Label* exit, int id, int x, int y) {
// Test that we can compare smis for equality (and more).
TEST(SmiCompare) {
v8::V8::Initialize();
// Allocate an executable page of memory.
size_t actual_size;
byte* buffer =
static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize,
static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize * 2,
&actual_size,
true));
CHECK(buffer);
@ -300,35 +301,35 @@ TEST(Integer32ToSmi) {
__ movl(rcx, Immediate(0));
__ Integer32ToSmi(rcx, rcx);
__ Set(rdx, reinterpret_cast<intptr_t>(Smi::FromInt(0)));
__ SmiCompare(rcx, rdx);
__ cmpq(rcx, rdx);
__ j(not_equal, &exit);
__ movq(rax, Immediate(2)); // Test number.
__ movl(rcx, Immediate(1024));
__ Integer32ToSmi(rcx, rcx);
__ Set(rdx, reinterpret_cast<intptr_t>(Smi::FromInt(1024)));
__ SmiCompare(rcx, rdx);
__ cmpq(rcx, rdx);
__ j(not_equal, &exit);
__ movq(rax, Immediate(3)); // Test number.
__ movl(rcx, Immediate(-1));
__ Integer32ToSmi(rcx, rcx);
__ Set(rdx, reinterpret_cast<intptr_t>(Smi::FromInt(-1)));
__ SmiCompare(rcx, rdx);
__ cmpq(rcx, rdx);
__ j(not_equal, &exit);
__ movq(rax, Immediate(4)); // Test number.
__ movl(rcx, Immediate(Smi::kMaxValue));
__ Integer32ToSmi(rcx, rcx);
__ Set(rdx, reinterpret_cast<intptr_t>(Smi::FromInt(Smi::kMaxValue)));
__ SmiCompare(rcx, rdx);
__ cmpq(rcx, rdx);
__ j(not_equal, &exit);
__ movq(rax, Immediate(5)); // Test number.
__ movl(rcx, Immediate(Smi::kMinValue));
__ Integer32ToSmi(rcx, rcx);
__ Set(rdx, reinterpret_cast<intptr_t>(Smi::FromInt(Smi::kMinValue)));
__ SmiCompare(rcx, rdx);
__ cmpq(rcx, rdx);
__ j(not_equal, &exit);
// Different target register.
@ -337,35 +338,35 @@ TEST(Integer32ToSmi) {
__ movl(rcx, Immediate(0));
__ Integer32ToSmi(r8, rcx);
__ Set(rdx, reinterpret_cast<intptr_t>(Smi::FromInt(0)));
__ SmiCompare(r8, rdx);
__ cmpq(r8, rdx);
__ j(not_equal, &exit);
__ movq(rax, Immediate(7)); // Test number.
__ movl(rcx, Immediate(1024));
__ Integer32ToSmi(r8, rcx);
__ Set(rdx, reinterpret_cast<intptr_t>(Smi::FromInt(1024)));
__ SmiCompare(r8, rdx);
__ cmpq(r8, rdx);
__ j(not_equal, &exit);
__ movq(rax, Immediate(8)); // Test number.
__ movl(rcx, Immediate(-1));
__ Integer32ToSmi(r8, rcx);
__ Set(rdx, reinterpret_cast<intptr_t>(Smi::FromInt(-1)));
__ SmiCompare(r8, rdx);
__ cmpq(r8, rdx);
__ j(not_equal, &exit);
__ movq(rax, Immediate(9)); // Test number.
__ movl(rcx, Immediate(Smi::kMaxValue));
__ Integer32ToSmi(r8, rcx);
__ Set(rdx, reinterpret_cast<intptr_t>(Smi::FromInt(Smi::kMaxValue)));
__ SmiCompare(r8, rdx);
__ cmpq(r8, rdx);
__ j(not_equal, &exit);
__ movq(rax, Immediate(10)); // Test number.
__ movl(rcx, Immediate(Smi::kMinValue));
__ Integer32ToSmi(r8, rcx);
__ Set(rdx, reinterpret_cast<intptr_t>(Smi::FromInt(Smi::kMinValue)));
__ SmiCompare(r8, rdx);
__ cmpq(r8, rdx);
__ j(not_equal, &exit);
@ -394,16 +395,16 @@ void TestI64PlusConstantToSmi(MacroAssembler* masm,
__ movq(rcx, x, RelocInfo::NONE);
__ movq(r11, rcx);
__ Integer64PlusConstantToSmi(rdx, rcx, y);
__ SmiCompare(rdx, r8);
__ cmpq(rdx, r8);
__ j(not_equal, exit);
__ incq(rax);
__ SmiCompare(r11, rcx);
__ cmpq(r11, rcx);
__ j(not_equal, exit);
__ incq(rax);
__ Integer64PlusConstantToSmi(rcx, rcx, y);
__ SmiCompare(rcx, r8);
__ cmpq(rcx, r8);
__ j(not_equal, exit);
}
@ -660,14 +661,14 @@ void TestSmiNeg(MacroAssembler* masm, Label* exit, int id, int x) {
__ SmiNeg(r9, rcx, exit);
__ incq(rax);
__ SmiCompare(r11, rcx);
__ cmpq(r11, rcx);
__ j(not_equal, exit);
__ incq(rax);
__ SmiNeg(rcx, rcx, exit);
__ incq(rax);
__ SmiCompare(r11, rcx);
__ cmpq(r11, rcx);
__ j(not_equal, exit);
} else {
Label smi_ok, smi_ok2;
@ -679,11 +680,11 @@ void TestSmiNeg(MacroAssembler* masm, Label* exit, int id, int x) {
__ jmp(exit);
__ bind(&smi_ok);
__ incq(rax);
__ SmiCompare(r9, r8);
__ cmpq(r9, r8);
__ j(not_equal, exit);
__ incq(rax);
__ SmiCompare(r11, rcx);
__ cmpq(r11, rcx);
__ j(not_equal, exit);
__ incq(rax);
@ -691,7 +692,7 @@ void TestSmiNeg(MacroAssembler* masm, Label* exit, int id, int x) {
__ jmp(exit);
__ bind(&smi_ok2);
__ incq(rax);
__ SmiCompare(rcx, r8);
__ cmpq(rcx, r8);
__ j(not_equal, exit);
}
}
@ -751,12 +752,12 @@ static void SmiAddTest(MacroAssembler* masm,
__ movl(rax, Immediate(id)); // Test number.
__ SmiAdd(r9, rcx, rdx, exit);
__ SmiCompare(r9, r8);
__ cmpq(r9, r8);
__ j(not_equal, exit);
__ incq(rax);
__ SmiAdd(rcx, rcx, rdx, exit); \
__ SmiCompare(rcx, r8);
__ cmpq(rcx, r8);
__ j(not_equal, exit);
__ movl(rcx, Immediate(first));
@ -764,11 +765,11 @@ static void SmiAddTest(MacroAssembler* masm,
__ incq(rax);
__ SmiAddConstant(r9, rcx, Smi::FromInt(second));
__ SmiCompare(r9, r8);
__ cmpq(r9, r8);
__ j(not_equal, exit);
__ SmiAddConstant(rcx, rcx, Smi::FromInt(second));
__ SmiCompare(rcx, r8);
__ cmpq(rcx, r8);
__ j(not_equal, exit);
__ movl(rcx, Immediate(first));
@ -776,12 +777,12 @@ static void SmiAddTest(MacroAssembler* masm,
__ incq(rax);
__ SmiAddConstant(r9, rcx, Smi::FromInt(second), exit);
__ SmiCompare(r9, r8);
__ cmpq(r9, r8);
__ j(not_equal, exit);
__ incq(rax);
__ SmiAddConstant(rcx, rcx, Smi::FromInt(second), exit);
__ SmiCompare(rcx, r8);
__ cmpq(rcx, r8);
__ j(not_equal, exit);
}
@ -834,36 +835,36 @@ static void SmiSubTest(MacroAssembler* masm,
__ movl(rax, Immediate(id)); // Test 0.
__ SmiSub(r9, rcx, rdx, exit);
__ SmiCompare(r9, r8);
__ cmpq(r9, r8);
__ j(not_equal, exit);
__ incq(rax); // Test 1.
__ SmiSub(rcx, rcx, rdx, exit);
__ SmiCompare(rcx, r8);
__ cmpq(rcx, r8);
__ j(not_equal, exit);
__ Move(rcx, Smi::FromInt(first));
__ incq(rax); // Test 2.
__ SmiSubConstant(r9, rcx, Smi::FromInt(second));
__ SmiCompare(r9, r8);
__ cmpq(r9, r8);
__ j(not_equal, exit);
__ incq(rax); // Test 3.
__ SmiSubConstant(rcx, rcx, Smi::FromInt(second));
__ SmiCompare(rcx, r8);
__ cmpq(rcx, r8);
__ j(not_equal, exit);
__ Move(rcx, Smi::FromInt(first));
__ incq(rax); // Test 4.
__ SmiSubConstant(r9, rcx, Smi::FromInt(second), exit);
__ SmiCompare(r9, r8);
__ cmpq(r9, r8);
__ j(not_equal, exit);
__ incq(rax); // Test 5.
__ SmiSubConstant(rcx, rcx, Smi::FromInt(second), exit);
__ SmiCompare(rcx, r8);
__ cmpq(rcx, r8);
__ j(not_equal, exit);
}
@ -886,7 +887,7 @@ static void SmiSubOverflowTest(MacroAssembler* masm,
__ jmp(exit);
__ bind(&overflow_ok);
__ incq(rax);
__ SmiCompare(rcx, r11);
__ cmpq(rcx, r11);
__ j(not_equal, exit);
}
@ -897,7 +898,7 @@ static void SmiSubOverflowTest(MacroAssembler* masm,
__ jmp(exit);
__ bind(&overflow_ok);
__ incq(rax);
__ SmiCompare(rcx, r11);
__ cmpq(rcx, r11);
__ j(not_equal, exit);
}
@ -909,7 +910,7 @@ static void SmiSubOverflowTest(MacroAssembler* masm,
__ jmp(exit);
__ bind(&overflow_ok);
__ incq(rax);
__ SmiCompare(rcx, r11);
__ cmpq(rcx, r11);
__ j(not_equal, exit);
}
@ -920,7 +921,7 @@ static void SmiSubOverflowTest(MacroAssembler* masm,
__ jmp(exit);
__ bind(&overflow_ok);
__ incq(rax);
__ SmiCompare(rcx, r11);
__ cmpq(rcx, r11);
__ j(not_equal, exit);
}
@ -933,7 +934,7 @@ static void SmiSubOverflowTest(MacroAssembler* masm,
__ jmp(exit);
__ bind(&overflow_ok);
__ incq(rax);
__ SmiCompare(rcx, r11);
__ cmpq(rcx, r11);
__ j(not_equal, exit);
}
@ -944,7 +945,7 @@ static void SmiSubOverflowTest(MacroAssembler* masm,
__ jmp(exit);
__ bind(&overflow_ok);
__ incq(rax);
__ SmiCompare(rcx, r11);
__ cmpq(rcx, r11);
__ j(not_equal, exit);
}
@ -956,7 +957,7 @@ static void SmiSubOverflowTest(MacroAssembler* masm,
__ jmp(exit);
__ bind(&overflow_ok);
__ incq(rax);
__ SmiCompare(rcx, r11);
__ cmpq(rcx, r11);
__ j(not_equal, exit);
}
@ -967,7 +968,7 @@ static void SmiSubOverflowTest(MacroAssembler* masm,
__ jmp(exit);
__ bind(&overflow_ok);
__ incq(rax);
__ SmiCompare(rcx, r11);
__ cmpq(rcx, r11);
__ j(not_equal, exit);
}
}
@ -1032,15 +1033,15 @@ void TestSmiMul(MacroAssembler* masm, Label* exit, int id, int x, int y) {
__ Move(r8, Smi::FromIntptr(result));
__ SmiMul(r9, rcx, rdx, exit);
__ incq(rax);
__ SmiCompare(r11, rcx);
__ cmpq(r11, rcx);
__ j(not_equal, exit);
__ incq(rax);
__ SmiCompare(r9, r8);
__ cmpq(r9, r8);
__ j(not_equal, exit);
__ incq(rax);
__ SmiMul(rcx, rcx, rdx, exit);
__ SmiCompare(rcx, r8);
__ cmpq(rcx, r8);
__ j(not_equal, exit);
} else {
__ movl(rax, Immediate(id + 8));
@ -1049,7 +1050,7 @@ void TestSmiMul(MacroAssembler* masm, Label* exit, int id, int x, int y) {
__ jmp(exit);
__ bind(&overflow_ok);
__ incq(rax);
__ SmiCompare(r11, rcx);
__ cmpq(r11, rcx);
__ j(not_equal, exit);
__ incq(rax);
__ SmiMul(rcx, rcx, rdx, &overflow_ok2);
@ -1057,7 +1058,7 @@ void TestSmiMul(MacroAssembler* masm, Label* exit, int id, int x, int y) {
__ bind(&overflow_ok2);
// 31-bit version doesn't preserve rcx on failure.
// __ incq(rax);
// __ SmiCompare(r11, rcx);
// __ cmpq(r11, rcx);
// __ j(not_equal, exit);
}
}
@ -1126,20 +1127,20 @@ void TestSmiDiv(MacroAssembler* masm, Label* exit, int id, int x, int y) {
__ SmiDiv(r9, rcx, r14, exit);
// Might have destroyed rcx and r14.
__ incq(r15);
__ SmiCompare(r9, r8);
__ cmpq(r9, r8);
__ j(not_equal, exit);
__ incq(r15);
__ movq(rcx, r11);
__ Move(r14, Smi::FromInt(y));
__ SmiCompare(rcx, r11);
__ cmpq(rcx, r11);
__ j(not_equal, exit);
__ incq(r15);
__ SmiDiv(rcx, rcx, r14, exit);
__ incq(r15);
__ SmiCompare(rcx, r8);
__ cmpq(rcx, r8);
__ j(not_equal, exit);
} else {
// Division fails.
@ -1152,7 +1153,7 @@ void TestSmiDiv(MacroAssembler* masm, Label* exit, int id, int x, int y) {
__ bind(&fail_ok);
__ incq(r15);
__ SmiCompare(rcx, r11);
__ cmpq(rcx, r11);
__ j(not_equal, exit);
__ incq(r15);
@ -1161,7 +1162,7 @@ void TestSmiDiv(MacroAssembler* masm, Label* exit, int id, int x, int y) {
__ bind(&fail_ok2);
__ incq(r15);
__ SmiCompare(rcx, r11);
__ cmpq(rcx, r11);
__ j(not_equal, exit);
}
}
@ -1238,18 +1239,18 @@ void TestSmiMod(MacroAssembler* masm, Label* exit, int id, int x, int y) {
__ SmiMod(r9, rcx, r14, exit);
__ incq(r15);
__ SmiCompare(r9, r8);
__ cmpq(r9, r8);
__ j(not_equal, exit);
__ incq(r15);
__ SmiCompare(rcx, r11);
__ cmpq(rcx, r11);
__ j(not_equal, exit);
__ incq(r15);
__ SmiMod(rcx, rcx, r14, exit);
__ incq(r15);
__ SmiCompare(rcx, r8);
__ cmpq(rcx, r8);
__ j(not_equal, exit);
} else {
// Modulo fails.
@ -1261,7 +1262,7 @@ void TestSmiMod(MacroAssembler* masm, Label* exit, int id, int x, int y) {
__ bind(&fail_ok);
__ incq(r15);
__ SmiCompare(rcx, r11);
__ cmpq(rcx, r11);
__ j(not_equal, exit);
__ incq(r15);
@ -1270,7 +1271,7 @@ void TestSmiMod(MacroAssembler* masm, Label* exit, int id, int x, int y) {
__ bind(&fail_ok2);
__ incq(r15);
__ SmiCompare(rcx, r11);
__ cmpq(rcx, r11);
__ j(not_equal, exit);
}
}
@ -1340,7 +1341,7 @@ void TestSmiIndex(MacroAssembler* masm, Label* exit, int id, int x) {
ASSERT(index.reg.is(rcx) || index.reg.is(rdx));
__ shl(index.reg, Immediate(index.scale));
__ Set(r8, static_cast<intptr_t>(x) << i);
__ SmiCompare(index.reg, r8);
__ cmpq(index.reg, r8);
__ j(not_equal, exit);
__ incq(rax);
__ Move(rcx, Smi::FromInt(x));
@ -1348,7 +1349,7 @@ void TestSmiIndex(MacroAssembler* masm, Label* exit, int id, int x) {
ASSERT(index.reg.is(rcx));
__ shl(rcx, Immediate(index.scale));
__ Set(r8, static_cast<intptr_t>(x) << i);
__ SmiCompare(rcx, r8);
__ cmpq(rcx, r8);
__ j(not_equal, exit);
__ incq(rax);
@ -1357,7 +1358,7 @@ void TestSmiIndex(MacroAssembler* masm, Label* exit, int id, int x) {
ASSERT(index.reg.is(rcx) || index.reg.is(rdx));
__ shl(index.reg, Immediate(index.scale));
__ Set(r8, static_cast<intptr_t>(-x) << i);
__ SmiCompare(index.reg, r8);
__ cmpq(index.reg, r8);
__ j(not_equal, exit);
__ incq(rax);
__ Move(rcx, Smi::FromInt(x));
@ -1365,7 +1366,7 @@ void TestSmiIndex(MacroAssembler* masm, Label* exit, int id, int x) {
ASSERT(index.reg.is(rcx));
__ shl(rcx, Immediate(index.scale));
__ Set(r8, static_cast<intptr_t>(-x) << i);
__ SmiCompare(rcx, r8);
__ cmpq(rcx, r8);
__ j(not_equal, exit);
__ incq(rax);
}
@ -1414,7 +1415,7 @@ void TestSelectNonSmi(MacroAssembler* masm, Label* exit, int id, int x, int y) {
__ SelectNonSmi(r9, rcx, rdx, exit);
__ incq(rax);
__ SmiCompare(r9, rdx);
__ cmpq(r9, rdx);
__ j(not_equal, exit);
__ incq(rax);
@ -1424,7 +1425,7 @@ void TestSelectNonSmi(MacroAssembler* masm, Label* exit, int id, int x, int y) {
__ SelectNonSmi(r9, rcx, rdx, exit);
__ incq(rax);
__ SmiCompare(r9, rcx);
__ cmpq(r9, rcx);
__ j(not_equal, exit);
__ incq(rax);
@ -1488,31 +1489,31 @@ void TestSmiAnd(MacroAssembler* masm, Label* exit, int id, int x, int y) {
__ Move(rdx, Smi::FromInt(y));
__ Move(r8, Smi::FromInt(result));
__ SmiAnd(r9, rcx, rdx);
__ SmiCompare(r8, r9);
__ cmpq(r8, r9);
__ j(not_equal, exit);
__ incq(rax);
__ SmiCompare(r11, rcx);
__ cmpq(r11, rcx);
__ j(not_equal, exit);
__ incq(rax);
__ SmiAnd(rcx, rcx, rdx);
__ SmiCompare(r8, rcx);
__ cmpq(r8, rcx);
__ j(not_equal, exit);
__ movq(rcx, r11);
__ incq(rax);
__ SmiAndConstant(r9, rcx, Smi::FromInt(y));
__ SmiCompare(r8, r9);
__ cmpq(r8, r9);
__ j(not_equal, exit);
__ incq(rax);
__ SmiCompare(r11, rcx);
__ cmpq(r11, rcx);
__ j(not_equal, exit);
__ incq(rax);
__ SmiAndConstant(rcx, rcx, Smi::FromInt(y));
__ SmiCompare(r8, rcx);
__ cmpq(r8, rcx);
__ j(not_equal, exit);
}
@ -1568,31 +1569,31 @@ void TestSmiOr(MacroAssembler* masm, Label* exit, int id, int x, int y) {
__ Move(rdx, Smi::FromInt(y));
__ Move(r8, Smi::FromInt(result));
__ SmiOr(r9, rcx, rdx);
__ SmiCompare(r8, r9);
__ cmpq(r8, r9);
__ j(not_equal, exit);
__ incq(rax);
__ SmiCompare(r11, rcx);
__ cmpq(r11, rcx);
__ j(not_equal, exit);
__ incq(rax);
__ SmiOr(rcx, rcx, rdx);
__ SmiCompare(r8, rcx);
__ cmpq(r8, rcx);
__ j(not_equal, exit);
__ movq(rcx, r11);
__ incq(rax);
__ SmiOrConstant(r9, rcx, Smi::FromInt(y));
__ SmiCompare(r8, r9);
__ cmpq(r8, r9);
__ j(not_equal, exit);
__ incq(rax);
__ SmiCompare(r11, rcx);
__ cmpq(r11, rcx);
__ j(not_equal, exit);
__ incq(rax);
__ SmiOrConstant(rcx, rcx, Smi::FromInt(y));
__ SmiCompare(r8, rcx);
__ cmpq(r8, rcx);
__ j(not_equal, exit);
}
@ -1650,31 +1651,31 @@ void TestSmiXor(MacroAssembler* masm, Label* exit, int id, int x, int y) {
__ Move(rdx, Smi::FromInt(y));
__ Move(r8, Smi::FromInt(result));
__ SmiXor(r9, rcx, rdx);
__ SmiCompare(r8, r9);
__ cmpq(r8, r9);
__ j(not_equal, exit);
__ incq(rax);
__ SmiCompare(r11, rcx);
__ cmpq(r11, rcx);
__ j(not_equal, exit);
__ incq(rax);
__ SmiXor(rcx, rcx, rdx);
__ SmiCompare(r8, rcx);
__ cmpq(r8, rcx);
__ j(not_equal, exit);
__ movq(rcx, r11);
__ incq(rax);
__ SmiXorConstant(r9, rcx, Smi::FromInt(y));
__ SmiCompare(r8, r9);
__ cmpq(r8, r9);
__ j(not_equal, exit);
__ incq(rax);
__ SmiCompare(r11, rcx);
__ cmpq(r11, rcx);
__ j(not_equal, exit);
__ incq(rax);
__ SmiXorConstant(rcx, rcx, Smi::FromInt(y));
__ SmiCompare(r8, rcx);
__ cmpq(r8, rcx);
__ j(not_equal, exit);
}
@ -1731,16 +1732,16 @@ void TestSmiNot(MacroAssembler* masm, Label* exit, int id, int x) {
__ movq(r11, rcx);
__ SmiNot(r9, rcx);
__ SmiCompare(r9, r8);
__ cmpq(r9, r8);
__ j(not_equal, exit);
__ incq(rax);
__ SmiCompare(r11, rcx);
__ cmpq(r11, rcx);
__ j(not_equal, exit);
__ incq(rax);
__ SmiNot(rcx, rcx);
__ SmiCompare(rcx, r8);
__ cmpq(rcx, r8);
__ j(not_equal, exit);
}
@ -1797,7 +1798,7 @@ void TestSmiShiftLeft(MacroAssembler* masm, Label* exit, int id, int x) {
__ SmiShiftLeftConstant(r9, rcx, shift);
__ incq(rax);
__ SmiCompare(r9, r8);
__ cmpq(r9, r8);
__ j(not_equal, exit);
__ incq(rax);
@ -1805,7 +1806,7 @@ void TestSmiShiftLeft(MacroAssembler* masm, Label* exit, int id, int x) {
__ SmiShiftLeftConstant(rcx, rcx, shift);
__ incq(rax);
__ SmiCompare(rcx, r8);
__ cmpq(rcx, r8);
__ j(not_equal, exit);
__ incq(rax);
@ -1814,7 +1815,7 @@ void TestSmiShiftLeft(MacroAssembler* masm, Label* exit, int id, int x) {
__ SmiShiftLeft(r9, rdx, rcx);
__ incq(rax);
__ SmiCompare(r9, r8);
__ cmpq(r9, r8);
__ j(not_equal, exit);
__ incq(rax);
@ -1823,7 +1824,7 @@ void TestSmiShiftLeft(MacroAssembler* masm, Label* exit, int id, int x) {
__ SmiShiftLeft(r9, rdx, r11);
__ incq(rax);
__ SmiCompare(r9, r8);
__ cmpq(r9, r8);
__ j(not_equal, exit);
__ incq(rax);
@ -1832,7 +1833,7 @@ void TestSmiShiftLeft(MacroAssembler* masm, Label* exit, int id, int x) {
__ SmiShiftLeft(rdx, rdx, r11);
__ incq(rax);
__ SmiCompare(rdx, r8);
__ cmpq(rdx, r8);
__ j(not_equal, exit);
__ incq(rax);
@ -1893,7 +1894,7 @@ void TestSmiShiftLogicalRight(MacroAssembler* masm,
__ SmiShiftLogicalRightConstant(r9, rcx, shift, exit);
__ incq(rax);
__ SmiCompare(r9, r8);
__ cmpq(r9, r8);
__ j(not_equal, exit);
__ incq(rax);
@ -1902,7 +1903,7 @@ void TestSmiShiftLogicalRight(MacroAssembler* masm,
__ SmiShiftLogicalRight(r9, rdx, rcx, exit);
__ incq(rax);
__ SmiCompare(r9, r8);
__ cmpq(r9, r8);
__ j(not_equal, exit);
__ incq(rax);
@ -1911,7 +1912,7 @@ void TestSmiShiftLogicalRight(MacroAssembler* masm,
__ SmiShiftLogicalRight(r9, rdx, r11, exit);
__ incq(rax);
__ SmiCompare(r9, r8);
__ cmpq(r9, r8);
__ j(not_equal, exit);
__ incq(rax);
@ -1925,7 +1926,7 @@ void TestSmiShiftLogicalRight(MacroAssembler* masm,
__ bind(&fail_ok);
__ incq(rax);
__ SmiCompare(rcx, r11);
__ cmpq(rcx, r11);
__ j(not_equal, exit);
__ incq(rax);
@ -1936,7 +1937,7 @@ void TestSmiShiftLogicalRight(MacroAssembler* masm,
__ bind(&fail_ok3);
__ incq(rax);
__ SmiCompare(rcx, r11);
__ cmpq(rcx, r11);
__ j(not_equal, exit);
__ addq(rax, Immediate(3));
@ -1997,7 +1998,7 @@ void TestSmiShiftArithmeticRight(MacroAssembler* masm,
__ Move(rcx, Smi::FromInt(x));
__ SmiShiftArithmeticRightConstant(rcx, rcx, shift);
__ SmiCompare(rcx, r8);
__ cmpq(rcx, r8);
__ j(not_equal, exit);
__ incq(rax);
@ -2005,7 +2006,7 @@ void TestSmiShiftArithmeticRight(MacroAssembler* masm,
__ Move(r11, Smi::FromInt(shift));
__ SmiShiftArithmeticRight(rdx, rdx, r11);
__ SmiCompare(rdx, r8);
__ cmpq(rdx, r8);
__ j(not_equal, exit);
__ incq(rax);
@ -2062,14 +2063,14 @@ void TestPositiveSmiPowerUp(MacroAssembler* masm, Label* exit, int id, int x) {
__ Move(rcx, Smi::FromInt(x));
__ movq(r11, rcx);
__ PositiveSmiTimesPowerOfTwoToInteger64(rdx, rcx, power);
__ SmiCompare(rdx, r8);
__ cmpq(rdx, r8);
__ j(not_equal, exit);
__ incq(rax);
__ SmiCompare(r11, rcx); // rcx unchanged.
__ cmpq(r11, rcx); // rcx unchanged.
__ j(not_equal, exit);
__ incq(rax);
__ PositiveSmiTimesPowerOfTwoToInteger64(rcx, rcx, power);
__ SmiCompare(rdx, r8);
__ cmpq(rdx, r8);
__ j(not_equal, exit);
__ incq(rax);
}
@ -2077,10 +2078,11 @@ void TestPositiveSmiPowerUp(MacroAssembler* masm, Label* exit, int id, int x) {
TEST(PositiveSmiTimesPowerOfTwoToInteger64) {
v8::V8::Initialize();
// Allocate an executable page of memory.
size_t actual_size;
byte* buffer =
static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize * 2,
static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize * 4,
&actual_size,
true));
CHECK(buffer);