MIPS: Convert a bunch of ASSERTs to STATIC_ASSERTs

Ported r9057 (f6c548d)

BUG=
TEST=

Review URL: http://codereview.chromium.org/7740073

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@9086 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
jkummerow@chromium.org 2011-08-31 15:22:43 +00:00
parent f3fc54666a
commit 6c9ce544cc
4 changed files with 14 additions and 14 deletions
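
Background on the change (a simplified sketch, not V8's actual macro definitions): ASSERT only evaluates its condition at run time in debug builds, while STATIC_ASSERT makes the build fail at compile time, which is the right tool for layout constants such as kSmiTag and kSmiTagSize. Illustrative C++ with assumed names and values:

// Illustration only; V8's real ASSERT/STATIC_ASSERT macros differ.
#include <cstdlib>

#define MY_ASSERT(cond) \
  do { if (!(cond)) std::abort(); } while (false)  // checked when executed
#define MY_STATIC_ASSERT(cond) \
  static_assert(cond, #cond)                       // checked by the compiler

const int kSmiTag = 0;       // assumed values matching the assertions in the diff
const int kSmiTagSize = 1;
MY_STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);  // build error if this ever breaks

int main() {
  MY_ASSERT(kSmiTagSize == 1);  // same condition, but only caught at run time
  return 0;
}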

View File

@@ -210,7 +210,7 @@ static void AllocateJSArray(MacroAssembler* masm,
// Allocate the JSArray object together with space for a FixedArray with the
// requested number of elements.
__ bind(&not_empty);
-ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
+STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
__ li(elements_array_end,
(JSArray::kSize + FixedArray::kHeaderSize) / kPointerSize);
__ sra(scratch1, array_size, kSmiTagSize);
@@ -261,7 +261,7 @@ static void AllocateJSArray(MacroAssembler* masm,
// Length of the FixedArray is the number of pre-allocated elements if
// the actual JSArray has length 0 and the size of the JSArray for non-empty
// JSArrays. The length of a FixedArray is stored as a smi.
-ASSERT(kSmiTag == 0);
+STATIC_ASSERT(kSmiTag == 0);
__ li(at, Operand(Smi::FromInt(JSArray::kPreallocatedArrayElements)));
__ movz(array_size, at, array_size);
@@ -273,7 +273,7 @@ static void AllocateJSArray(MacroAssembler* masm,
// result: JSObject
// elements_array_storage: elements array element storage
// array_size: smi-tagged size of elements array
-ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
+STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
__ sll(elements_array_end, array_size, kPointerSizeLog2 - kSmiTagSize);
__ Addu(elements_array_end, elements_array_storage, elements_array_end);
@@ -336,14 +336,14 @@ static void ArrayNativeCode(MacroAssembler* masm,
__ bind(&argc_one_or_more);
__ Branch(&argc_two_or_more, ne, a0, Operand(1));
-ASSERT(kSmiTag == 0);
+STATIC_ASSERT(kSmiTag == 0);
__ lw(a2, MemOperand(sp)); // Get the argument from the stack.
__ And(a3, a2, Operand(kIntptrSignBit | kSmiTagMask));
__ Branch(call_generic_code, eq, a3, Operand(zero_reg));
// Handle construction of an empty array of a certain size. Bail out if size
// is too large to actually allocate an elements array.
-ASSERT(kSmiTag == 0);
+STATIC_ASSERT(kSmiTag == 0);
__ Branch(call_generic_code, Ugreater_equal, a2,
Operand(JSObject::kInitialMaxFastElementArray << kSmiTagSize));
@@ -576,7 +576,7 @@ void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
// Is it a String?
__ lw(a2, FieldMemOperand(a0, HeapObject::kMapOffset));
__ lbu(a3, FieldMemOperand(a2, Map::kInstanceTypeOffset));
-ASSERT(kNotStringTag != 0);
+STATIC_ASSERT(kNotStringTag != 0);
__ And(t0, a3, Operand(kIsNotStringMask));
__ Branch(&convert_argument, ne, t0, Operand(zero_reg));
__ mov(argument, a0);

View File

@@ -3270,7 +3270,7 @@ void FullCodeGenerator::EmitGetFromCache(ZoneList<Expression*>* args) {
Label done, not_found;
-ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
+STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
__ lw(a2, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset));
// a2 now holds finger offset as a smi.
__ Addu(a3, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
@@ -4265,7 +4265,7 @@ void FullCodeGenerator::EnterFinallyBlock() {
// Cook return address in link register to stack (smi encoded Code* delta).
__ Subu(a1, ra, Operand(masm_->CodeObject()));
ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
-ASSERT_EQ(0, kSmiTag);
+STATIC_ASSERT(0 == kSmiTag);
__ Addu(a1, a1, Operand(a1)); // Convert to smi.
__ push(a1);
}
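
A note on the hunk above (stand-alone sketch with assumed constants, not V8 code): because the smi tag is zero and the total tag shift is one bit, adding the delta to itself is exactly the smi encoding of that delta, which is what the final Addu relies on.

#include <cassert>
#include <cstdint>

int main() {
  const int kSmiTagSize = 1;    // assumed, per ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize)
  const int kSmiShiftSize = 0;  // assumed
  const int kSmiTag = 0;        // per STATIC_ASSERT(0 == kSmiTag)
  int32_t delta = 1234;                    // untagged Code* delta
  int32_t smi = delta + delta;             // what "Addu(a1, a1, Operand(a1))" computes
  assert(smi == ((delta << (kSmiTagSize + kSmiShiftSize)) | kSmiTag));
  return 0;
}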

View File

@@ -338,7 +338,7 @@ static void GenerateFastArrayLoad(MacroAssembler* masm,
__ Addu(scratch1, elements,
Operand(FixedArray::kHeaderSize - kHeapObjectTag));
// The key is a smi.
-ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
+STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
__ sll(at, key, kPointerSizeLog2 - kSmiTagSize);
__ addu(at, at, scratch1);
__ lw(scratch2, MemOperand(at));
@@ -372,7 +372,7 @@ static void GenerateKeyStringCheck(MacroAssembler* masm,
// Is the string a symbol?
// map: key map
__ lbu(hash, FieldMemOperand(map, Map::kInstanceTypeOffset));
-ASSERT(kSymbolTag != 0);
+STATIC_ASSERT(kSymbolTag != 0);
__ And(at, hash, Operand(kIsSymbolMask));
__ Branch(not_symbol, eq, at, Operand(zero_reg));
}
@@ -1269,7 +1269,7 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
__ lw(t0, FieldMemOperand(elements, FixedArray::kLengthOffset));
__ Branch(&slow, hs, key, Operand(t0));
// Calculate key + 1 as smi.
-ASSERT_EQ(0, kSmiTag);
+STATIC_ASSERT(0 == kSmiTag);
__ Addu(t3, key, Operand(Smi::FromInt(1)));
__ sw(t3, FieldMemOperand(receiver, JSArray::kLengthOffset));
__ Branch(&fast);

View File

@@ -3501,7 +3501,7 @@ void KeyedLoadStubCompiler::GenerateLoadExternalArray(
// We are not untagging smi key and instead work with it
// as if it was premultiplied by 2.
-ASSERT((kSmiTag == 0) && (kSmiTagSize == 1));
+STATIC_ASSERT((kSmiTag == 0) && (kSmiTagSize == 1));
Register value = a2;
switch (elements_kind) {
@@ -4213,7 +4213,7 @@ void KeyedLoadStubCompiler::GenerateLoadFastElement(MacroAssembler* masm) {
// Load the result and make sure it's not the hole.
__ Addu(a3, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
-ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
+STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
__ sll(t0, a0, kPointerSizeLog2 - kSmiTagSize);
__ Addu(t0, t0, a3);
__ lw(t0, MemOperand(t0));
@@ -4344,7 +4344,7 @@ void KeyedStoreStubCompiler::GenerateStoreFastElement(MacroAssembler* masm,
__ Addu(scratch,
elements_reg, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
-ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
+STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
__ sll(scratch2, key_reg, kPointerSizeLog2 - kSmiTagSize);
__ Addu(scratch3, scratch2, scratch);
__ sw(value_reg, MemOperand(scratch3));
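
The repeated kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2 assertions all guard the same indexing trick used in the sll/Addu sequences above: with a zero tag that is narrower than the pointer-size shift, a smi-tagged key becomes a byte offset into the elements array with a single left shift. A stand-alone sketch with assumed MIPS32 constants (not V8 code):

#include <cassert>
#include <cstdint>

int main() {
  const int kSmiTagSize = 1;       // assumed 32-bit smi layout
  const int kSmiTag = 0;
  const int kPointerSizeLog2 = 2;  // assumed: 4-byte pointers on MIPS32
  int32_t index = 7;                                             // untagged element index
  int32_t smi_key = (index << kSmiTagSize) | kSmiTag;            // smi-tagged key
  int32_t offset = smi_key << (kPointerSizeLog2 - kSmiTagSize);  // what the sll computes
  assert(offset == index * (1 << kPointerSizeLog2));             // == index * kPointerSize
  return 0;
}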