Extract hardcoded error strings into a single place and replace them with an enum.
I'd like to propagate the bailout reason to the CPU profiler, which means
saving it into the SharedFunctionInfo heap object. There are three obstacles:
1) the bailout reason strings are spread across all the sources;
2) they are native strings, and converting them into String objects could
cost performance;
3) one byte is enough to encode all 184 bailout reasons, whereas a string
pointer takes 8 bytes.

It would also be nice to have the error strings collected in one place. That
brings additional benefits: it keeps the set of messages under control, it
gives us a chance to internationalize them, and it slightly reduces the
binary footprint. On the other hand, developers now have to add new strings
to the enum.

BUG=
R=jkummerow@chromium.org

Review URL: https://codereview.chromium.org/20843012

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@16024 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
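The core of the change is the single-definition pattern the message describes:
every bailout message is declared once in a macro list, from which both the
enum and the string table are generated. A minimal self-contained sketch of
that pattern follows; the macro and function names mirror this CL, but the
three sample entries stand in for the real 184-entry list in the V8 sources:

    // Sketch only: three sample entries stand in for the full list.
    #define ERROR_MESSAGES_LIST(V)                                  \
      V(kNoReason, "no reason")                                     \
      V(kUnexpectedStringFunction, "Unexpected String function")    \
      V(kGeneratedCodeIsTooLarge, "Generated code is too large")

    // The enum: one byte is enough to encode every reason.
    #define ERROR_MESSAGES_CONSTANTS(C, T) C,
    enum BailoutReason {
      ERROR_MESSAGES_LIST(ERROR_MESSAGES_CONSTANTS)
      kLastErrorMessage
    };
    #undef ERROR_MESSAGES_CONSTANTS

    // The lookup table: the native strings stay in one place, and callers
    // such as MacroAssembler::Abort() translate the enum value on demand.
    inline const char* GetBailoutReason(BailoutReason reason) {
    #define ERROR_MESSAGES_TEXTS(C, T) T,
      static const char* messages[] = {
        ERROR_MESSAGES_LIST(ERROR_MESSAGES_TEXTS)
      };
    #undef ERROR_MESSAGES_TEXTS
      return messages[reason];
    }

Because the enum and the table are generated from the same list, adding a new
reason is a one-line change and the two can never drift apart.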
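The profiler propagation itself is not part of this diff; the sketch below
only illustrates the message's size argument. The type name and field are
hypothetical stand-ins, not V8's actual SharedFunctionInfo layout:

    #include <cassert>
    #include <cstdint>

    // Hypothetical stand-in for a heap object that records why optimization
    // was disabled. With at most 184 reasons, one byte suffices, whereas a
    // const char* would occupy a full 8-byte pointer slot.
    struct SharedInfoSketch {
      uint8_t bailout_reason_;  // stores a BailoutReason, sketched above

      void set_bailout_reason(BailoutReason reason) {
        assert(static_cast<unsigned>(reason) < 256);
        bailout_reason_ = static_cast<uint8_t>(reason);
      }
      BailoutReason bailout_reason() const {
        return static_cast<BailoutReason>(bailout_reason_);
      }
    };

A profiler could then call GetBailoutReason(info.bailout_reason()) at report
time, paying for the string only when one is actually printed.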
@@ -119,9 +119,9 @@ void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
     // Initial map for the builtin InternalArray functions should be maps.
     __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
     __ SmiTst(r2);
-    __ Assert(ne, "Unexpected initial map for InternalArray function");
+    __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction);
     __ CompareObjectType(r2, r3, r4, MAP_TYPE);
-    __ Assert(eq, "Unexpected initial map for InternalArray function");
+    __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction);
   }
 
   // Run the native code for the InternalArray function called as a normal
@@ -147,9 +147,9 @@ void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
     // Initial map for the builtin Array functions should be maps.
     __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
     __ SmiTst(r2);
-    __ Assert(ne, "Unexpected initial map for Array function");
+    __ Assert(ne, kUnexpectedInitialMapForArrayFunction);
     __ CompareObjectType(r2, r3, r4, MAP_TYPE);
-    __ Assert(eq, "Unexpected initial map for Array function");
+    __ Assert(eq, kUnexpectedInitialMapForArrayFunction);
   }
 
   // Run the native code for the Array function called as a normal function.
@@ -178,7 +178,7 @@ void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
   if (FLAG_debug_code) {
     __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, r2);
     __ cmp(function, Operand(r2));
-    __ Assert(eq, "Unexpected String function");
+    __ Assert(eq, kUnexpectedStringFunction);
   }
 
   // Load the first arguments in r0 and get rid of the rest.
@@ -224,10 +224,10 @@ void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
   if (FLAG_debug_code) {
     __ ldrb(r4, FieldMemOperand(map, Map::kInstanceSizeOffset));
     __ cmp(r4, Operand(JSValue::kSize >> kPointerSizeLog2));
-    __ Assert(eq, "Unexpected string wrapper instance size");
+    __ Assert(eq, kUnexpectedStringWrapperInstanceSize);
     __ ldrb(r4, FieldMemOperand(map, Map::kUnusedPropertyFieldsOffset));
     __ cmp(r4, Operand::Zero());
-    __ Assert(eq, "Unexpected unused properties of string wrapper");
+    __ Assert(eq, kUnexpectedUnusedPropertiesOfStringWrapper);
   }
   __ str(map, FieldMemOperand(r0, HeapObject::kMapOffset));
 
@@ -471,7 +471,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
         // r0: offset of first field after pre-allocated fields
         if (FLAG_debug_code) {
           __ cmp(r0, r6);
-          __ Assert(le, "Unexpected number of pre-allocated property fields.");
+          __ Assert(le, kUnexpectedNumberOfPreAllocatedPropertyFields);
         }
         __ InitializeFieldsWithFiller(r5, r0, r7);
         // To allow for truncation.
@@ -503,7 +503,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
 
       // Done if no extra properties are to be allocated.
       __ b(eq, &allocated);
-      __ Assert(pl, "Property allocation count failed.");
+      __ Assert(pl, kPropertyAllocationCountFailed);
 
       // Scale the number of elements by pointer size and add the header for
       // FixedArrays to the start of the next object calculation from above.
@@ -547,7 +547,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
       } else if (FLAG_debug_code) {
         __ LoadRoot(r8, Heap::kUndefinedValueRootIndex);
         __ cmp(r7, r8);
-        __ Assert(eq, "Undefined value not loaded.");
+        __ Assert(eq, kUndefinedValueNotLoaded);
       }
       __ b(&entry);
       __ bind(&loop);
@@ -520,9 +520,8 @@ void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
   Label after_sentinel;
   __ JumpIfNotSmi(r3, &after_sentinel);
   if (FLAG_debug_code) {
-    const char* message = "Expected 0 as a Smi sentinel";
     __ cmp(r3, Operand::Zero());
-    __ Assert(eq, message);
+    __ Assert(eq, kExpected0AsASmiSentinel);
   }
   __ ldr(r3, GlobalObjectOperand());
   __ ldr(r3, FieldMemOperand(r3, GlobalObject::kNativeContextOffset));
@@ -3917,9 +3916,9 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
   __ ldr(regexp_data, FieldMemOperand(r0, JSRegExp::kDataOffset));
   if (FLAG_debug_code) {
     __ SmiTst(regexp_data);
-    __ Check(ne, "Unexpected type for RegExp data, FixedArray expected");
+    __ Check(ne, kUnexpectedTypeForRegExpDataFixedArrayExpected);
     __ CompareObjectType(regexp_data, r0, r0, FIXED_ARRAY_TYPE);
-    __ Check(eq, "Unexpected type for RegExp data, FixedArray expected");
+    __ Check(eq, kUnexpectedTypeForRegExpDataFixedArrayExpected);
   }
 
   // regexp_data: RegExp data (FixedArray)
@@ -4261,7 +4260,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
     // Assert that we do not have a cons or slice (indirect strings) here.
     // Sequential strings have already been ruled out.
     __ tst(r0, Operand(kIsIndirectStringMask));
-    __ Assert(eq, "external string expected, but not found");
+    __ Assert(eq, kExternalStringExpectedButNotFound);
   }
   __ ldr(subject,
          FieldMemOperand(subject, ExternalString::kResourceDataOffset));
@@ -4643,7 +4642,7 @@ void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
 void StringCharCodeAtGenerator::GenerateSlow(
     MacroAssembler* masm,
     const RuntimeCallHelper& call_helper) {
-  __ Abort("Unexpected fallthrough to CharCodeAt slow case");
+  __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase);
 
   // Index is not a smi.
   __ bind(&index_not_smi_);
@@ -4688,7 +4687,7 @@ void StringCharCodeAtGenerator::GenerateSlow(
   call_helper.AfterCall(masm);
   __ jmp(&exit_);
 
-  __ Abort("Unexpected fallthrough from CharCodeAt slow case");
+  __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase);
 }
 
 
@@ -4718,7 +4717,7 @@ void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
 void StringCharFromCodeGenerator::GenerateSlow(
     MacroAssembler* masm,
     const RuntimeCallHelper& call_helper) {
-  __ Abort("Unexpected fallthrough to CharFromCode slow case");
+  __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase);
 
   __ bind(&slow_case_);
   call_helper.BeforeCall(masm);
@@ -4728,7 +4727,7 @@ void StringCharFromCodeGenerator::GenerateSlow(
   call_helper.AfterCall(masm);
   __ jmp(&exit_);
 
-  __ Abort("Unexpected fallthrough from CharFromCode slow case");
+  __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase);
 }
 
 
@@ -4785,7 +4784,7 @@ void StringHelper::GenerateCopyCharactersLong(MacroAssembler* masm,
     // Check that destination is actually word aligned if the flag says
     // that it is.
     __ tst(dest, Operand(kPointerAlignmentMask));
-    __ Check(eq, "Destination of copy not aligned.");
+    __ Check(eq, kDestinationOfCopyNotAligned);
   }
 
   const int kReadAlignment = 4;
@@ -5014,7 +5013,7 @@ void StringHelper::GenerateTwoCharacterStringTableProbe(MacroAssembler* masm,
     if (FLAG_debug_code) {
       __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
       __ cmp(ip, candidate);
-      __ Assert(eq, "oddball in string table is not undefined or the hole");
+      __ Assert(eq, kOddballInStringTableIsNotUndefinedOrTheHole);
     }
     __ jmp(&next_probe[i]);
 
@@ -6912,7 +6911,7 @@ static void CreateArrayDispatch(MacroAssembler* masm) {
   }
 
   // If we reached this point there is a problem.
-  __ Abort("Unexpected ElementsKind in array constructor");
+  __ Abort(kUnexpectedElementsKindInArrayConstructor);
 }
 
 
@@ -6969,7 +6968,7 @@ static void CreateArrayDispatchOneArgument(MacroAssembler* masm) {
   }
 
   // If we reached this point there is a problem.
-  __ Abort("Unexpected ElementsKind in array constructor");
+  __ Abort(kUnexpectedElementsKindInArrayConstructor);
 }
 
 
@@ -7030,9 +7029,9 @@ void ArrayConstructorStub::Generate(MacroAssembler* masm) {
     __ ldr(r3, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
     // Will both indicate a NULL and a Smi.
    __ tst(r3, Operand(kSmiTagMask));
-    __ Assert(ne, "Unexpected initial map for Array function");
+    __ Assert(ne, kUnexpectedInitialMapForArrayFunction);
     __ CompareObjectType(r3, r3, r4, MAP_TYPE);
-    __ Assert(eq, "Unexpected initial map for Array function");
+    __ Assert(eq, kUnexpectedInitialMapForArrayFunction);
 
     // We should either have undefined in ebx or a valid cell
     Label okay_here;
@@ -7041,7 +7040,7 @@ void ArrayConstructorStub::Generate(MacroAssembler* masm) {
     __ b(eq, &okay_here);
     __ ldr(r3, FieldMemOperand(r2, 0));
     __ cmp(r3, Operand(cell_map));
-    __ Assert(eq, "Expected property cell in register ebx");
+    __ Assert(eq, kExpectedPropertyCellInRegisterEbx);
     __ bind(&okay_here);
   }
 
@@ -7144,9 +7143,9 @@ void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
    __ ldr(r3, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
     // Will both indicate a NULL and a Smi.
     __ tst(r3, Operand(kSmiTagMask));
-    __ Assert(ne, "Unexpected initial map for Array function");
+    __ Assert(ne, kUnexpectedInitialMapForArrayFunction);
     __ CompareObjectType(r3, r3, r4, MAP_TYPE);
-    __ Assert(eq, "Unexpected initial map for Array function");
+    __ Assert(eq, kUnexpectedInitialMapForArrayFunction);
   }
 
   // Figure out the right elements kind
@@ -7163,7 +7162,7 @@ void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
     __ b(eq, &done);
     __ cmp(r3, Operand(FAST_HOLEY_ELEMENTS));
     __ Assert(eq,
-              "Invalid ElementsKind for InternalArray or InternalPackedArray");
+              kInvalidElementsKindForInternalArrayOrInternalPackedArray);
     __ bind(&done);
   }
 
@@ -532,7 +532,7 @@ void ElementsTransitionGenerator::GenerateSmiToDouble(
     __ SmiTag(r9);
     __ orr(r9, r9, Operand(1));
     __ CompareRoot(r9, Heap::kTheHoleValueRootIndex);
-    __ Assert(eq, "object found in smi-only array");
+    __ Assert(eq, kObjectFoundInSmiOnlyArray);
   }
   __ Strd(r4, r5, MemOperand(r7, 8, PostIndex));
 
@@ -728,7 +728,7 @@ void StringCharLoadGenerator::Generate(MacroAssembler* masm,
     // Assert that we do not have a cons or slice (indirect strings) here.
     // Sequential strings have already been ruled out.
     __ tst(result, Operand(kIsIndirectStringMask));
-    __ Assert(eq, "external string expected, but not found");
+    __ Assert(eq, kExternalStringExpectedButNotFound);
   }
   // Rule out short external strings.
   STATIC_CHECK(kShortExternalStringTag != 0);
@@ -130,7 +130,7 @@ static void Generate_DebugBreakCallHelper(MacroAssembler* masm,
       if ((non_object_regs & (1 << r)) != 0) {
         if (FLAG_debug_code) {
           __ tst(reg, Operand(0xc0000000));
-          __ Assert(eq, "Unable to encode value as smi");
+          __ Assert(eq, kUnableToEncodeValueAsSmi);
         }
         __ SmiTag(reg);
       }
@@ -313,12 +313,12 @@ void Debug::GenerateSlotDebugBreak(MacroAssembler* masm) {
 
 
 void Debug::GeneratePlainReturnLiveEdit(MacroAssembler* masm) {
-  masm->Abort("LiveEdit frame dropping is not supported on arm");
+  masm->Abort(kLiveEditFrameDroppingIsNotSupportedOnArm);
 }
 
 
 void Debug::GenerateFrameDropperLiveEdit(MacroAssembler* masm) {
-  masm->Abort("LiveEdit frame dropping is not supported on arm");
+  masm->Abort(kLiveEditFrameDroppingIsNotSupportedOnArm);
 }
 
 const bool Debug::kFrameDropperSupported = false;
@@ -786,9 +786,9 @@ void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
     // Check that we're not inside a with or catch context.
     __ ldr(r1, FieldMemOperand(cp, HeapObject::kMapOffset));
     __ CompareRoot(r1, Heap::kWithContextMapRootIndex);
-    __ Check(ne, "Declaration in with context.");
+    __ Check(ne, kDeclarationInWithContext);
     __ CompareRoot(r1, Heap::kCatchContextMapRootIndex);
-    __ Check(ne, "Declaration in catch context.");
+    __ Check(ne, kDeclarationInCatchContext);
   }
 }
 
@@ -2512,7 +2512,7 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
       // Check for an uninitialized let binding.
       __ ldr(r2, location);
       __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
-      __ Check(eq, "Let binding re-initialization.");
+      __ Check(eq, kLetBindingReInitialization);
     }
     // Perform the assignment.
     __ str(r0, location);
@@ -3473,23 +3473,23 @@ void FullCodeGenerator::EmitSeqStringSetCharCheck(Register string,
                                                   Register value,
                                                   uint32_t encoding_mask) {
   __ SmiTst(index);
-  __ Check(eq, "Non-smi index");
+  __ Check(eq, kNonSmiIndex);
   __ SmiTst(value);
-  __ Check(eq, "Non-smi value");
+  __ Check(eq, kNonSmiValue);
 
   __ ldr(ip, FieldMemOperand(string, String::kLengthOffset));
   __ cmp(index, ip);
-  __ Check(lt, "Index is too large");
+  __ Check(lt, kIndexIsTooLarge);
 
   __ cmp(index, Operand(Smi::FromInt(0)));
-  __ Check(ge, "Index is negative");
+  __ Check(ge, kIndexIsNegative);
 
   __ ldr(ip, FieldMemOperand(string, HeapObject::kMapOffset));
   __ ldrb(ip, FieldMemOperand(ip, Map::kInstanceTypeOffset));
 
   __ and_(ip, ip, Operand(kStringRepresentationMask | kStringEncodingMask));
   __ cmp(ip, Operand(encoding_mask));
-  __ Check(eq, "Unexpected string type");
+  __ Check(eq, kUnexpectedStringType);
 }
 
 
@@ -3849,7 +3849,7 @@ void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
   Handle<FixedArray> jsfunction_result_caches(
       isolate()->native_context()->jsfunction_result_caches());
   if (jsfunction_result_caches->length() <= cache_id) {
-    __ Abort("Attempt to use undefined cache.");
+    __ Abort(kAttemptToUseUndefinedCache);
     __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
     context()->Plug(r0);
     return;
@@ -4030,7 +4030,7 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
   // elements_end: Array end.
   if (generate_debug_code_) {
     __ cmp(array_length, Operand::Zero());
-    __ Assert(gt, "No empty arrays here in EmitFastAsciiArrayJoin");
+    __ Assert(gt, kNoEmptyArraysHereInEmitFastAsciiArrayJoin);
   }
   __ bind(&loop);
   __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
@@ -437,7 +437,7 @@ LPlatformChunk* LChunkBuilder::Build() {
 }
 
 
-void LChunkBuilder::Abort(const char* reason) {
+void LChunkBuilder::Abort(BailoutReason reason) {
   info()->set_bailout_reason(reason);
   status_ = ABORTED;
 }
@@ -645,7 +645,7 @@ LUnallocated* LChunkBuilder::TempRegister() {
       new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER);
   int vreg = allocator_->GetVirtualRegister();
   if (!allocator_->AllocationOk()) {
-    Abort("Out of virtual registers while trying to allocate temp register.");
+    Abort(kOutOfVirtualRegistersWhileTryingToAllocateTempRegister);
     vreg = 0;
   }
   operand->set_virtual_register(vreg);
@@ -2442,7 +2442,7 @@ LInstruction* LChunkBuilder::DoParameter(HParameter* instr) {
 LInstruction* LChunkBuilder::DoUnknownOSRValue(HUnknownOSRValue* instr) {
   int spill_index = chunk()->GetNextSpillIndex(false);  // Not double-width.
   if (spill_index > LUnallocated::kMaxFixedSlotIndex) {
-    Abort("Too many spill slots needed for OSR");
+    Abort(kTooManySpillSlotsNeededForOSR);
     spill_index = 0;
   }
   return DefineAsSpilled(new(zone()) LUnknownOSRValue, spill_index);
@@ -2670,7 +2670,7 @@ class LChunkBuilder BASE_EMBEDDED {
   bool is_done() const { return status_ == DONE; }
   bool is_aborted() const { return status_ == ABORTED; }
 
-  void Abort(const char* reason);
+  void Abort(BailoutReason reason);
 
   // Methods for getting operands for Use / Define / Temp.
   LUnallocated* ToUnallocated(Register reg);
@@ -91,7 +91,7 @@ void LCodeGen::FinishCode(Handle<Code> code) {
 }
 
 
-void LCodeGen::Abort(const char* reason) {
+void LCodeGen::Abort(BailoutReason reason) {
   info()->set_bailout_reason(reason);
   status_ = ABORTED;
 }
@@ -334,7 +334,7 @@ bool LCodeGen::GenerateDeoptJumpTable() {
   // 32bit data after it.
   if (!is_int24((masm()->pc_offset() / Assembler::kInstrSize) +
       deopt_jump_table_.length() * 7)) {
-    Abort("Generated code is too large");
+    Abort(kGeneratedCodeIsTooLarge);
   }
 
   if (deopt_jump_table_.length() > 0) {
@@ -423,7 +423,7 @@ Register LCodeGen::EmitLoadRegister(LOperand* op, Register scratch) {
       ASSERT(literal->IsNumber());
       __ mov(scratch, Operand(static_cast<int32_t>(literal->Number())));
     } else if (r.IsDouble()) {
-      Abort("EmitLoadRegister: Unsupported double immediate.");
+      Abort(kEmitLoadRegisterUnsupportedDoubleImmediate);
     } else {
       ASSERT(r.IsTagged());
       __ LoadObject(scratch, literal);
@@ -461,9 +461,9 @@ DwVfpRegister LCodeGen::EmitLoadDoubleRegister(LOperand* op,
       __ vcvt_f64_s32(dbl_scratch, flt_scratch);
       return dbl_scratch;
     } else if (r.IsDouble()) {
-      Abort("unsupported double immediate");
+      Abort(kUnsupportedDoubleImmediate);
     } else if (r.IsTagged()) {
-      Abort("unsupported tagged immediate");
+      Abort(kUnsupportedTaggedImmediate);
    }
   } else if (op->IsStackSlot() || op->IsArgument()) {
     // TODO(regis): Why is vldr not taking a MemOperand?
@@ -534,14 +534,14 @@ Operand LCodeGen::ToOperand(LOperand* op) {
      ASSERT(constant->HasInteger32Value());
      return Operand(constant->Integer32Value());
    } else if (r.IsDouble()) {
-      Abort("ToOperand Unsupported double immediate.");
+      Abort(kToOperandUnsupportedDoubleImmediate);
    }
    ASSERT(r.IsTagged());
    return Operand(constant->handle());
  } else if (op->IsRegister()) {
    return Operand(ToRegister(op));
  } else if (op->IsDoubleRegister()) {
-    Abort("ToOperand IsDoubleRegister unimplemented");
+    Abort(kToOperandIsDoubleRegisterUnimplemented);
    return Operand::Zero();
  }
  // Stack slots not implemented, use ToMemOperand instead.
@@ -772,7 +772,7 @@ void LCodeGen::DeoptimizeIf(Condition cc,
   Address entry =
       Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type);
   if (entry == NULL) {
-    Abort("bailout was not prepared");
+    Abort(kBailoutWasNotPrepared);
     return;
   }
 
@@ -1936,7 +1936,7 @@ void LCodeGen::DoSeqStringSetChar(LSeqStringSetChar* instr) {
     static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
     __ cmp(ip, Operand(encoding == String::ONE_BYTE_ENCODING
                            ? one_byte_seq_type : two_byte_seq_type));
-    __ Check(eq, "Unexpected string type");
+    __ Check(eq, kUnexpectedStringType);
   }
 
   __ add(ip,
@@ -3200,7 +3200,7 @@ void LCodeGen::DoLoadKeyedExternalArray(LLoadKeyed* instr) {
   if (key_is_constant) {
     constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
     if (constant_key & 0xF0000000) {
-      Abort("array index constant value too big.");
+      Abort(kArrayIndexConstantValueTooBig);
     }
   } else {
     key = ToRegister(instr->key());
@@ -3284,7 +3284,7 @@ void LCodeGen::DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr) {
   if (key_is_constant) {
     constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
     if (constant_key & 0xF0000000) {
-      Abort("array index constant value too big.");
+      Abort(kArrayIndexConstantValueTooBig);
     }
   } else {
     key = ToRegister(instr->key());
@@ -3545,7 +3545,7 @@ void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
 void LCodeGen::DoPushArgument(LPushArgument* instr) {
   LOperand* argument = instr->value();
   if (argument->IsDoubleRegister() || argument->IsDoubleStackSlot()) {
-    Abort("DoPushArgument not implemented for double type.");
+    Abort(kDoPushArgumentNotImplementedForDoubleType);
   } else {
     Register argument_reg = EmitLoadRegister(argument, ip);
     __ push(argument_reg);
@@ -4319,7 +4319,7 @@ void LCodeGen::DoStoreKeyedExternalArray(LStoreKeyed* instr) {
   if (key_is_constant) {
     constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
     if (constant_key & 0xF0000000) {
-      Abort("array index constant value too big.");
+      Abort(kArrayIndexConstantValueTooBig);
     }
   } else {
     key = ToRegister(instr->key());
@@ -4392,7 +4392,7 @@ void LCodeGen::DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr) {
   if (key_is_constant) {
     constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
     if (constant_key & 0xF0000000) {
-      Abort("array index constant value too big.");
+      Abort(kArrayIndexConstantValueTooBig);
     }
   } else {
     key = ToRegister(instr->key());
@@ -4415,7 +4415,7 @@ void LCodeGen::DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr) {
     if (masm()->emit_debug_code()) {
       __ vmrs(ip);
       __ tst(ip, Operand(kVFPDefaultNaNModeControlBit));
-      __ Assert(ne, "Default NaN mode not set");
+      __ Assert(ne, kDefaultNaNModeNotSet);
     }
     __ VFPCanonicalizeNaN(value);
   }
@@ -214,7 +214,7 @@ class LCodeGen BASE_EMBEDDED {
 
   int GetStackSlotCount() const { return chunk()->spill_slot_count(); }
 
-  void Abort(const char* reason);
+  void Abort(BailoutReason reason);
   void FPRINTF_CHECKING Comment(const char* format, ...);
 
   void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }
@@ -489,7 +489,7 @@ void MacroAssembler::RecordWrite(Register object,
   if (emit_debug_code()) {
     ldr(ip, MemOperand(address));
     cmp(ip, value);
-    Check(eq, "Wrong address or value passed to RecordWrite");
+    Check(eq, kWrongAddressOrValuePassedToRecordWrite);
   }
 
   Label done;
@@ -1490,7 +1490,7 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
   // In debug mode, make sure the lexical context is set.
 #ifdef DEBUG
   cmp(scratch, Operand::Zero());
-  Check(ne, "we should not have an empty lexical context");
+  Check(ne, kWeShouldNotHaveAnEmptyLexicalContext);
 #endif
 
   // Load the native context of the current context.
@@ -1508,7 +1508,7 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
     ldr(holder_reg, FieldMemOperand(scratch, HeapObject::kMapOffset));
     LoadRoot(ip, Heap::kNativeContextMapRootIndex);
     cmp(holder_reg, ip);
-    Check(eq, "JSGlobalObject::native_context should be a native context.");
+    Check(eq, kJSGlobalObjectNativeContextShouldBeANativeContext);
     pop(holder_reg);  // Restore holder.
   }
 
@@ -1525,12 +1525,12 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
     mov(holder_reg, ip);  // Move ip to its holding place.
     LoadRoot(ip, Heap::kNullValueRootIndex);
     cmp(holder_reg, ip);
-    Check(ne, "JSGlobalProxy::context() should not be null.");
+    Check(ne, kJSGlobalProxyContextShouldNotBeNull);
 
     ldr(holder_reg, FieldMemOperand(holder_reg, HeapObject::kMapOffset));
     LoadRoot(ip, Heap::kNativeContextMapRootIndex);
     cmp(holder_reg, ip);
-    Check(eq, "JSGlobalObject::native_context should be a native context.");
+    Check(eq, kJSGlobalObjectNativeContextShouldBeANativeContext);
     // Restore ip is not needed. ip is reloaded below.
     pop(holder_reg);  // Restore holder.
     // Restore ip to holder's context.
@@ -1727,7 +1727,7 @@ void MacroAssembler::Allocate(int object_size,
     // respect to register content between debug and release mode.
     ldr(ip, MemOperand(topaddr));
     cmp(result, ip);
-    Check(eq, "Unexpected allocation top");
+    Check(eq, kUnexpectedAllocationTop);
   }
   // Load allocation limit into ip. Result already contains allocation top.
   ldr(ip, MemOperand(topaddr, limit - top));
@@ -1825,7 +1825,7 @@ void MacroAssembler::Allocate(Register object_size,
     // respect to register content between debug and release mode.
     ldr(ip, MemOperand(topaddr));
     cmp(result, ip);
-    Check(eq, "Unexpected allocation top");
+    Check(eq, kUnexpectedAllocationTop);
   }
   // Load allocation limit into ip. Result already contains allocation top.
   ldr(ip, MemOperand(topaddr, limit - top));
@@ -1859,7 +1859,7 @@ void MacroAssembler::Allocate(Register object_size,
   // Update allocation top. result temporarily holds the new top.
   if (emit_debug_code()) {
     tst(scratch2, Operand(kObjectAlignmentMask));
-    Check(eq, "Unaligned allocation in new space");
+    Check(eq, kUnalignedAllocationInNewSpace);
   }
   str(scratch2, MemOperand(topaddr));
 
@@ -1882,7 +1882,7 @@ void MacroAssembler::UndoAllocationInNewSpace(Register object,
   mov(scratch, Operand(new_space_allocation_top));
   ldr(scratch, MemOperand(scratch));
   cmp(object, scratch);
-  Check(lt, "Undo allocation of non allocated memory");
+  Check(lt, kUndoAllocationOfNonAllocatedMemory);
 #endif
   // Write the address of the object to un-allocate as the current top.
   mov(scratch, Operand(new_space_allocation_top));
@@ -2131,7 +2131,7 @@ void MacroAssembler::StoreNumberToDoubleElements(
     if (emit_debug_code()) {
       vmrs(ip);
       tst(ip, Operand(kVFPDefaultNaNModeControlBit));
-      Assert(ne, "Default NaN mode not set");
+      Assert(ne, kDefaultNaNModeNotSet);
     }
     VFPCanonicalizeNaN(double_scratch);
     b(&store);
@@ -2381,7 +2381,7 @@ void MacroAssembler::CallApiFunctionAndReturn(ExternalReference function,
   if (emit_debug_code()) {
     ldr(r1, MemOperand(r7, kLevelOffset));
     cmp(r1, r6);
-    Check(eq, "Unexpected level after return from api call");
+    Check(eq, kUnexpectedLevelAfterReturnFromApiCall);
   }
   sub(r6, r6, Operand(1));
   str(r6, MemOperand(r7, kLevelOffset));
@@ -2782,9 +2782,9 @@ void MacroAssembler::DecrementCounter(StatsCounter* counter, int value,
 }
 
 
-void MacroAssembler::Assert(Condition cond, const char* msg) {
+void MacroAssembler::Assert(Condition cond, BailoutReason reason) {
   if (emit_debug_code())
-    Check(cond, msg);
+    Check(cond, reason);
 }
 
 
@@ -2803,23 +2803,23 @@ void MacroAssembler::AssertFastElements(Register elements) {
     LoadRoot(ip, Heap::kFixedCOWArrayMapRootIndex);
     cmp(elements, ip);
     b(eq, &ok);
-    Abort("JSObject with fast elements map has slow elements");
+    Abort(kJSObjectWithFastElementsMapHasSlowElements);
     bind(&ok);
     pop(elements);
   }
 }
 
 
-void MacroAssembler::Check(Condition cond, const char* msg) {
+void MacroAssembler::Check(Condition cond, BailoutReason reason) {
   Label L;
   b(cond, &L);
-  Abort(msg);
+  Abort(reason);
   // will not return here
   bind(&L);
 }
 
 
-void MacroAssembler::Abort(const char* msg) {
+void MacroAssembler::Abort(BailoutReason reason) {
   Label abort_start;
   bind(&abort_start);
   // We want to pass the msg string like a smi to avoid GC
@@ -2827,6 +2827,7 @@ void MacroAssembler::Abort(const char* msg) {
   // properly. Instead, we pass an aligned pointer that is
   // a proper v8 smi, but also pass the alignment difference
   // from the real pointer as a smi.
+  const char* msg = GetBailoutReason(reason);
   intptr_t p1 = reinterpret_cast<intptr_t>(msg);
   intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
   ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
@@ -2969,7 +2970,7 @@ void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
     CheckMap(map, scratch, Heap::kMetaMapRootIndex, &fail, DO_SMI_CHECK);
     b(&ok);
     bind(&fail);
-    Abort("Global functions must have initial map");
+    Abort(kGlobalFunctionsMustHaveInitialMap);
     bind(&ok);
   }
 }
@@ -3038,7 +3039,7 @@ void MacroAssembler::AssertNotSmi(Register object) {
   if (emit_debug_code()) {
     STATIC_ASSERT(kSmiTag == 0);
     tst(object, Operand(kSmiTagMask));
-    Check(ne, "Operand is a smi");
+    Check(ne, kOperandIsASmi);
   }
 }
 
@@ -3047,7 +3048,7 @@ void MacroAssembler::AssertSmi(Register object) {
   if (emit_debug_code()) {
     STATIC_ASSERT(kSmiTag == 0);
     tst(object, Operand(kSmiTagMask));
-    Check(eq, "Operand is not smi");
+    Check(eq, kOperandIsNotSmi);
   }
 }
 
@@ -3056,12 +3057,12 @@ void MacroAssembler::AssertString(Register object) {
   if (emit_debug_code()) {
     STATIC_ASSERT(kSmiTag == 0);
     tst(object, Operand(kSmiTagMask));
-    Check(ne, "Operand is a smi and not a string");
+    Check(ne, kOperandIsASmiAndNotAString);
     push(object);
     ldr(object, FieldMemOperand(object, HeapObject::kMapOffset));
     CompareInstanceType(object, object, FIRST_NONSTRING_TYPE);
     pop(object);
-    Check(lo, "Operand is not a string");
+    Check(lo, kOperandIsNotAString);
   }
 }
 
@@ -3070,12 +3071,12 @@ void MacroAssembler::AssertName(Register object) {
   if (emit_debug_code()) {
     STATIC_ASSERT(kSmiTag == 0);
    tst(object, Operand(kSmiTagMask));
-    Check(ne, "Operand is a smi and not a name");
+    Check(ne, kOperandIsASmiAndNotAName);
     push(object);
     ldr(object, FieldMemOperand(object, HeapObject::kMapOffset));
     CompareInstanceType(object, object, LAST_NAME_TYPE);
     pop(object);
-    Check(le, "Operand is not a name");
+    Check(le, kOperandIsNotAName);
   }
 }
 
@@ -3084,7 +3085,7 @@ void MacroAssembler::AssertName(Register object) {
 void MacroAssembler::AssertIsRoot(Register reg, Heap::RootListIndex index) {
   if (emit_debug_code()) {
     CompareRoot(reg, index);
-    Check(eq, "HeapNumberMap register clobbered.");
+    Check(eq, kHeapNumberMapRegisterClobbered);
   }
 }
 
@@ -3230,7 +3231,7 @@ void MacroAssembler::CopyBytes(Register src,
   bind(&word_loop);
   if (emit_debug_code()) {
     tst(src, Operand(kPointerSize - 1));
-    Assert(eq, "Expecting alignment for CopyBytes");
+    Assert(eq, kExpectingAlignmentForCopyBytes);
   }
   cmp(length, Operand(kPointerSize));
   b(lt, &byte_loop);
@@ -3494,7 +3495,7 @@ void MacroAssembler::GetRelocatedValueLocation(Register ldr_location,
     // Check that the instruction is a ldr reg, [pc + offset] .
     and_(result, result, Operand(kLdrPCPattern));
     cmp(result, Operand(kLdrPCPattern));
-    Check(eq, "The instruction to patch should be a load from pc.");
+    Check(eq, kTheInstructionToPatchShouldBeALoadFromPc);
     // Result was clobbered. Restore it.
    ldr(result, MemOperand(ldr_location));
   }
@@ -1136,14 +1136,14 @@ class MacroAssembler: public Assembler {
 
   // Calls Abort(msg) if the condition cond is not satisfied.
   // Use --debug_code to enable.
-  void Assert(Condition cond, const char* msg);
+  void Assert(Condition cond, BailoutReason reason);
   void AssertFastElements(Register elements);
 
   // Like Assert(), but always enabled.
-  void Check(Condition cond, const char* msg);
+  void Check(Condition cond, BailoutReason reason);
 
   // Print a message to stdout and abort execution.
-  void Abort(const char* msg);
+  void Abort(BailoutReason msg);
 
   // Verify restrictions about code generated in stubs.
   void set_generating_stub(bool value) { generating_stub_ = value; }
@@ -41,13 +41,13 @@ static LChunk* OptimizeGraph(HGraph* graph) {
   DisallowHandleDereference no_deref;
 
   ASSERT(graph != NULL);
-  SmartArrayPointer<char> bailout_reason;
+  BailoutReason bailout_reason = kNoReason;
   if (!graph->Optimize(&bailout_reason)) {
-    FATAL(bailout_reason.is_empty() ? "unknown" : *bailout_reason);
+    FATAL(GetBailoutReason(bailout_reason));
   }
   LChunk* chunk = LChunk::NewChunk(graph);
   if (chunk == NULL) {
-    FATAL(graph->info()->bailout_reason());
+    FATAL(GetBailoutReason(graph->info()->bailout_reason()));
   }
   return chunk;
 }
@@ -127,7 +127,7 @@ void CompilationInfo::Initialize(Isolate* isolate,
     ASSERT(language_mode() == CLASSIC_MODE);
     SetLanguageMode(shared_info_->language_mode());
   }
-  set_bailout_reason("unknown");
+  set_bailout_reason(kUnknown);
 }
 
 
@@ -342,7 +342,7 @@ OptimizingCompiler::Status OptimizingCompiler::CreateGraph() {
   const int kMaxOptCount =
       FLAG_deopt_every_n_times == 0 ? FLAG_max_opt_count : 1000;
   if (info()->opt_count() > kMaxOptCount) {
-    info()->set_bailout_reason("optimized too many times");
+    info()->set_bailout_reason(kOptimizedTooManyTimes);
     return AbortOptimization();
   }
 
@@ -356,14 +356,14 @@ OptimizingCompiler::Status OptimizingCompiler::CreateGraph() {
   const int parameter_limit = -LUnallocated::kMinFixedSlotIndex;
   Scope* scope = info()->scope();
   if ((scope->num_parameters() + 1) > parameter_limit) {
-    info()->set_bailout_reason("too many parameters");
+    info()->set_bailout_reason(kTooManyParameters);
     return AbortOptimization();
   }
 
   const int locals_limit = LUnallocated::kMaxFixedSlotIndex;
   if (!info()->osr_ast_id().IsNone() &&
       scope->num_parameters() + 1 + scope->num_stack_slots() > locals_limit) {
-    info()->set_bailout_reason("too many parameters/locals");
+    info()->set_bailout_reason(kTooManyParametersLocals);
     return AbortOptimization();
   }
 
@@ -458,9 +458,9 @@ OptimizingCompiler::Status OptimizingCompiler::OptimizeGraph() {
   ASSERT(last_status() == SUCCEEDED);
   Timer t(this, &time_taken_to_optimize_);
   ASSERT(graph_ != NULL);
-  SmartArrayPointer<char> bailout_reason;
+  BailoutReason bailout_reason = kNoReason;
   if (!graph_->Optimize(&bailout_reason)) {
-    if (!bailout_reason.is_empty()) graph_builder_->Bailout(*bailout_reason);
+    if (bailout_reason == kNoReason) graph_builder_->Bailout(bailout_reason);
     return SetLastStatus(BAILED_OUT);
   } else {
     chunk_ = LChunk::NewChunk(graph_);
@@ -485,7 +485,9 @@ OptimizingCompiler::Status OptimizingCompiler::GenerateAndInstallCode() {
   DisallowDeferredHandleDereference no_deferred_handle_deref;
   Handle<Code> optimized_code = chunk_->Codegen();
   if (optimized_code.is_null()) {
-    info()->set_bailout_reason("code generation failed");
+    if (info()->bailout_reason() != kNoReason) {
+      info()->set_bailout_reason(kCodeGenerationFailed);
+    }
     return AbortOptimization();
   }
   info()->SetCode(optimized_code);
@@ -780,7 +782,7 @@ Handle<SharedFunctionInfo> Compiler::CompileEval(Handle<String> source,
   if (!result.is_null()) {
     // Explicitly disable optimization for eval code. We're not yet prepared
     // to handle eval-code in the optimizing compiler.
-    result->DisableOptimization("eval");
+    result->DisableOptimization(kEval);
 
     // If caller is strict mode, the result must be in strict mode or
     // extended mode as well, but not the other way around. Consider:
@@ -1055,13 +1057,13 @@ void Compiler::InstallOptimizedCode(OptimizingCompiler* optimizing_compiler) {
   // the unoptimized code.
   OptimizingCompiler::Status status = optimizing_compiler->last_status();
   if (info->HasAbortedDueToDependencyChange()) {
-    info->set_bailout_reason("bailed out due to dependent map");
+    info->set_bailout_reason(kBailedOutDueToDependentMap);
     status = optimizing_compiler->AbortOptimization();
   } else if (status != OptimizingCompiler::SUCCEEDED) {
-    info->set_bailout_reason("failed/bailed out last time");
+    info->set_bailout_reason(kFailedBailedOutLastTime);
     status = optimizing_compiler->AbortOptimization();
   } else if (isolate->DebuggerHasBreakPoints()) {
-    info->set_bailout_reason("debugger is active");
+    info->set_bailout_reason(kDebuggerIsActive);
     status = optimizing_compiler->AbortOptimization();
   } else {
     status = optimizing_compiler->GenerateAndInstallCode();
@@ -258,8 +258,8 @@ class CompilationInfo {
     SaveHandle(&script_);
   }
 
-  const char* bailout_reason() const { return bailout_reason_; }
-  void set_bailout_reason(const char* reason) { bailout_reason_ = reason; }
+  BailoutReason bailout_reason() const { return bailout_reason_; }
+  void set_bailout_reason(BailoutReason reason) { bailout_reason_ = reason; }
 
   int prologue_offset() const {
     ASSERT_NE(kPrologueOffsetNotSet, prologue_offset_);
@@ -412,7 +412,7 @@ class CompilationInfo {
     }
   }
 
-  const char* bailout_reason_;
+  BailoutReason bailout_reason_;
 
   int prologue_offset_;
 
src/hydrogen.cc
@@ -2578,7 +2578,7 @@ void ValueContext::ReturnValue(HValue* value) {
   // The value is tracked in the bailout environment, and communicated
   // through the environment as the result of the expression.
   if (!arguments_allowed() && value->CheckFlag(HValue::kIsArguments)) {
-    owner()->Bailout("bad value context for arguments value");
+    owner()->Bailout(kBadValueContextForArgumentsValue);
   }
   owner()->Push(value);
 }
@@ -2630,7 +2630,7 @@ void EffectContext::ReturnContinuation(HIfContinuation* continuation,
 void ValueContext::ReturnInstruction(HInstruction* instr, BailoutId ast_id) {
   ASSERT(!instr->IsControlInstruction());
   if (!arguments_allowed() && instr->CheckFlag(HValue::kIsArguments)) {
-    return owner()->Bailout("bad value context for arguments object value");
+    return owner()->Bailout(kBadValueContextForArgumentsObjectValue);
   }
   owner()->AddInstruction(instr);
   owner()->Push(instr);
@@ -2643,7 +2643,7 @@ void ValueContext::ReturnInstruction(HInstruction* instr, BailoutId ast_id) {
 void ValueContext::ReturnControl(HControlInstruction* instr, BailoutId ast_id) {
   ASSERT(!instr->HasObservableSideEffects());
   if (!arguments_allowed() && instr->CheckFlag(HValue::kIsArguments)) {
-    return owner()->Bailout("bad value context for arguments object value");
+    return owner()->Bailout(kBadValueContextForArgumentsObjectValue);
   }
   HBasicBlock* materialize_false = owner()->graph()->CreateBasicBlock();
   HBasicBlock* materialize_true = owner()->graph()->CreateBasicBlock();
@@ -2733,7 +2733,7 @@ void TestContext::BuildBranch(HValue* value) {
   // branch.
   HOptimizedGraphBuilder* builder = owner();
   if (value != NULL && value->CheckFlag(HValue::kIsArguments)) {
-    builder->Bailout("arguments object value in a test context");
+    builder->Bailout(kArgumentsObjectValueInATestContext);
   }
   if (value->IsConstant()) {
     HConstant* constant_value = HConstant::cast(value);
@@ -2779,7 +2779,7 @@ void TestContext::BuildBranch(HValue* value) {
   } while (false)
 
 
-void HOptimizedGraphBuilder::Bailout(const char* reason) {
+void HOptimizedGraphBuilder::Bailout(BailoutReason reason) {
   current_info()->set_bailout_reason(reason);
   SetStackOverflow();
 }
@@ -2838,16 +2838,16 @@ void HOptimizedGraphBuilder::VisitExpressions(
 
 bool HOptimizedGraphBuilder::BuildGraph() {
   if (current_info()->function()->is_generator()) {
-    Bailout("function is a generator");
+    Bailout(kFunctionIsAGenerator);
     return false;
   }
   Scope* scope = current_info()->scope();
   if (scope->HasIllegalRedeclaration()) {
-    Bailout("function with illegal redeclaration");
+    Bailout(kFunctionWithIllegalRedeclaration);
     return false;
   }
   if (scope->calls_eval()) {
-    Bailout("function calls eval");
+    Bailout(kFunctionCallsEval);
     return false;
   }
   SetUpScope(scope);
@@ -2913,8 +2913,7 @@ bool HOptimizedGraphBuilder::BuildGraph() {
 }
 
 
-bool HGraph::Optimize(SmartArrayPointer<char>* bailout_reason) {
-  *bailout_reason = SmartArrayPointer<char>();
+bool HGraph::Optimize(BailoutReason* bailout_reason) {
   OrderBlocks();
   AssignDominators();
 
@@ -2935,14 +2934,12 @@ bool HGraph::Optimize(SmartArrayPointer<char>* bailout_reason) {
 
   Run<HPropagateDeoptimizingMarkPhase>();
   if (!CheckConstPhiUses()) {
-    *bailout_reason = SmartArrayPointer<char>(StrDup(
-        "Unsupported phi use of const variable"));
+    *bailout_reason = kUnsupportedPhiUseOfConstVariable;
    return false;
   }
   Run<HRedundantPhiEliminationPhase>();
   if (!CheckArgumentsPhiUses()) {
-    *bailout_reason = SmartArrayPointer<char>(StrDup(
-        "Unsupported phi use of arguments"));
+    *bailout_reason = kUnsupportedPhiUseOfArguments;
     return false;
   }
 
@@ -3124,7 +3121,7 @@ void HOptimizedGraphBuilder::SetUpScope(Scope* scope) {
   // not have declarations).
   if (scope->arguments() != NULL) {
    if (!scope->arguments()->IsStackAllocated()) {
-      return Bailout("context-allocated arguments");
+      return Bailout(kContextAllocatedArguments);
    }
 
     environment()->Bind(scope->arguments(),
@@ -3145,7 +3142,7 @@ void HOptimizedGraphBuilder::VisitBlock(Block* stmt) {
   ASSERT(current_block() != NULL);
   ASSERT(current_block()->HasPredecessor());
   if (stmt->scope() != NULL) {
-    return Bailout("ScopedBlock");
+    return Bailout(kScopedBlock);
   }
   BreakAndContinueInfo break_info(stmt);
   { BreakAndContinueScope push(&break_info, this);
@@ -3357,7 +3354,7 @@ void HOptimizedGraphBuilder::VisitWithStatement(WithStatement* stmt) {
   ASSERT(!HasStackOverflow());
   ASSERT(current_block() != NULL);
   ASSERT(current_block()->HasPredecessor());
-  return Bailout("WithStatement");
+  return Bailout(kWithStatement);
 }
 
 
@@ -3372,12 +3369,12 @@ void HOptimizedGraphBuilder::VisitSwitchStatement(SwitchStatement* stmt) {
   ZoneList<CaseClause*>* clauses = stmt->cases();
   int clause_count = clauses->length();
   if (clause_count > kCaseClauseLimit) {
-    return Bailout("SwitchStatement: too many clauses");
+    return Bailout(kSwitchStatementTooManyClauses);
   }
 
   ASSERT(stmt->switch_type() != SwitchStatement::UNKNOWN_SWITCH);
   if (stmt->switch_type() == SwitchStatement::GENERIC_SWITCH) {
-    return Bailout("SwitchStatement: mixed or non-literal switch labels");
+    return Bailout(kSwitchStatementMixedOrNonLiteralSwitchLabels);
   }
 
   HValue* context = environment()->context();
@@ -3669,16 +3666,16 @@ void HOptimizedGraphBuilder::VisitForInStatement(ForInStatement* stmt) {
   ASSERT(current_block()->HasPredecessor());
 
   if (!FLAG_optimize_for_in) {
-    return Bailout("ForInStatement optimization is disabled");
+    return Bailout(kForInStatementOptimizationIsDisabled);
   }
 
   if (stmt->for_in_type() != ForInStatement::FAST_FOR_IN) {
-    return Bailout("ForInStatement is not fast case");
+    return Bailout(kForInStatementIsNotFastCase);
   }
 
   if (!stmt->each()->IsVariableProxy() ||
       !stmt->each()->AsVariableProxy()->var()->IsStackLocal()) {
-    return Bailout("ForInStatement with non-local each variable");
+    return Bailout(kForInStatementWithNonLocalEachVariable);
   }
 
   Variable* each_var = stmt->each()->AsVariableProxy()->var();
@@ -3772,7 +3769,7 @@ void HOptimizedGraphBuilder::VisitForOfStatement(ForOfStatement* stmt) {
   ASSERT(!HasStackOverflow());
   ASSERT(current_block() != NULL);
   ASSERT(current_block()->HasPredecessor());
-  return Bailout("ForOfStatement");
+  return Bailout(kForOfStatement);
 }
 
 
@@ -3780,7 +3777,7 @@ void HOptimizedGraphBuilder::VisitTryCatchStatement(TryCatchStatement* stmt) {
   ASSERT(!HasStackOverflow());
   ASSERT(current_block() != NULL);
   ASSERT(current_block()->HasPredecessor());
-  return Bailout("TryCatchStatement");
+  return Bailout(kTryCatchStatement);
 }
 
 
@@ -3789,7 +3786,7 @@ void HOptimizedGraphBuilder::VisitTryFinallyStatement(
   ASSERT(!HasStackOverflow());
   ASSERT(current_block() != NULL);
   ASSERT(current_block()->HasPredecessor());
-  return Bailout("TryFinallyStatement");
+  return Bailout(kTryFinallyStatement);
 }
 
 
@@ -3797,7 +3794,7 @@ void HOptimizedGraphBuilder::VisitDebuggerStatement(DebuggerStatement* stmt) {
   ASSERT(!HasStackOverflow());
   ASSERT(current_block() != NULL);
   ASSERT(current_block()->HasPredecessor());
-  return Bailout("DebuggerStatement");
+  return Bailout(kDebuggerStatement);
 }
 
 
@@ -3843,7 +3840,7 @@ void HOptimizedGraphBuilder::VisitSharedFunctionInfoLiteral(
   ASSERT(!HasStackOverflow());
   ASSERT(current_block() != NULL);
   ASSERT(current_block()->HasPredecessor());
-  return Bailout("SharedFunctionInfoLiteral");
+  return Bailout(kSharedFunctionInfoLiteral);
 }
 
 
@@ -3923,7 +3920,7 @@ void HOptimizedGraphBuilder::VisitVariableProxy(VariableProxy* expr) {
     case Variable::UNALLOCATED: {
       if (IsLexicalVariableMode(variable->mode())) {
        // TODO(rossberg): should this be an ASSERT?
-        return Bailout("reference to global lexical variable");
+        return Bailout(kReferenceToGlobalLexicalVariable);
      }
      // Handle known global constants like 'undefined' specially to avoid a
      // load from a global cell for them.
@@ -3980,7 +3977,7 @@ void HOptimizedGraphBuilder::VisitVariableProxy(VariableProxy* expr) {
       if (value == graph()->GetConstantHole()) {
         ASSERT(IsDeclaredVariableMode(variable->mode()) &&
                variable->mode() != VAR);
-        return Bailout("reference to uninitialized variable");
+        return Bailout(kReferenceToUninitializedVariable);
       }
       return ast_context()->ReturnValue(value);
     }
@@ -3992,7 +3989,7 @@ void HOptimizedGraphBuilder::VisitVariableProxy(VariableProxy* expr) {
     }
 
     case Variable::LOOKUP:
-      return Bailout("reference to a variable which requires dynamic lookup");
+      return Bailout(kReferenceToAVariableWhichRequiresDynamicLookup);
   }
 }
 
@@ -4291,7 +4288,7 @@ void HOptimizedGraphBuilder::VisitObjectLiteral(ObjectLiteral* expr) {
       case ObjectLiteral::Property::PROTOTYPE:
      case ObjectLiteral::Property::SETTER:
      case ObjectLiteral::Property::GETTER:
-        return Bailout("Object literal with complex property");
+        return Bailout(kObjectLiteralWithComplexProperty);
      default: UNREACHABLE();
    }
  }
@@ -4330,7 +4327,7 @@ void HOptimizedGraphBuilder::VisitArrayLiteral(ArrayLiteral* expr) {
     raw_boilerplate = Runtime::CreateArrayLiteralBoilerplate(
         isolate(), literals, expr->constant_elements());
     if (raw_boilerplate.is_null()) {
-      return Bailout("array boilerplate creation failed");
+      return Bailout(kArrayBoilerplateCreationFailed);
    }
 
     site = isolate()->factory()->NewAllocationSite();
@@ -4421,7 +4418,7 @@ void HOptimizedGraphBuilder::VisitArrayLiteral(ArrayLiteral* expr) {
 
     CHECK_ALIVE(VisitForValue(subexpr));
     HValue* value = Pop();
-    if (!Smi::IsValid(i)) return Bailout("Non-smi key in array literal");
+    if (!Smi::IsValid(i)) return Bailout(kNonSmiKeyInArrayLiteral);
 
     elements = AddLoadElements(literal);
 
@@ -4501,7 +4498,7 @@ HInstruction* HOptimizedGraphBuilder::BuildStoreNamedField(
     if (proto_result.IsProperty()) {
       // If the inherited property could induce readonly-ness, bail out.
       if (proto_result.IsReadOnly() || !proto_result.IsCacheable()) {
-        Bailout("improper object on prototype chain for store");
+        Bailout(kImproperObjectOnPrototypeChainForStore);
        return NULL;
      }
      // We only need to check up to the preexisting property.
@@ -5005,7 +5002,7 @@ void HOptimizedGraphBuilder::HandleCompoundAssignment(Assignment* expr) {
   if (proxy != NULL) {
     Variable* var = proxy->var();
     if (var->mode() == LET) {
-      return Bailout("unsupported let compound assignment");
+      return Bailout(kUnsupportedLetCompoundAssignment);
     }
 
     CHECK_ALIVE(VisitForValue(operation));
@@ -5021,7 +5018,7 @@ void HOptimizedGraphBuilder::HandleCompoundAssignment(Assignment* expr) {
       case Variable::PARAMETER:
       case Variable::LOCAL:
         if (var->mode() == CONST) {
-          return Bailout("unsupported const compound assignment");
+          return Bailout(kUnsupportedConstCompoundAssignment);
        }
        BindIfLive(var, Top());
        break;
@@ -5037,8 +5034,7 @@ void HOptimizedGraphBuilder::HandleCompoundAssignment(Assignment* expr) {
           int count = current_info()->scope()->num_parameters();
           for (int i = 0; i < count; ++i) {
             if (var == current_info()->scope()->parameter(i)) {
-              Bailout(
-                  "assignment to parameter, function uses arguments object");
+              Bailout(kAssignmentToParameterFunctionUsesArgumentsObject);
            }
          }
        }
@@ -5069,7 +5065,7 @@ void HOptimizedGraphBuilder::HandleCompoundAssignment(Assignment* expr) {
      }
 
       case Variable::LOOKUP:
-        return Bailout("compound assignment to lookup slot");
+        return Bailout(kCompoundAssignmentToLookupSlot);
    }
    return ast_context()->ReturnValue(Pop());
 
@@ -5158,7 +5154,7 @@ void HOptimizedGraphBuilder::HandleCompoundAssignment(Assignment* expr) {
    }
 
  } else {
-    return Bailout("invalid lhs in compound assignment");
+    return Bailout(kInvalidLhsInCompoundAssignment);
  }
 }
 
@@ -5195,11 +5191,11 @@ void HOptimizedGraphBuilder::VisitAssignment(Assignment* expr) {
      }
    } else if (var->mode() == CONST_HARMONY) {
      if (expr->op() != Token::INIT_CONST_HARMONY) {
-        return Bailout("non-initializer assignment to const");
+        return Bailout(kNonInitializerAssignmentToConst);
      }
    }
 
-    if (proxy->IsArguments()) return Bailout("assignment to arguments");
+    if (proxy->IsArguments()) return Bailout(kAssignmentToArguments);
 
    // Handle the assignment.
    switch (var->location()) {
@@ -5218,7 +5214,7 @@ void HOptimizedGraphBuilder::VisitAssignment(Assignment* expr) {
        if (var->mode() == LET && expr->op() == Token::ASSIGN) {
          HValue* env_value = environment()->Lookup(var);
          if (env_value == graph()->GetConstantHole()) {
-            return Bailout("assignment to let variable before initialization");
+            return Bailout(kAssignmentToLetVariableBeforeInitialization);
          }
        }
        // We do not allow the arguments object to occur in a context where it
@@ -5240,7 +5236,7 @@ void HOptimizedGraphBuilder::VisitAssignment(Assignment* expr) {
          int count = current_info()->scope()->num_parameters();
          for (int i = 0; i < count; ++i) {
            if (var == current_info()->scope()->parameter(i)) {
-              return Bailout("assignment to parameter in arguments object");
+              return Bailout(kAssignmentToParameterInArgumentsObject);
            }
          }
        }
@@ -5281,10 +5277,10 @@ void HOptimizedGraphBuilder::VisitAssignment(Assignment* expr) {
      }
 
      case Variable::LOOKUP:
-        return Bailout("assignment to LOOKUP variable");
+        return Bailout(kAssignmentToLOOKUPVariable);
    }
  } else {
-    return Bailout("invalid left-hand side in assignment");
+    return Bailout(kInvalidLeftHandSideInAssignment);
  }
 }
 
@@ -6276,7 +6272,7 @@ bool HOptimizedGraphBuilder::TryInline(CallKind call_kind,
    if (target_info.isolate()->has_pending_exception()) {
      // Parse or scope error, never optimize this function.
      SetStackOverflow();
-      target_shared->DisableOptimization("parse/scope error");
+      target_shared->DisableOptimization(kParseScopeError);
    }
    TraceInline(target, caller, "parse failure");
    return false;
@@ -6415,7 +6411,7 @@ bool HOptimizedGraphBuilder::TryInline(CallKind call_kind,
    // Bail out if the inline function did, as we cannot residualize a call
    // instead.
    TraceInline(target, caller, "inline graph construction failed");
-    target_shared->DisableOptimization("inlining bailed out");
+    target_shared->DisableOptimization(kInliningBailedOut);
    inline_bailout_ = true;
    delete target_state;
    return true;
@@ -6960,7 +6956,7 @@ void HOptimizedGraphBuilder::VisitCall(Call* expr) {
  } else {
    VariableProxy* proxy = expr->expression()->AsVariableProxy();
    if (proxy != NULL && proxy->var()->is_possibly_eval(isolate())) {
-      return Bailout("possible direct call to eval");
+      return Bailout(kPossibleDirectCallToEval);
    }
 
    bool global_call = proxy != NULL && proxy->var()->IsUnallocated();
@@ -7228,7 +7224,7 @@ void HOptimizedGraphBuilder::VisitCallRuntime(CallRuntime* expr) {
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  if (expr->is_jsruntime()) {
-    return Bailout("call to a JavaScript runtime function");
+    return Bailout(kCallToAJavaScriptRuntimeFunction);
  }
 
  const Runtime::Function* function = expr->function();
@@ -7295,7 +7291,7 @@ void HOptimizedGraphBuilder::VisitDelete(UnaryOperation* expr) {
  } else if (proxy != NULL) {
    Variable* var = proxy->var();
    if (var->IsUnallocated()) {
-      Bailout("delete with global variable");
+      Bailout(kDeleteWithGlobalVariable);
    } else if (var->IsStackAllocated() || var->IsContextSlot()) {
      // Result of deleting non-global variables is false. 'this' is not
      // really a variable, though we implement it as one. The
@@ -7305,7 +7301,7 @@ void HOptimizedGraphBuilder::VisitDelete(UnaryOperation* expr) {
          : graph()->GetConstantFalse();
      return ast_context()->ReturnValue(value);
    } else {
-      Bailout("delete with non-global variable");
+      Bailout(kDeleteWithNonGlobalVariable);
    }
  } else {
    // Result of deleting non-property, non-variable reference is true.
@@ -7437,7 +7433,7 @@ void HOptimizedGraphBuilder::VisitCountOperation(CountOperation* expr) {
  VariableProxy* proxy = target->AsVariableProxy();
  Property* prop = target->AsProperty();
  if (proxy == NULL && prop == NULL) {
-    return Bailout("invalid lhs in count operation");
+    return Bailout(kInvalidLhsInCountOperation);
  }
 
  // Match the full code generator stack by simulating an extra stack
@@ -7451,7 +7447,7 @@ void HOptimizedGraphBuilder::VisitCountOperation(CountOperation* expr) {
  if (proxy != NULL) {
    Variable* var = proxy->var();
    if (var->mode() == CONST) {
-      return Bailout("unsupported count operation with const");
+      return Bailout(kUnsupportedCountOperationWithConst);
    }
    // Argument of the count operation is a variable, not a property.
    ASSERT(prop == NULL);
@@ -7485,7 +7481,7 @@ void HOptimizedGraphBuilder::VisitCountOperation(CountOperation* expr) {
          int count = current_info()->scope()->num_parameters();
          for (int i = 0; i < count; ++i) {
            if (var == current_info()->scope()->parameter(i)) {
-              return Bailout("assignment to parameter in arguments object");
+              return Bailout(kAssignmentToParameterInArgumentsObject);
            }
          }
        }
@@ -7502,7 +7498,7 @@ void HOptimizedGraphBuilder::VisitCountOperation(CountOperation* expr) {
      }
 
      case Variable::LOOKUP:
-        return Bailout("lookup variable in count operation");
+        return Bailout(kLookupVariableInCountOperation);
    }
 
  } else {
@@ -8091,7 +8087,7 @@ void HOptimizedGraphBuilder::VisitCompareOperation(CompareOperation* expr) {
        }
      }
      default:
-        return Bailout("Unsupported non-primitive compare");
+        return Bailout(kUnsupportedNonPrimitiveCompare);
    }
  } else if (combined_type->Is(Type::InternalizedString()) &&
             Token::IsEqualityOp(op)) {
@@ -8558,7 +8554,7 @@ void HOptimizedGraphBuilder::VisitVariableDeclaration(
      }
      break;
    case Variable::LOOKUP:
-      return Bailout("unsupported lookup slot in declaration");
+      return Bailout(kUnsupportedLookupSlotInDeclaration);
  }
 }
 
@@ -8596,7 +8592,7 @@ void HOptimizedGraphBuilder::VisitFunctionDeclaration(
      break;
    }
    case Variable::LOOKUP:
-      return Bailout("unsupported lookup slot in declaration");
+      return Bailout(kUnsupportedLookupSlotInDeclaration);
  }
}
 
@@ -8717,7 +8713,7 @@ void HOptimizedGraphBuilder::GenerateIsObject(CallRuntime* call) {
 
 
 void HOptimizedGraphBuilder::GenerateIsNonNegativeSmi(CallRuntime* call) {
-  return Bailout("inlined runtime function: IsNonNegativeSmi");
+  return Bailout(kInlinedRuntimeFunctionIsNonNegativeSmi);
}
 
 
@@ -8733,8 +8729,7 @@ void HOptimizedGraphBuilder::GenerateIsUndetectableObject(CallRuntime* call) {
 
 void HOptimizedGraphBuilder::GenerateIsStringWrapperSafeForDefaultValueOf(
|
||||
CallRuntime* call) {
|
||||
return Bailout(
|
||||
"inlined runtime function: IsStringWrapperSafeForDefaultValueOf");
|
||||
return Bailout(kInlinedRuntimeFunctionIsStringWrapperSafeForDefaultValueOf);
|
||||
}
|
||||
|
||||
|
||||
@ -8788,7 +8783,7 @@ void HOptimizedGraphBuilder::GenerateArguments(CallRuntime* call) {
|
||||
void HOptimizedGraphBuilder::GenerateClassOf(CallRuntime* call) {
|
||||
// The special form detected by IsClassOfTest is detected before we get here
|
||||
// and does not cause a bailout.
|
||||
return Bailout("inlined runtime function: ClassOf");
|
||||
return Bailout(kInlinedRuntimeFunctionClassOf);
|
||||
}
|
||||
|
||||
|
||||
@ -9005,7 +9000,7 @@ void HOptimizedGraphBuilder::GenerateRegExpConstructResult(CallRuntime* call) {
|
||||
|
||||
// Support for fast native caches.
|
||||
void HOptimizedGraphBuilder::GenerateGetFromCache(CallRuntime* call) {
|
||||
return Bailout("inlined runtime function: GetFromCache");
|
||||
return Bailout(kInlinedRuntimeFunctionGetFromCache);
|
||||
}
|
||||
|
||||
|
||||
@ -9135,7 +9130,7 @@ void HOptimizedGraphBuilder::GenerateMathSqrt(CallRuntime* call) {
|
||||
|
||||
// Check whether two RegExps are equivalent
|
||||
void HOptimizedGraphBuilder::GenerateIsRegExpEquivalent(CallRuntime* call) {
|
||||
return Bailout("inlined runtime function: IsRegExpEquivalent");
|
||||
return Bailout(kInlinedRuntimeFunctionIsRegExpEquivalent);
|
||||
}
|
||||
|
||||
|
||||
@ -9149,18 +9144,18 @@ void HOptimizedGraphBuilder::GenerateGetCachedArrayIndex(CallRuntime* call) {
|
||||
|
||||
|
||||
void HOptimizedGraphBuilder::GenerateFastAsciiArrayJoin(CallRuntime* call) {
|
||||
return Bailout("inlined runtime function: FastAsciiArrayJoin");
|
||||
return Bailout(kInlinedRuntimeFunctionFastAsciiArrayJoin);
|
||||
}
|
||||
|
||||
|
||||
// Support for generators.
|
||||
void HOptimizedGraphBuilder::GenerateGeneratorNext(CallRuntime* call) {
|
||||
return Bailout("inlined runtime function: GeneratorNext");
|
||||
return Bailout(kInlinedRuntimeFunctionGeneratorNext);
|
||||
}
|
||||
|
||||
|
||||
void HOptimizedGraphBuilder::GenerateGeneratorThrow(CallRuntime* call) {
|
||||
return Bailout("inlined runtime function: GeneratorThrow");
|
||||
return Bailout(kInlinedRuntimeFunctionGeneratorThrow);
|
||||
}
|
||||
|
||||
|
||||
|
@ -367,7 +367,7 @@ class HGraph: public ZoneObject {
return NULL;
}

bool Optimize(SmartArrayPointer<char>* bailout_reason);
bool Optimize(BailoutReason* bailout_reason);

#ifdef DEBUG
void Verify(bool do_full_verify) const;
@ -1726,7 +1726,7 @@ class HOptimizedGraphBuilder: public HGraphBuilder, public AstVisitor {

HValue* context() { return environment()->context(); }

void Bailout(const char* reason);
void Bailout(BailoutReason reason);

HBasicBlock* CreateJoin(HBasicBlock* first,
HBasicBlock* second,
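The kFooBar identifiers used throughout these hunks (kAssignmentToArguments, kParseScopeError, and so on) are values of the new BailoutReason enum, and MacroAssembler::Abort further down recovers the message text via GetBailoutReason(). The enum's definition is not part of the hunks shown here; as a rough sketch only, with the macro name and entries assumed for illustration, the central list would look something like:

// Illustrative sketch, not taken from this diff: one X-macro list pairs each
// enum constant with its message string, so call sites hand around a small
// enum value and the text is looked up only when it is actually needed.
#define ERROR_MESSAGES_LIST(V)                                 \
  V(kNoReason, "no reason")                                    \
  V(kAssignmentToArguments, "Assignment to arguments")         \
  V(kParseScopeError, "Parse/scope error")

#define ERROR_MESSAGES_CONSTANTS(C, T) C,
enum BailoutReason {
  ERROR_MESSAGES_LIST(ERROR_MESSAGES_CONSTANTS)
  kLastErrorMessage
};
#undef ERROR_MESSAGES_CONSTANTS

const char* GetBailoutReason(BailoutReason reason);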
@ -241,7 +241,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
if (FLAG_debug_code) {
__ cmp(esi, edi);
__ Assert(less_equal,
"Unexpected number of pre-allocated property fields.");
kUnexpectedNumberOfPreAllocatedPropertyFields);
}
__ InitializeFieldsWithFiller(ecx, esi, edx);
__ mov(edx, factory->one_pointer_filler_map());
@ -272,7 +272,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
__ sub(edx, ecx);
// Done if no extra properties are to be allocated.
__ j(zero, &allocated);
__ Assert(positive, "Property allocation count failed.");
__ Assert(positive, kPropertyAllocationCountFailed);

// Scale the number of elements by pointer size and add the header for
// FixedArrays to the start of the next object calculation from above.
@ -654,7 +654,7 @@ static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
__ ret(2 * kPointerSize); // Remove state, eax.

__ bind(&not_tos_eax);
__ Abort("no cases left");
__ Abort(kNoCasesLeft);
}


@ -1033,9 +1033,9 @@ void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
__ mov(ebx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
// Will both indicate a NULL and a Smi.
__ test(ebx, Immediate(kSmiTagMask));
__ Assert(not_zero, "Unexpected initial map for InternalArray function");
__ Assert(not_zero, kUnexpectedInitialMapForInternalArrayFunction);
__ CmpObjectType(ebx, MAP_TYPE, ecx);
__ Assert(equal, "Unexpected initial map for InternalArray function");
__ Assert(equal, kUnexpectedInitialMapForInternalArrayFunction);
}

// Run the native code for the InternalArray function called as a normal
@ -1062,9 +1062,9 @@ void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
__ mov(ebx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
// Will both indicate a NULL and a Smi.
__ test(ebx, Immediate(kSmiTagMask));
__ Assert(not_zero, "Unexpected initial map for Array function");
__ Assert(not_zero, kUnexpectedInitialMapForArrayFunction);
__ CmpObjectType(ebx, MAP_TYPE, ecx);
__ Assert(equal, "Unexpected initial map for Array function");
__ Assert(equal, kUnexpectedInitialMapForArrayFunction);
}

// Run the native code for the Array function called as a normal function.
@ -1092,7 +1092,7 @@ void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
if (FLAG_debug_code) {
__ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, ecx);
__ cmp(edi, ecx);
__ Assert(equal, "Unexpected String function");
__ Assert(equal, kUnexpectedStringFunction);
}

// Load the first argument into eax and get rid of the rest
@ -1137,9 +1137,9 @@ void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
if (FLAG_debug_code) {
__ cmpb(FieldOperand(ecx, Map::kInstanceSizeOffset),
JSValue::kSize >> kPointerSizeLog2);
__ Assert(equal, "Unexpected string wrapper instance size");
__ Assert(equal, kUnexpectedStringWrapperInstanceSize);
__ cmpb(FieldOperand(ecx, Map::kUnusedPropertyFieldsOffset), 0);
__ Assert(equal, "Unexpected unused properties of string wrapper");
__ Assert(equal, kUnexpectedUnusedPropertiesOfStringWrapper);
}
__ mov(FieldOperand(eax, HeapObject::kMapOffset), ecx);

@ -511,9 +511,8 @@ void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
Label after_sentinel;
__ JumpIfNotSmi(ecx, &after_sentinel, Label::kNear);
if (FLAG_debug_code) {
const char* message = "Expected 0 as a Smi sentinel";
__ cmp(ecx, 0);
__ Assert(equal, message);
__ Assert(equal, kExpected0AsASmiSentinel);
}
__ mov(ecx, GlobalObjectOperand());
__ mov(ecx, FieldOperand(ecx, GlobalObject::kNativeContextOffset));
@ -3469,9 +3468,9 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
__ mov(ecx, FieldOperand(eax, JSRegExp::kDataOffset));
if (FLAG_debug_code) {
__ test(ecx, Immediate(kSmiTagMask));
__ Check(not_zero, "Unexpected type for RegExp data, FixedArray expected");
__ Check(not_zero, kUnexpectedTypeForRegExpDataFixedArrayExpected);
__ CmpObjectType(ecx, FIXED_ARRAY_TYPE, ebx);
__ Check(equal, "Unexpected type for RegExp data, FixedArray expected");
__ Check(equal, kUnexpectedTypeForRegExpDataFixedArrayExpected);
}

// ecx: RegExp data (FixedArray)
@ -3831,7 +3830,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
// Assert that we do not have a cons or slice (indirect strings) here.
// Sequential strings have already been ruled out.
__ test_b(ebx, kIsIndirectStringMask);
__ Assert(zero, "external string expected, but not found");
__ Assert(zero, kExternalStringExpectedButNotFound);
}
__ mov(eax, FieldOperand(eax, ExternalString::kResourceDataOffset));
// Move the pointer so that offset-wise, it looks like a sequential string.
@ -4326,7 +4325,7 @@ void ICCompareStub::GenerateGeneric(MacroAssembler* masm) {
edi);
}
#ifdef DEBUG
__ Abort("Unexpected fall-through from string comparison");
__ Abort(kUnexpectedFallThroughFromStringComparison);
#endif

__ bind(&check_unequal_objects);
@ -5085,9 +5084,9 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
__ sub(scratch, Operand(esp, 1 * kPointerSize));
if (FLAG_debug_code) {
__ cmpb(Operand(scratch, 0), kCmpEdiOperandByte1);
__ Assert(equal, "InstanceofStub unexpected call site cache (cmp 1)");
__ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheCmp1);
__ cmpb(Operand(scratch, 1), kCmpEdiOperandByte2);
__ Assert(equal, "InstanceofStub unexpected call site cache (cmp 2)");
__ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheCmp2);
}
__ mov(scratch, Operand(scratch, kDeltaToCmpImmediate));
__ mov(Operand(scratch, 0), map);
@ -5120,7 +5119,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
__ sub(scratch, Operand(esp, 1 * kPointerSize));
if (FLAG_debug_code) {
__ cmpb(Operand(scratch, kDeltaToMov), kMovEaxImmediateByte);
__ Assert(equal, "InstanceofStub unexpected call site cache (mov)");
__ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheMov);
}
__ mov(Operand(scratch, kDeltaToMovImmediate), eax);
if (!ReturnTrueFalseObject()) {
@ -5142,7 +5141,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
__ sub(scratch, Operand(esp, 1 * kPointerSize));
if (FLAG_debug_code) {
__ cmpb(Operand(scratch, kDeltaToMov), kMovEaxImmediateByte);
__ Assert(equal, "InstanceofStub unexpected call site cache (mov)");
__ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheMov);
}
__ mov(Operand(scratch, kDeltaToMovImmediate), eax);
if (!ReturnTrueFalseObject()) {
@ -5255,7 +5254,7 @@ void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
void StringCharCodeAtGenerator::GenerateSlow(
MacroAssembler* masm,
const RuntimeCallHelper& call_helper) {
__ Abort("Unexpected fallthrough to CharCodeAt slow case");
__ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase);

// Index is not a smi.
__ bind(&index_not_smi_);
@ -5305,7 +5304,7 @@ void StringCharCodeAtGenerator::GenerateSlow(
call_helper.AfterCall(masm);
__ jmp(&exit_);

__ Abort("Unexpected fallthrough from CharCodeAt slow case");
__ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase);
}


@ -5340,7 +5339,7 @@ void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
void StringCharFromCodeGenerator::GenerateSlow(
MacroAssembler* masm,
const RuntimeCallHelper& call_helper) {
__ Abort("Unexpected fallthrough to CharFromCode slow case");
__ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase);

__ bind(&slow_case_);
call_helper.BeforeCall(masm);
@ -5352,7 +5351,7 @@ void StringCharFromCodeGenerator::GenerateSlow(
call_helper.AfterCall(masm);
__ jmp(&exit_);

__ Abort("Unexpected fallthrough from CharFromCode slow case");
__ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase);
}


@ -7482,7 +7481,7 @@ static void CreateArrayDispatch(MacroAssembler* masm) {
}

// If we reached this point there is a problem.
__ Abort("Unexpected ElementsKind in array constructor");
__ Abort(kUnexpectedElementsKindInArrayConstructor);
}


@ -7545,7 +7544,7 @@ static void CreateArrayDispatchOneArgument(MacroAssembler* masm) {
}

// If we reached this point there is a problem.
__ Abort("Unexpected ElementsKind in array constructor");
__ Abort(kUnexpectedElementsKindInArrayConstructor);
}


@ -7610,9 +7609,9 @@ void ArrayConstructorStub::Generate(MacroAssembler* masm) {
__ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
// Will both indicate a NULL and a Smi.
__ test(ecx, Immediate(kSmiTagMask));
__ Assert(not_zero, "Unexpected initial map for Array function");
__ Assert(not_zero, kUnexpectedInitialMapForArrayFunction);
__ CmpObjectType(ecx, MAP_TYPE, ecx);
__ Assert(equal, "Unexpected initial map for Array function");
__ Assert(equal, kUnexpectedInitialMapForArrayFunction);

// We should either have undefined in ebx or a valid cell
Label okay_here;
@ -7620,7 +7619,7 @@ void ArrayConstructorStub::Generate(MacroAssembler* masm) {
__ cmp(ebx, Immediate(undefined_sentinel));
__ j(equal, &okay_here);
__ cmp(FieldOperand(ebx, 0), Immediate(cell_map));
__ Assert(equal, "Expected property cell in register ebx");
__ Assert(equal, kExpectedPropertyCellInRegisterEbx);
__ bind(&okay_here);
}

@ -7724,9 +7723,9 @@ void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
__ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
// Will both indicate a NULL and a Smi.
__ test(ecx, Immediate(kSmiTagMask));
__ Assert(not_zero, "Unexpected initial map for Array function");
__ Assert(not_zero, kUnexpectedInitialMapForArrayFunction);
__ CmpObjectType(ecx, MAP_TYPE, ecx);
__ Assert(equal, "Unexpected initial map for Array function");
__ Assert(equal, kUnexpectedInitialMapForArrayFunction);
}

// Figure out the right elements kind
@ -7745,7 +7744,7 @@ void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
__ j(equal, &done);
__ cmp(ecx, Immediate(FAST_HOLEY_ELEMENTS));
__ Assert(equal,
"Invalid ElementsKind for InternalArray or InternalPackedArray");
kInvalidElementsKindForInternalArrayOrInternalPackedArray);
__ bind(&done);
}

@ -779,7 +779,7 @@ void ElementsTransitionGenerator::GenerateSmiToDouble(

if (FLAG_debug_code) {
__ cmp(ebx, masm->isolate()->factory()->the_hole_value());
__ Assert(equal, "object found in smi-only array");
__ Assert(equal, kObjectFoundInSmiOnlyArray);
}

if (CpuFeatures::IsSupported(SSE2)) {
@ -1011,7 +1011,7 @@ void StringCharLoadGenerator::Generate(MacroAssembler* masm,
// Assert that we do not have a cons or slice (indirect strings) here.
// Sequential strings have already been ruled out.
__ test(result, Immediate(kIsIndirectStringMask));
__ Assert(zero, "external string expected, but not found");
__ Assert(zero, kExternalStringExpectedButNotFound);
}
// Rule out short external strings.
STATIC_CHECK(kShortExternalStringTag != 0);

@ -128,7 +128,7 @@ static void Generate_DebugBreakCallHelper(MacroAssembler* masm,
if ((non_object_regs & (1 << r)) != 0) {
if (FLAG_debug_code) {
__ test(reg, Immediate(0xc0000000));
__ Assert(zero, "Unable to encode value as smi");
__ Assert(zero, kUnableToEncodeValueAsSmi);
}
__ SmiTag(reg);
__ push(reg);

@ -625,7 +625,7 @@ void Deoptimizer::EntryGenerator::Generate() {
__ pop(ecx);
if (FLAG_debug_code) {
__ cmp(ecx, Immediate(kAlignmentZapValue));
__ Assert(equal, "alignment marker expected");
__ Assert(equal, kAlignmentMarkerExpected);
}
__ bind(&no_padding);
} else {

@ -745,9 +745,9 @@ void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
// Check that we're not inside a with or catch context.
__ mov(ebx, FieldOperand(esi, HeapObject::kMapOffset));
__ cmp(ebx, isolate()->factory()->with_context_map());
__ Check(not_equal, "Declaration in with context.");
__ Check(not_equal, kDeclarationInWithContext);
__ cmp(ebx, isolate()->factory()->catch_context_map());
__ Check(not_equal, "Declaration in catch context.");
__ Check(not_equal, kDeclarationInCatchContext);
}
}

@ -2169,7 +2169,7 @@ void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
__ Push(Smi::FromInt(resume_mode));
__ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
// Not reached: the runtime call returns elsewhere.
__ Abort("Generator failed to resume.");
__ Abort(kGeneratorFailedToResume);

// Throw error if we attempt to operate on a running generator.
__ bind(&wrong_state);
@ -2468,7 +2468,7 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
// Check for an uninitialized let binding.
__ mov(edx, location);
__ cmp(edx, isolate()->factory()->the_hole_value());
__ Check(equal, "Let binding re-initialization.");
__ Check(equal, kLetBindingReInitialization);
}
// Perform the assignment.
__ mov(location, eax);
@ -3430,15 +3430,15 @@ void FullCodeGenerator::EmitSeqStringSetCharCheck(Register string,
Register value,
uint32_t encoding_mask) {
__ test(index, Immediate(kSmiTagMask));
__ Check(zero, "Non-smi index");
__ Check(zero, kNonSmiIndex);
__ test(value, Immediate(kSmiTagMask));
__ Check(zero, "Non-smi value");
__ Check(zero, kNonSmiValue);

__ cmp(index, FieldOperand(string, String::kLengthOffset));
__ Check(less, "Index is too large");
__ Check(less, kIndexIsTooLarge);

__ cmp(index, Immediate(Smi::FromInt(0)));
__ Check(greater_equal, "Index is negative");
__ Check(greater_equal, kIndexIsNegative);

__ push(value);
__ mov(value, FieldOperand(string, HeapObject::kMapOffset));
@ -3446,7 +3446,7 @@ void FullCodeGenerator::EmitSeqStringSetCharCheck(Register string,

__ and_(value, Immediate(kStringRepresentationMask | kStringEncodingMask));
__ cmp(value, Immediate(encoding_mask));
__ Check(equal, "Unexpected string type");
__ Check(equal, kUnexpectedStringType);
__ pop(value);
}

@ -3818,7 +3818,7 @@ void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
Handle<FixedArray> jsfunction_result_caches(
isolate()->native_context()->jsfunction_result_caches());
if (jsfunction_result_caches->length() <= cache_id) {
__ Abort("Attempt to use undefined cache.");
__ Abort(kAttemptToUseUndefinedCache);
__ mov(eax, isolate()->factory()->undefined_value());
context()->Plug(eax);
return;
@ -4000,7 +4000,7 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
// scratch, string_length, elements.
if (generate_debug_code_) {
__ cmp(index, array_length);
__ Assert(less, "No empty arrays here in EmitFastAsciiArrayJoin");
__ Assert(less, kNoEmptyArraysHereInEmitFastAsciiArrayJoin);
}
__ bind(&loop);
__ mov(string, FieldOperand(elements,

@ -483,7 +483,7 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
// based on 32 bits of the map pointer and the string hash.
if (FLAG_debug_code) {
__ cmp(eax, FieldOperand(edx, HeapObject::kMapOffset));
__ Check(equal, "Map is no longer in eax.");
__ Check(equal, kMapIsNoLongerInEax);
}
__ mov(ebx, eax); // Keep the map around for later.
__ shr(eax, KeyedLookupCache::kMapHashShift);

@ -113,7 +113,7 @@ void LCodeGen::FinishCode(Handle<Code> code) {
}


void LCodeGen::Abort(const char* reason) {
void LCodeGen::Abort(BailoutReason reason) {
info()->set_bailout_reason(reason);
status_ = ABORTED;
}
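Because CompilationInfo now stores the enum instead of a string, a consumer has to map it back through GetBailoutReason() whenever text is wanted. A minimal sketch of such a read-out (the bailout_reason() accessor and kNoReason value are assumed here, not shown in this diff):

// Sketch only: turn the stored one-byte reason back into readable text.
BailoutReason reason = info()->bailout_reason();
if (reason != kNoReason) {
  PrintF("Bailed out: %s\n", GetBailoutReason(reason));
}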
@ -220,7 +220,7 @@ bool LCodeGen::GeneratePrologue() {
dynamic_frame_alignment_ &&
FLAG_debug_code) {
__ test(esp, Immediate(kPointerSize));
__ Assert(zero, "frame is expected to be aligned");
__ Assert(zero, kFrameIsExpectedToBeAligned);
}

// Reserve space for the stack slots needed by the code.
@ -948,7 +948,7 @@ void LCodeGen::DeoptimizeIf(Condition cc,
Address entry =
Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type);
if (entry == NULL) {
Abort("bailout was not prepared");
Abort(kBailoutWasNotPrepared);
return;
}

@ -1976,7 +1976,7 @@ void LCodeGen::DoSeqStringSetChar(LSeqStringSetChar* instr) {
static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
__ cmp(value, Immediate(encoding == String::ONE_BYTE_ENCODING
? one_byte_seq_type : two_byte_seq_type));
__ Check(equal, "Unexpected string type");
__ Check(equal, kUnexpectedStringType);
__ pop(value);
}

@ -2863,7 +2863,7 @@ void LCodeGen::EmitReturn(LReturn* instr, bool dynamic_frame_alignment) {
__ cmp(Operand(esp,
(parameter_count + extra_value_count) * kPointerSize),
Immediate(kAlignmentZapValue));
__ Assert(equal, "expected alignment marker");
__ Assert(equal, kExpectedAlignmentMarker);
}
__ Ret((parameter_count + extra_value_count) * kPointerSize, ecx);
} else {
@ -2876,7 +2876,7 @@ void LCodeGen::EmitReturn(LReturn* instr, bool dynamic_frame_alignment) {
__ cmp(Operand(esp, reg, times_pointer_size,
extra_value_count * kPointerSize),
Immediate(kAlignmentZapValue));
__ Assert(equal, "expected alignment marker");
__ Assert(equal, kExpectedAlignmentMarker);
}

// emit code to restore stack based on instr->parameter_count()
@ -3447,7 +3447,7 @@ Operand LCodeGen::BuildFastArrayOperand(
if (key->IsConstantOperand()) {
int constant_value = ToInteger32(LConstantOperand::cast(key));
if (constant_value & 0xF0000000) {
Abort("array index constant value too big");
Abort(kArrayIndexConstantValueTooBig);
}
return Operand(elements_pointer_reg,
((constant_value + additional_index) << shift_size)

@ -212,7 +212,7 @@ class LCodeGen BASE_EMBEDDED {

int GetStackSlotCount() const { return chunk()->spill_slot_count(); }

void Abort(const char* reason);
void Abort(BailoutReason reason);
void FPRINTF_CHECKING Comment(const char* format, ...);

void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }

@ -487,7 +487,7 @@ LPlatformChunk* LChunkBuilder::Build() {
}


void LChunkBuilder::Abort(const char* reason) {
void LChunkBuilder::Abort(BailoutReason reason) {
info()->set_bailout_reason(reason);
status_ = ABORTED;
}
@ -698,7 +698,7 @@ LUnallocated* LChunkBuilder::TempRegister() {
new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER);
int vreg = allocator_->GetVirtualRegister();
if (!allocator_->AllocationOk()) {
Abort("Out of virtual registers while trying to allocate temp register.");
Abort(kOutOfVirtualRegistersWhileTryingToAllocateTempRegister);
vreg = 0;
}
operand->set_virtual_register(vreg);
@ -2567,7 +2567,7 @@ LInstruction* LChunkBuilder::DoParameter(HParameter* instr) {
LInstruction* LChunkBuilder::DoUnknownOSRValue(HUnknownOSRValue* instr) {
int spill_index = chunk()->GetNextSpillIndex(false); // Not double-width.
if (spill_index > LUnallocated::kMaxFixedSlotIndex) {
Abort("Too many spill slots needed for OSR");
Abort(kTooManySpillSlotsNeededForOSR);
spill_index = 0;
}
return DefineAsSpilled(new(zone()) LUnknownOSRValue, spill_index);

@ -2800,7 +2800,7 @@ class LChunkBuilder BASE_EMBEDDED {
bool is_done() const { return status_ == DONE; }
bool is_aborted() const { return status_ == ABORTED; }

void Abort(const char* reason);
void Abort(BailoutReason reason);

// Methods for getting operands for Use / Define / Temp.
LUnallocated* ToUnallocated(Register reg);

@ -678,7 +678,7 @@ void MacroAssembler::AssertNumber(Register object) {
JumpIfSmi(object, &ok);
cmp(FieldOperand(object, HeapObject::kMapOffset),
isolate()->factory()->heap_number_map());
Check(equal, "Operand not a number");
Check(equal, kOperandNotANumber);
bind(&ok);
}
}

@ -687,7 +687,7 @@ void MacroAssembler::AssertNumber(Register object) {
void MacroAssembler::AssertSmi(Register object) {
if (emit_debug_code()) {
test(object, Immediate(kSmiTagMask));
Check(equal, "Operand is not a smi");
Check(equal, kOperandIsNotASmi);
}
}

@ -695,12 +695,12 @@ void MacroAssembler::AssertSmi(Register object) {
void MacroAssembler::AssertString(Register object) {
if (emit_debug_code()) {
test(object, Immediate(kSmiTagMask));
Check(not_equal, "Operand is a smi and not a string");
Check(not_equal, kOperandIsASmiAndNotAString);
push(object);
mov(object, FieldOperand(object, HeapObject::kMapOffset));
CmpInstanceType(object, FIRST_NONSTRING_TYPE);
pop(object);
Check(below, "Operand is not a string");
Check(below, kOperandIsNotAString);
}
}

@ -708,12 +708,12 @@ void MacroAssembler::AssertString(Register object) {
void MacroAssembler::AssertName(Register object) {
if (emit_debug_code()) {
test(object, Immediate(kSmiTagMask));
Check(not_equal, "Operand is a smi and not a name");
Check(not_equal, kOperandIsASmiAndNotAName);
push(object);
mov(object, FieldOperand(object, HeapObject::kMapOffset));
CmpInstanceType(object, LAST_NAME_TYPE);
pop(object);
Check(below_equal, "Operand is not a name");
Check(below_equal, kOperandIsNotAName);
}
}

@ -721,7 +721,7 @@ void MacroAssembler::AssertName(Register object) {
void MacroAssembler::AssertNotSmi(Register object) {
if (emit_debug_code()) {
test(object, Immediate(kSmiTagMask));
Check(not_equal, "Operand is a smi");
Check(not_equal, kOperandIsASmi);
}
}

@ -734,7 +734,7 @@ void MacroAssembler::EnterFrame(StackFrame::Type type) {
push(Immediate(CodeObject()));
if (emit_debug_code()) {
cmp(Operand(esp, 0), Immediate(isolate()->factory()->undefined_value()));
Check(not_equal, "code object not properly patched");
Check(not_equal, kCodeObjectNotProperlyPatched);
}
}

@ -743,7 +743,7 @@ void MacroAssembler::LeaveFrame(StackFrame::Type type) {
if (emit_debug_code()) {
cmp(Operand(ebp, StandardFrameConstants::kMarkerOffset),
Immediate(Smi::FromInt(type)));
Check(equal, "stack frame types must match");
Check(equal, kStackFrameTypesMustMatch);
}
leave();
}
@ -1024,7 +1024,7 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
// When generating debug code, make sure the lexical context is set.
if (emit_debug_code()) {
cmp(scratch1, Immediate(0));
Check(not_equal, "we should not have an empty lexical context");
Check(not_equal, kWeShouldNotHaveAnEmptyLexicalContext);
}
// Load the native context of the current context.
int offset =
@ -1037,7 +1037,7 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
// Read the first word and compare to native_context_map.
cmp(FieldOperand(scratch1, HeapObject::kMapOffset),
isolate()->factory()->native_context_map());
Check(equal, "JSGlobalObject::native_context should be a native context.");
Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext);
}

// Check if both contexts are the same.
@ -1056,12 +1056,12 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
// Check the context is a native context.
if (emit_debug_code()) {
cmp(scratch2, isolate()->factory()->null_value());
Check(not_equal, "JSGlobalProxy::context() should not be null.");
Check(not_equal, kJSGlobalProxyContextShouldNotBeNull);

// Read the first word and compare to native_context_map(),
cmp(FieldOperand(scratch2, HeapObject::kMapOffset),
isolate()->factory()->native_context_map());
Check(equal, "JSGlobalObject::native_context should be a native context.");
Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext);
}

int token_offset = Context::kHeaderSize +
@ -1206,7 +1206,7 @@ void MacroAssembler::LoadAllocationTopHelper(Register result,
#ifdef DEBUG
// Assert that result actually contains top on entry.
cmp(result, Operand::StaticVariable(allocation_top));
Check(equal, "Unexpected allocation top");
Check(equal, kUnexpectedAllocationTop);
#endif
return;
}
@ -1226,7 +1226,7 @@ void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
AllocationFlags flags) {
if (emit_debug_code()) {
test(result_end, Immediate(kObjectAlignmentMask));
Check(zero, "Unaligned allocation in new space");
Check(zero, kUnalignedAllocationInNewSpace);
}

ExternalReference allocation_top =
@ -1458,7 +1458,7 @@ void MacroAssembler::UndoAllocationInNewSpace(Register object) {
and_(object, Immediate(~kHeapObjectTagMask));
#ifdef DEBUG
cmp(object, Operand::StaticVariable(new_space_allocation_top));
Check(below, "Undo allocation of non allocated memory");
Check(below, kUndoAllocationOfNonAllocatedMemory);
#endif
mov(Operand::StaticVariable(new_space_allocation_top), object);
}
@ -2062,7 +2062,7 @@ void MacroAssembler::CallApiFunctionAndReturn(Address function_address,
// previous handle scope.
mov(Operand::StaticVariable(next_address), ebx);
sub(Operand::StaticVariable(level_address), Immediate(1));
Assert(above_equal, "Invalid HandleScope level");
Assert(above_equal, kInvalidHandleScopeLevel);
cmp(edi, Operand::StaticVariable(limit_address));
j(not_equal, &delete_allocated_handles);
bind(&leave_exit_frame);
@ -2104,7 +2104,7 @@ void MacroAssembler::CallApiFunctionAndReturn(Address function_address,
cmp(return_value, isolate()->factory()->null_value());
j(equal, &ok, Label::kNear);

Abort("API call returned invalid object");
Abort(kAPICallReturnedInvalidObject);

bind(&ok);
#endif
@ -2390,7 +2390,7 @@ void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
if (emit_debug_code()) {
cmp(FieldOperand(dst, HeapObject::kMapOffset),
isolate()->factory()->with_context_map());
Check(not_equal, "Variable resolved to with context.");
Check(not_equal, kVariableResolvedToWithContext);
}
}

@ -2477,7 +2477,7 @@ void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
CheckMap(map, isolate()->factory()->meta_map(), &fail, DO_SMI_CHECK);
jmp(&ok);
bind(&fail);
Abort("Global functions must have initial map");
Abort(kGlobalFunctionsMustHaveInitialMap);
bind(&ok);
}
}
@ -2578,7 +2578,7 @@ void MacroAssembler::VerifyX87StackDepth(uint32_t depth) {
and_(eax, kTopMask);
shr(eax, 11);
cmp(eax, Immediate(tos));
Check(equal, "Unexpected FPU stack depth after instruction");
Check(equal, kUnexpectedFPUStackDepthAfterInstruction);
fnclex();
pop(eax);
}
@ -2661,8 +2661,8 @@ void MacroAssembler::DecrementCounter(Condition cc,
}


void MacroAssembler::Assert(Condition cc, const char* msg) {
if (emit_debug_code()) Check(cc, msg);
void MacroAssembler::Assert(Condition cc, BailoutReason reason) {
if (emit_debug_code()) Check(cc, reason);
}


@ -2679,16 +2679,16 @@ void MacroAssembler::AssertFastElements(Register elements) {
cmp(FieldOperand(elements, HeapObject::kMapOffset),
Immediate(factory->fixed_cow_array_map()));
j(equal, &ok);
Abort("JSObject with fast elements map has slow elements");
Abort(kJSObjectWithFastElementsMapHasSlowElements);
bind(&ok);
}
}


void MacroAssembler::Check(Condition cc, const char* msg) {
void MacroAssembler::Check(Condition cc, BailoutReason reason) {
Label L;
j(cc, &L);
Abort(msg);
Abort(reason);
// will not return here
bind(&L);
}
@ -2709,12 +2709,13 @@ void MacroAssembler::CheckStackAlignment() {
}


void MacroAssembler::Abort(const char* msg) {
void MacroAssembler::Abort(BailoutReason reason) {
// We want to pass the msg string like a smi to avoid GC
// problems, however msg is not guaranteed to be aligned
// properly. Instead, we pass an aligned pointer that is
// a proper v8 smi, but also pass the alignment difference
// from the real pointer as a smi.
const char* msg = GetBailoutReason(reason);
intptr_t p1 = reinterpret_cast<intptr_t>(msg);
intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
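A worked example of the alignment dance in Abort() above, in comment form (the addresses are made up for illustration):

// Suppose GetBailoutReason() returns a string at msg = 0x0804a003. Then
//   p1 = 0x0804a003            // raw pointer bits; low bit set, not a smi
//   p0 = (p1 & ~kSmiTagMask) + kSmiTag = 0x0804a002  // valid smi pattern
// The runtime is handed p0 plus the difference (p1 - p0) encoded as a smi,
// so the GC only ever sees properly tagged values, and the real pointer is
// reconstructed as p0 + (p1 - p0).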
@ -3118,7 +3119,7 @@ void MacroAssembler::EnsureNotWhite(
if (emit_debug_code()) {
mov(length, Operand(bitmap_scratch, MemoryChunk::kLiveBytesOffset));
cmp(length, Operand(bitmap_scratch, MemoryChunk::kSizeOffset));
Check(less_equal, "Live Bytes Count overflow chunk size");
Check(less_equal, kLiveBytesCountOverflowChunkSize);
}

bind(&done);

@ -844,15 +844,15 @@ class MacroAssembler: public Assembler {

// Calls Abort(msg) if the condition cc is not satisfied.
// Use --debug_code to enable.
void Assert(Condition cc, const char* msg);
void Assert(Condition cc, BailoutReason reason);

void AssertFastElements(Register elements);

// Like Assert(), but always enabled.
void Check(Condition cc, const char* msg);
void Check(Condition cc, BailoutReason reason);

// Print a message to stdout and abort execution.
void Abort(const char* msg);
void Abort(BailoutReason reason);

// Check that the stack is aligned.
void CheckStackAlignment();

@ -3153,7 +3153,7 @@ Handle<Code> LoadStubCompiler::CompileLoadGlobal(
__ j(equal, &miss);
} else if (FLAG_debug_code) {
__ cmp(eax, factory()->the_hole_value());
__ Check(not_equal, "DontDelete cells can't contain the hole");
__ Check(not_equal, kDontDeleteCellsCannotContainTheHole);
}

HandlerFrontendFooter(name, &success, &miss);

@ -425,7 +425,7 @@ LChunk* LChunk::NewChunk(HGraph* graph) {
int values = graph->GetMaximumValueID();
CompilationInfo* info = graph->info();
if (values > LUnallocated::kMaxVirtualRegisters) {
info->set_bailout_reason("not enough virtual registers for values");
info->set_bailout_reason(kNotEnoughVirtualRegistersForValues);
return NULL;
}
LAllocator allocator(values, graph);
@ -434,7 +434,7 @@ LChunk* LChunk::NewChunk(HGraph* graph) {
if (chunk == NULL) return NULL;

if (!allocator.Allocate(chunk)) {
info->set_bailout_reason("not enough virtual registers (regalloc)");
info->set_bailout_reason(kNotEnoughVirtualRegistersRegalloc);
return NULL;
}

@ -123,10 +123,10 @@ void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
// Initial map for the builtin InternalArray functions should be maps.
__ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
__ And(t0, a2, Operand(kSmiTagMask));
__ Assert(ne, "Unexpected initial map for InternalArray function",
__ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction,
t0, Operand(zero_reg));
__ GetObjectType(a2, a3, t0);
__ Assert(eq, "Unexpected initial map for InternalArray function",
__ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction,
t0, Operand(MAP_TYPE));
}

@ -153,10 +153,10 @@ void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
// Initial map for the builtin Array functions should be maps.
__ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
__ And(t0, a2, Operand(kSmiTagMask));
__ Assert(ne, "Unexpected initial map for Array function (1)",
__ Assert(ne, kUnexpectedInitialMapForArrayFunction1,
t0, Operand(zero_reg));
__ GetObjectType(a2, a3, t0);
__ Assert(eq, "Unexpected initial map for Array function (2)",
__ Assert(eq, kUnexpectedInitialMapForArrayFunction2,
t0, Operand(MAP_TYPE));
}

@ -185,7 +185,7 @@ void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
Register function = a1;
if (FLAG_debug_code) {
__ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, a2);
__ Assert(eq, "Unexpected String function", function, Operand(a2));
__ Assert(eq, kUnexpectedStringFunction, function, Operand(a2));
}

// Load the first arguments in a0 and get rid of the rest.
@ -231,10 +231,10 @@ void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
__ LoadGlobalFunctionInitialMap(function, map, t0);
if (FLAG_debug_code) {
__ lbu(t0, FieldMemOperand(map, Map::kInstanceSizeOffset));
__ Assert(eq, "Unexpected string wrapper instance size",
__ Assert(eq, kUnexpectedStringWrapperInstanceSize,
t0, Operand(JSValue::kSize >> kPointerSizeLog2));
__ lbu(t0, FieldMemOperand(map, Map::kUnusedPropertyFieldsOffset));
__ Assert(eq, "Unexpected unused properties of string wrapper",
__ Assert(eq, kUnexpectedUnusedPropertiesOfStringWrapper,
t0, Operand(zero_reg));
}
__ sw(map, FieldMemOperand(v0, HeapObject::kMapOffset));
@ -489,7 +489,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
__ addu(a0, t5, t0);
// a0: offset of first field after pre-allocated fields
if (FLAG_debug_code) {
__ Assert(le, "Unexpected number of pre-allocated property fields.",
__ Assert(le, kUnexpectedNumberOfPreAllocatedPropertyFields,
a0, Operand(t6));
}
__ InitializeFieldsWithFiller(t5, a0, t7);
@ -522,7 +522,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,

// Done if no extra properties are to be allocated.
__ Branch(&allocated, eq, a3, Operand(zero_reg));
__ Assert(greater_equal, "Property allocation count failed.",
__ Assert(greater_equal, kPropertyAllocationCountFailed,
a3, Operand(zero_reg));

// Scale the number of elements by pointer size and add the header for
@ -569,7 +569,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
__ LoadRoot(t7, Heap::kUndefinedValueRootIndex);
} else if (FLAG_debug_code) {
__ LoadRoot(t8, Heap::kUndefinedValueRootIndex);
__ Assert(eq, "Undefined value not loaded.", t7, Operand(t8));
__ Assert(eq, kUndefinedValueNotLoaded, t7, Operand(t8));
}
__ jmp(&entry);
__ bind(&loop);

@ -520,8 +520,7 @@ void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
|
||||
Label after_sentinel;
|
||||
__ JumpIfNotSmi(a3, &after_sentinel);
|
||||
if (FLAG_debug_code) {
|
||||
const char* message = "Expected 0 as a Smi sentinel";
|
||||
__ Assert(eq, message, a3, Operand(zero_reg));
|
||||
__ Assert(eq, kExpected0AsASmiSentinel, a3, Operand(zero_reg));
|
||||
}
|
||||
__ lw(a3, GlobalObjectOperand());
|
||||
__ lw(a3, FieldMemOperand(a3, GlobalObject::kNativeContextOffset));
|
||||
@ -679,7 +678,7 @@ void FloatingPointHelper::LoadNumber(MacroAssembler* masm,
|
||||
Label* not_number) {
|
||||
__ AssertRootValue(heap_number_map,
|
||||
Heap::kHeapNumberMapRootIndex,
|
||||
"HeapNumberMap register clobbered.");
|
||||
kHeapNumberMapRegisterClobbered);
|
||||
|
||||
Label is_smi, done;
|
||||
|
||||
@ -729,7 +728,7 @@ void FloatingPointHelper::ConvertNumberToInt32(MacroAssembler* masm,
|
||||
Label* not_number) {
|
||||
__ AssertRootValue(heap_number_map,
|
||||
Heap::kHeapNumberMapRootIndex,
|
||||
"HeapNumberMap register clobbered.");
|
||||
kHeapNumberMapRegisterClobbered);
|
||||
Label done;
|
||||
Label not_in_int32_range;
|
||||
|
||||
@ -806,7 +805,7 @@ void FloatingPointHelper::LoadNumberAsInt32Double(MacroAssembler* masm,
|
||||
__ bind(&obj_is_not_smi);
|
||||
__ AssertRootValue(heap_number_map,
|
||||
Heap::kHeapNumberMapRootIndex,
|
||||
"HeapNumberMap register clobbered.");
|
||||
kHeapNumberMapRegisterClobbered);
|
||||
__ JumpIfNotHeapNumber(object, heap_number_map, scratch1, not_int32);
|
||||
|
||||
// Load the number.
|
||||
@ -853,7 +852,7 @@ void FloatingPointHelper::LoadNumberAsInt32(MacroAssembler* masm,
|
||||
|
||||
__ AssertRootValue(heap_number_map,
|
||||
Heap::kHeapNumberMapRootIndex,
|
||||
"HeapNumberMap register clobbered.");
|
||||
kHeapNumberMapRegisterClobbered);
|
||||
|
||||
__ JumpIfNotHeapNumber(object, heap_number_map, scratch1, &maybe_undefined);
|
||||
|
||||
@ -4279,12 +4278,12 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
|
||||
if (FLAG_debug_code) {
|
||||
__ And(t0, regexp_data, Operand(kSmiTagMask));
|
||||
__ Check(nz,
|
||||
"Unexpected type for RegExp data, FixedArray expected",
|
||||
kUnexpectedTypeForRegExpDataFixedArrayExpected,
|
||||
t0,
|
||||
Operand(zero_reg));
|
||||
__ GetObjectType(regexp_data, a0, a0);
|
||||
__ Check(eq,
|
||||
"Unexpected type for RegExp data, FixedArray expected",
|
||||
kUnexpectedTypeForRegExpDataFixedArrayExpected,
|
||||
a0,
|
||||
Operand(FIXED_ARRAY_TYPE));
|
||||
}
|
||||
@ -4639,7 +4638,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
|
||||
// Sequential strings have already been ruled out.
|
||||
__ And(at, a0, Operand(kIsIndirectStringMask));
|
||||
__ Assert(eq,
|
||||
"external string expected, but not found",
|
||||
kExternalStringExpectedButNotFound,
|
||||
at,
|
||||
Operand(zero_reg));
|
||||
}
|
||||
@ -5020,7 +5019,7 @@ void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
|
||||
void StringCharCodeAtGenerator::GenerateSlow(
|
||||
MacroAssembler* masm,
|
||||
const RuntimeCallHelper& call_helper) {
|
||||
__ Abort("Unexpected fallthrough to CharCodeAt slow case");
|
||||
__ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase);
|
||||
|
||||
// Index is not a smi.
|
||||
__ bind(&index_not_smi_);
|
||||
@ -5069,7 +5068,7 @@ void StringCharCodeAtGenerator::GenerateSlow(
|
||||
call_helper.AfterCall(masm);
|
||||
__ jmp(&exit_);
|
||||
|
||||
__ Abort("Unexpected fallthrough from CharCodeAt slow case");
|
||||
__ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase);
|
||||
}
|
||||
|
||||
|
||||
@ -5106,7 +5105,7 @@ void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
|
||||
void StringCharFromCodeGenerator::GenerateSlow(
|
||||
MacroAssembler* masm,
|
||||
const RuntimeCallHelper& call_helper) {
|
||||
__ Abort("Unexpected fallthrough to CharFromCode slow case");
|
||||
__ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase);
|
||||
|
||||
__ bind(&slow_case_);
|
||||
call_helper.BeforeCall(masm);
|
||||
@ -5117,7 +5116,7 @@ void StringCharFromCodeGenerator::GenerateSlow(
|
||||
call_helper.AfterCall(masm);
|
||||
__ Branch(&exit_);
|
||||
|
||||
__ Abort("Unexpected fallthrough from CharFromCode slow case");
|
||||
__ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase);
|
||||
}
|
||||
|
||||
|
||||
@ -5172,7 +5171,7 @@ void StringHelper::GenerateCopyCharactersLong(MacroAssembler* masm,
|
||||
// that it is.
|
||||
__ And(scratch4, dest, Operand(kPointerAlignmentMask));
|
||||
__ Check(eq,
|
||||
"Destination of copy not aligned.",
|
||||
kDestinationOfCopyNotAligned,
|
||||
scratch4,
|
||||
Operand(zero_reg));
|
||||
}
|
||||
@ -5372,7 +5371,7 @@ void StringHelper::GenerateTwoCharacterStringTableProbe(MacroAssembler* masm,
|
||||
// Must be the hole (deleted entry).
|
||||
if (FLAG_debug_code) {
|
||||
__ LoadRoot(scratch, Heap::kTheHoleValueRootIndex);
|
||||
__ Assert(eq, "oddball in string table is not undefined or the hole",
|
||||
__ Assert(eq, kOddballInStringTableIsNotUndefinedOrTheHole,
|
||||
scratch, Operand(candidate));
|
||||
}
|
||||
__ jmp(&next_probe[i]);
|
||||
@ -6580,7 +6579,7 @@ void DirectCEntryStub::Generate(MacroAssembler* masm) {
|
||||
// filled with kZapValue by the GC.
|
||||
// Dereference the address and check for this.
|
||||
__ lw(t0, MemOperand(t9));
|
||||
__ Assert(ne, "Received invalid return address.", t0,
|
||||
__ Assert(ne, kReceivedInvalidReturnAddress, t0,
|
||||
Operand(reinterpret_cast<uint32_t>(kZapValue)));
|
||||
}
|
||||
__ Jump(t9);
|
||||
@ -7331,7 +7330,7 @@ static void CreateArrayDispatch(MacroAssembler* masm) {
|
||||
}
|
||||
|
||||
// If we reached this point there is a problem.
|
||||
__ Abort("Unexpected ElementsKind in array constructor");
|
||||
__ Abort(kUnexpectedElementsKindInArrayConstructor);
|
||||
}
|
||||
|
||||
|
||||
@ -7386,7 +7385,7 @@ static void CreateArrayDispatchOneArgument(MacroAssembler* masm) {
|
||||
}
|
||||
|
||||
// If we reached this point there is a problem.
|
||||
__ Abort("Unexpected ElementsKind in array constructor");
|
||||
__ Abort(kUnexpectedElementsKindInArrayConstructor);
|
||||
}
|
||||
|
||||
|
||||
@ -7447,10 +7446,10 @@ void ArrayConstructorStub::Generate(MacroAssembler* masm) {
|
||||
__ lw(a3, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
|
||||
// Will both indicate a NULL and a Smi.
|
||||
__ And(at, a3, Operand(kSmiTagMask));
|
||||
__ Assert(ne, "Unexpected initial map for Array function",
|
||||
__ Assert(ne, kUnexpectedInitialMapForArrayFunction,
|
||||
at, Operand(zero_reg));
|
||||
__ GetObjectType(a3, a3, t0);
|
||||
__ Assert(eq, "Unexpected initial map for Array function",
|
||||
__ Assert(eq, kUnexpectedInitialMapForArrayFunction,
|
||||
t0, Operand(MAP_TYPE));
|
||||
|
||||
// We should either have undefined in a2 or a valid cell.
|
||||
@ -7459,7 +7458,7 @@ void ArrayConstructorStub::Generate(MacroAssembler* masm) {
|
||||
__ LoadRoot(at, Heap::kUndefinedValueRootIndex);
|
||||
__ Branch(&okay_here, eq, a2, Operand(at));
|
||||
__ lw(a3, FieldMemOperand(a2, 0));
|
||||
__ Assert(eq, "Expected property cell in register a2",
|
||||
__ Assert(eq, kExpectedPropertyCellInRegisterA2,
|
||||
a3, Operand(cell_map));
|
||||
__ bind(&okay_here);
|
||||
}
|
||||
@ -7559,10 +7558,10 @@ void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
|
||||
__ lw(a3, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
|
||||
// Will both indicate a NULL and a Smi.
|
||||
__ And(at, a3, Operand(kSmiTagMask));
|
||||
__ Assert(ne, "Unexpected initial map for Array function",
|
||||
__ Assert(ne, kUnexpectedInitialMapForArrayFunction,
|
||||
at, Operand(zero_reg));
|
||||
__ GetObjectType(a3, a3, t0);
|
||||
__ Assert(eq, "Unexpected initial map for Array function",
|
||||
__ Assert(eq, kUnexpectedInitialMapForArrayFunction,
|
||||
t0, Operand(MAP_TYPE));
|
||||
}
|
||||
|
||||
@ -7579,7 +7578,7 @@ void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
|
||||
Label done;
|
||||
__ Branch(&done, eq, a3, Operand(FAST_ELEMENTS));
|
||||
__ Assert(
|
||||
eq, "Invalid ElementsKind for InternalArray or InternalPackedArray",
|
||||
eq, kInvalidElementsKindForInternalArrayOrInternalPackedArray,
|
||||
a3, Operand(FAST_HOLEY_ELEMENTS));
|
||||
__ bind(&done);
|
||||
}
|
||||
|
@ -289,7 +289,7 @@ void ElementsTransitionGenerator::GenerateSmiToDouble(
|
||||
__ SmiTag(t5);
|
||||
__ Or(t5, t5, Operand(1));
|
||||
__ LoadRoot(at, Heap::kTheHoleValueRootIndex);
|
||||
__ Assert(eq, "object found in smi-only array", at, Operand(t5));
|
||||
__ Assert(eq, kObjectFoundInSmiOnlyArray, at, Operand(t5));
|
||||
}
|
||||
__ sw(t0, MemOperand(t3)); // mantissa
|
||||
__ sw(t1, MemOperand(t3, kIntSize)); // exponent
|
||||
@ -489,7 +489,7 @@ void StringCharLoadGenerator::Generate(MacroAssembler* masm,
|
||||
// Assert that we do not have a cons or slice (indirect strings) here.
|
||||
// Sequential strings have already been ruled out.
|
||||
__ And(at, result, Operand(kIsIndirectStringMask));
|
||||
__ Assert(eq, "external string expected, but not found",
|
||||
__ Assert(eq, kExternalStringExpectedButNotFound,
|
||||
at, Operand(zero_reg));
|
||||
}
|
||||
// Rule out short external strings.
@ -142,8 +142,7 @@ static void Generate_DebugBreakCallHelper(MacroAssembler* masm,
if ((non_object_regs & (1 << r)) != 0) {
if (FLAG_debug_code) {
__ And(at, reg, 0xc0000000);
__ Assert(
eq, "Unable to encode value as smi", at, Operand(zero_reg));
__ Assert(eq, kUnableToEncodeValueAsSmi, at, Operand(zero_reg));
}
__ sll(reg, reg, kSmiTagSize);
}
@ -325,12 +324,12 @@ void Debug::GenerateSlotDebugBreak(MacroAssembler* masm) {

void Debug::GeneratePlainReturnLiveEdit(MacroAssembler* masm) {
masm->Abort("LiveEdit frame dropping is not supported on mips");
masm->Abort(kLiveEditFrameDroppingIsNotSupportedOnMips);
}

void Debug::GenerateFrameDropperLiveEdit(MacroAssembler* masm) {
masm->Abort("LiveEdit frame dropping is not supported on mips");
masm->Abort(kLiveEditFrameDroppingIsNotSupportedOnMips);
}

@ -786,10 +786,10 @@ void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
// Check that we're not inside a with or catch context.
__ lw(a1, FieldMemOperand(cp, HeapObject::kMapOffset));
__ LoadRoot(t0, Heap::kWithContextMapRootIndex);
__ Check(ne, "Declaration in with context.",
__ Check(ne, kDeclarationInWithContext,
a1, Operand(t0));
__ LoadRoot(t0, Heap::kCatchContextMapRootIndex);
__ Check(ne, "Declaration in catch context.",
__ Check(ne, kDeclarationInCatchContext,
a1, Operand(t0));
}
}
@ -2529,7 +2529,7 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
// Check for an uninitialized let binding.
__ lw(a2, location);
__ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
__ Check(eq, "Let binding re-initialization.", a2, Operand(t0));
__ Check(eq, kLetBindingReInitialization, a2, Operand(t0));
}
// Perform the assignment.
__ sw(v0, location);
@ -3492,21 +3492,21 @@ void FullCodeGenerator::EmitSeqStringSetCharCheck(Register string,
Register value,
uint32_t encoding_mask) {
__ And(at, index, Operand(kSmiTagMask));
__ Check(eq, "Non-smi index", at, Operand(zero_reg));
__ Check(eq, kNonSmiIndex, at, Operand(zero_reg));
__ And(at, value, Operand(kSmiTagMask));
__ Check(eq, "Non-smi value", at, Operand(zero_reg));
__ Check(eq, kNonSmiValue, at, Operand(zero_reg));

__ lw(at, FieldMemOperand(string, String::kLengthOffset));
__ Check(lt, "Index is too large", index, Operand(at));
__ Check(lt, kIndexIsTooLarge, index, Operand(at));

__ Check(ge, "Index is negative", index, Operand(zero_reg));
__ Check(ge, kIndexIsNegative, index, Operand(zero_reg));

__ lw(at, FieldMemOperand(string, HeapObject::kMapOffset));
__ lbu(at, FieldMemOperand(at, Map::kInstanceTypeOffset));

__ And(at, at, Operand(kStringRepresentationMask | kStringEncodingMask));
__ Subu(at, at, Operand(encoding_mask));
__ Check(eq, "Unexpected string type", at, Operand(zero_reg));
__ Check(eq, kUnexpectedStringType, at, Operand(zero_reg));
}

@ -3881,7 +3881,7 @@ void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
Handle<FixedArray> jsfunction_result_caches(
isolate()->native_context()->jsfunction_result_caches());
if (jsfunction_result_caches->length() <= cache_id) {
__ Abort("Attempt to use undefined cache.");
__ Abort(kAttemptToUseUndefinedCache);
__ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
context()->Plug(v0);
return;
@ -4063,7 +4063,7 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
// element: Current array element.
// elements_end: Array end.
if (generate_debug_code_) {
__ Assert(gt, "No empty arrays here in EmitFastAsciiArrayJoin",
__ Assert(gt, kNoEmptyArraysHereInEmitFastAsciiArrayJoin,
array_length, Operand(zero_reg));
}
__ bind(&loop);

@ -91,7 +91,7 @@ void LCodeGen::FinishCode(Handle<Code> code) {
}

void LChunkBuilder::Abort(const char* reason) {
void LChunkBuilder::Abort(BailoutReason reason) {
info()->set_bailout_reason(reason);
status_ = ABORTED;
}
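The Abort hunk just above is the key plumbing: instead of formatting a string, each backend now records the enum value on the CompilationInfo, which is what later lets the reason reach SharedFunctionInfo and the CPU profiler. A minimal sketch of that pattern, with stand-in types (CompilationInfoSketch and BuilderSketch are illustrative, not the real V8 classes):

// Minimal sketch, assuming simplified stand-ins for V8's types.
enum BailoutReason { kNoReason, kGeneratedCodeIsTooLarge };

class CompilationInfoSketch {
 public:
  void set_bailout_reason(BailoutReason reason) { bailout_reason_ = reason; }
  BailoutReason bailout_reason() const { return bailout_reason_; }
 private:
  BailoutReason bailout_reason_ = kNoReason;
};

struct BuilderSketch {
  enum Status { UNUSED, BUILDING, DONE, ABORTED };
  CompilationInfoSketch* info_;
  Status status_ = UNUSED;
  void Abort(BailoutReason reason) {
    info_->set_bailout_reason(reason);  // one enum byte travels, no char*
    status_ = ABORTED;
  }
};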
@ -324,7 +324,7 @@ bool LCodeGen::GenerateDeoptJumpTable() {
// end of the jump table.
if (!is_int16((masm()->pc_offset() / Assembler::kInstrSize) +
deopt_jump_table_.length() * 12)) {
Abort("Generated code is too large");
Abort(kGeneratedCodeIsTooLarge);
}

if (deopt_jump_table_.length() > 0) {
@ -411,7 +411,7 @@ Register LCodeGen::EmitLoadRegister(LOperand* op, Register scratch) {
ASSERT(constant->HasSmiValue());
__ li(scratch, Operand(Smi::FromInt(constant->Integer32Value())));
} else if (r.IsDouble()) {
Abort("EmitLoadRegister: Unsupported double immediate.");
Abort(kEmitLoadRegisterUnsupportedDoubleImmediate);
} else {
ASSERT(r.IsTagged());
__ LoadObject(scratch, literal);
@ -449,9 +449,9 @@ DoubleRegister LCodeGen::EmitLoadDoubleRegister(LOperand* op,
__ cvt_d_w(dbl_scratch, flt_scratch);
return dbl_scratch;
} else if (r.IsDouble()) {
Abort("unsupported double immediate");
Abort(kUnsupportedDoubleImmediate);
} else if (r.IsTagged()) {
Abort("unsupported tagged immediate");
Abort(kUnsupportedTaggedImmediate);
}
} else if (op->IsStackSlot() || op->IsArgument()) {
MemOperand mem_op = ToMemOperand(op);
@ -520,14 +520,14 @@ Operand LCodeGen::ToOperand(LOperand* op) {
ASSERT(constant->HasInteger32Value());
return Operand(constant->Integer32Value());
} else if (r.IsDouble()) {
Abort("ToOperand Unsupported double immediate.");
Abort(kToOperandUnsupportedDoubleImmediate);
}
ASSERT(r.IsTagged());
return Operand(constant->handle());
} else if (op->IsRegister()) {
return Operand(ToRegister(op));
} else if (op->IsDoubleRegister()) {
Abort("ToOperand IsDoubleRegister unimplemented");
Abort(kToOperandIsDoubleRegisterUnimplemented);
return Operand(0);
}
// Stack slots not implemented, use ToMemOperand instead.
@ -748,7 +748,7 @@ void LCodeGen::DeoptimizeIf(Condition cc,
Address entry =
Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type);
if (entry == NULL) {
Abort("bailout was not prepared");
Abort(kBailoutWasNotPrepared);
return;
}

@ -1770,7 +1770,7 @@ void LCodeGen::DoSeqStringSetChar(LSeqStringSetChar* instr) {
static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
__ Subu(at, at, Operand(encoding == String::ONE_BYTE_ENCODING
? one_byte_seq_type : two_byte_seq_type));
__ Check(eq, "Unexpected string type", at, Operand(zero_reg));
__ Check(eq, kUnexpectedStringType, at, Operand(zero_reg));
}

__ Addu(scratch,
@ -3076,7 +3076,7 @@ void LCodeGen::DoLoadKeyedExternalArray(LLoadKeyed* instr) {
if (key_is_constant) {
constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
if (constant_key & 0xF0000000) {
Abort("array index constant value too big.");
Abort(kArrayIndexConstantValueTooBig);
}
} else {
key = ToRegister(instr->key());
@ -3162,7 +3162,7 @@ void LCodeGen::DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr) {
if (key_is_constant) {
constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
if (constant_key & 0xF0000000) {
Abort("array index constant value too big.");
Abort(kArrayIndexConstantValueTooBig);
}
} else {
key = ToRegister(instr->key());
@ -3433,7 +3433,7 @@ void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
void LCodeGen::DoPushArgument(LPushArgument* instr) {
LOperand* argument = instr->value();
if (argument->IsDoubleRegister() || argument->IsDoubleStackSlot()) {
Abort("DoPushArgument not implemented for double type.");
Abort(kDoPushArgumentNotImplementedForDoubleType);
} else {
Register argument_reg = EmitLoadRegister(argument, at);
__ push(argument_reg);
@ -4258,7 +4258,7 @@ void LCodeGen::DoStoreKeyedExternalArray(LStoreKeyed* instr) {
if (key_is_constant) {
constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
if (constant_key & 0xF0000000) {
Abort("array index constant value too big.");
Abort(kArrayIndexConstantValueTooBig);
}
} else {
key = ToRegister(instr->key());
@ -4336,7 +4336,7 @@ void LCodeGen::DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr) {
if (key_is_constant) {
constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
if (constant_key & 0xF0000000) {
Abort("array index constant value too big.");
Abort(kArrayIndexConstantValueTooBig);
}
} else {
key = ToRegister(instr->key());
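All four keyed-access hunks above repeat the same constant_key & 0xF0000000 guard before kArrayIndexConstantValueTooBig. A plausible reading, sketched below under the assumption that the key is later shifted left by at most 3 bits and added to a small header offset: clearing the top four bits keeps the scaled displacement inside a positive 32-bit range. The helper is illustrative only, not V8 code:

#include <cstdint>

// Illustrative only: mirrors the guard's arithmetic, not V8's API.
bool KeyFitsInDisplacement(int32_t constant_key, int shift_size) {
  if (constant_key & 0xF0000000) return false;  // top 4 bits must be clear
  // With shift_size <= 3, the scaled key plus a small header offset
  // still fits in a positive 32-bit displacement.
  int64_t scaled = static_cast<int64_t>(constant_key) << shift_size;
  return scaled >= 0 && scaled <= INT32_MAX;
}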
@ -213,7 +213,7 @@ class LCodeGen BASE_EMBEDDED {

int GetStackSlotCount() const { return chunk()->spill_slot_count(); }

void Abort(const char* reason);
void Abort(BailoutReason reason);
void FPRINTF_CHECKING Comment(const char* format, ...);

void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }

@ -442,7 +442,7 @@ LPlatformChunk* LChunkBuilder::Build() {
}

void LCodeGen::Abort(const char* reason) {
void LCodeGen::Abort(BailoutReason reason) {
info()->set_bailout_reason(reason);
status_ = ABORTED;
}
@ -650,7 +650,7 @@ LUnallocated* LChunkBuilder::TempRegister() {
new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER);
int vreg = allocator_->GetVirtualRegister();
if (!allocator_->AllocationOk()) {
Abort("Out of virtual registers while trying to allocate temp register.");
Abort(kOutOfVirtualRegistersWhileTryingToAllocateTempRegister);
vreg = 0;
}
operand->set_virtual_register(vreg);
@ -2365,7 +2365,7 @@ LInstruction* LChunkBuilder::DoParameter(HParameter* instr) {
LInstruction* LChunkBuilder::DoUnknownOSRValue(HUnknownOSRValue* instr) {
int spill_index = chunk()->GetNextSpillIndex(false); // Not double-width.
if (spill_index > LUnallocated::kMaxFixedSlotIndex) {
Abort("Too many spill slots needed for OSR");
Abort(kTooManySpillSlotsNeededForOSR);
spill_index = 0;
}
return DefineAsSpilled(new(zone()) LUnknownOSRValue, spill_index);

@ -2642,7 +2642,7 @@ class LChunkBuilder BASE_EMBEDDED {
bool is_done() const { return status_ == DONE; }
bool is_aborted() const { return status_ == ABORTED; }

void Abort(const char* reason);
void Abort(BailoutReason reason);

// Methods for getting operands for Use / Define / Temp.
LUnallocated* ToUnallocated(Register reg);
@ -256,7 +256,7 @@ void MacroAssembler::RecordWrite(Register object,
if (emit_debug_code()) {
lw(at, MemOperand(address));
Assert(
eq, "Wrong address or value passed to RecordWrite", at, Operand(value));
eq, kWrongAddressOrValuePassedToRecordWrite, at, Operand(value));
}

Label done;
@ -358,7 +358,7 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
lw(scratch, MemOperand(fp, StandardFrameConstants::kContextOffset));
// In debug mode, make sure the lexical context is set.
#ifdef DEBUG
Check(ne, "we should not have an empty lexical context",
Check(ne, kWeShouldNotHaveAnEmptyLexicalContext,
scratch, Operand(zero_reg));
#endif

@ -374,7 +374,7 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
// Read the first word and compare to the native_context_map.
lw(holder_reg, FieldMemOperand(scratch, HeapObject::kMapOffset));
LoadRoot(at, Heap::kNativeContextMapRootIndex);
Check(eq, "JSGlobalObject::native_context should be a native context.",
Check(eq, kJSGlobalObjectNativeContextShouldBeANativeContext,
holder_reg, Operand(at));
pop(holder_reg); // Restore holder.
}
@ -388,12 +388,12 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
push(holder_reg); // Temporarily save holder on the stack.
mov(holder_reg, at); // Move at to its holding place.
LoadRoot(at, Heap::kNullValueRootIndex);
Check(ne, "JSGlobalProxy::context() should not be null.",
Check(ne, kJSGlobalProxyContextShouldNotBeNull,
holder_reg, Operand(at));

lw(holder_reg, FieldMemOperand(holder_reg, HeapObject::kMapOffset));
LoadRoot(at, Heap::kNativeContextMapRootIndex);
Check(eq, "JSGlobalObject::native_context should be a native context.",
Check(eq, kJSGlobalObjectNativeContextShouldBeANativeContext,
holder_reg, Operand(at));
// Restore at is not needed. at is reloaded below.
pop(holder_reg); // Restore holder.
@ -2938,7 +2938,7 @@ void MacroAssembler::Allocate(int object_size,
// immediately below so this use of t9 does not cause difference with
// respect to register content between debug and release mode.
lw(t9, MemOperand(topaddr));
Check(eq, "Unexpected allocation top", result, Operand(t9));
Check(eq, kUnexpectedAllocationTop, result, Operand(t9));
}
// Load allocation limit into t9. Result already contains allocation top.
lw(t9, MemOperand(topaddr, limit - top));
@ -3008,7 +3008,7 @@ void MacroAssembler::Allocate(Register object_size,
// immediately below so this use of t9 does not cause difference with
// respect to register content between debug and release mode.
lw(t9, MemOperand(topaddr));
Check(eq, "Unexpected allocation top", result, Operand(t9));
Check(eq, kUnexpectedAllocationTop, result, Operand(t9));
}
// Load allocation limit into t9. Result already contains allocation top.
lw(t9, MemOperand(topaddr, limit - top));
@ -3028,7 +3028,7 @@ void MacroAssembler::Allocate(Register object_size,
// Update allocation top. result temporarily holds the new top.
if (emit_debug_code()) {
And(t9, scratch2, Operand(kObjectAlignmentMask));
Check(eq, "Unaligned allocation in new space", t9, Operand(zero_reg));
Check(eq, kUnalignedAllocationInNewSpace, t9, Operand(zero_reg));
}
sw(scratch2, MemOperand(topaddr));

@ -3050,7 +3050,7 @@ void MacroAssembler::UndoAllocationInNewSpace(Register object,
// Check that the object un-allocated is below the current top.
li(scratch, Operand(new_space_allocation_top));
lw(scratch, MemOperand(scratch));
Check(less, "Undo allocation of non allocated memory",
Check(less, kUndoAllocationOfNonAllocatedMemory,
object, Operand(scratch));
#endif
// Write the address of the object to un-allocate as the current top.
@ -3303,7 +3303,7 @@ void MacroAssembler::CopyBytes(Register src,
bind(&word_loop);
if (emit_debug_code()) {
And(scratch, src, kPointerSize - 1);
Assert(eq, "Expecting alignment for CopyBytes",
Assert(eq, kExpectingAlignmentForCopyBytes,
scratch, Operand(zero_reg));
}
Branch(&byte_loop, lt, length, Operand(kPointerSize));
@ -4029,7 +4029,7 @@ void MacroAssembler::CallApiFunctionAndReturn(ExternalReference function,
sw(s0, MemOperand(s3, kNextOffset));
if (emit_debug_code()) {
lw(a1, MemOperand(s3, kLevelOffset));
Check(eq, "Unexpected level after return from api call", a1, Operand(s2));
Check(eq, kUnexpectedLevelAfterReturnFromApiCall, a1, Operand(s2));
}
Subu(s2, s2, Operand(1));
sw(s2, MemOperand(s3, kLevelOffset));
@ -4383,10 +4383,10 @@ void MacroAssembler::DecrementCounter(StatsCounter* counter, int value,
// -----------------------------------------------------------------------------
// Debugging.

void MacroAssembler::Assert(Condition cc, const char* msg,
void MacroAssembler::Assert(Condition cc, BailoutReason reason,
Register rs, Operand rt) {
if (emit_debug_code())
Check(cc, msg, rs, rt);
Check(cc, reason, rs, rt);
}

@ -4394,7 +4394,7 @@ void MacroAssembler::AssertRegisterIsRoot(Register reg,
Heap::RootListIndex index) {
if (emit_debug_code()) {
LoadRoot(at, index);
Check(eq, "Register did not match expected root", reg, Operand(at));
Check(eq, kRegisterDidNotMatchExpectedRoot, reg, Operand(at));
}
}

@ -4411,24 +4411,24 @@ void MacroAssembler::AssertFastElements(Register elements) {
Branch(&ok, eq, elements, Operand(at));
LoadRoot(at, Heap::kFixedCOWArrayMapRootIndex);
Branch(&ok, eq, elements, Operand(at));
Abort("JSObject with fast elements map has slow elements");
Abort(kJSObjectWithFastElementsMapHasSlowElements);
bind(&ok);
pop(elements);
}
}

void MacroAssembler::Check(Condition cc, const char* msg,
void MacroAssembler::Check(Condition cc, BailoutReason reason,
Register rs, Operand rt) {
Label L;
Branch(&L, cc, rs, rt);
Abort(msg);
Abort(reason);
// Will not return here.
bind(&L);
}

void MacroAssembler::Abort(const char* msg) {
void MacroAssembler::Abort(BailoutReason reason) {
Label abort_start;
bind(&abort_start);
// We want to pass the msg string like a smi to avoid GC
@ -4436,6 +4436,7 @@ void MacroAssembler::Abort(const char* msg) {
// properly. Instead, we pass an aligned pointer that is
// a proper v8 smi, but also pass the alignment difference
// from the real pointer as a smi.
const char* msg = GetBailoutReason(reason);
intptr_t p1 = reinterpret_cast<intptr_t>(msg);
intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
@ -4579,7 +4580,7 @@ void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
CheckMap(map, scratch, Heap::kMetaMapRootIndex, &fail, DO_SMI_CHECK);
Branch(&ok);
bind(&fail);
Abort("Global functions must have initial map");
Abort(kGlobalFunctionsMustHaveInitialMap);
bind(&ok);
}
}
@ -4862,7 +4863,7 @@ void MacroAssembler::AssertNotSmi(Register object) {
if (emit_debug_code()) {
STATIC_ASSERT(kSmiTag == 0);
andi(at, object, kSmiTagMask);
Check(ne, "Operand is a smi", at, Operand(zero_reg));
Check(ne, kOperandIsASmi, at, Operand(zero_reg));
}
}

@ -4871,7 +4872,7 @@ void MacroAssembler::AssertSmi(Register object) {
if (emit_debug_code()) {
STATIC_ASSERT(kSmiTag == 0);
andi(at, object, kSmiTagMask);
Check(eq, "Operand is a smi", at, Operand(zero_reg));
Check(eq, kOperandIsASmi, at, Operand(zero_reg));
}
}

@ -4880,11 +4881,11 @@ void MacroAssembler::AssertString(Register object) {
if (emit_debug_code()) {
STATIC_ASSERT(kSmiTag == 0);
And(t0, object, Operand(kSmiTagMask));
Check(ne, "Operand is a smi and not a string", t0, Operand(zero_reg));
Check(ne, kOperandIsASmiAndNotAString, t0, Operand(zero_reg));
push(object);
lw(object, FieldMemOperand(object, HeapObject::kMapOffset));
lbu(object, FieldMemOperand(object, Map::kInstanceTypeOffset));
Check(lo, "Operand is not a string", object, Operand(FIRST_NONSTRING_TYPE));
Check(lo, kOperandIsNotAString, object, Operand(FIRST_NONSTRING_TYPE));
pop(object);
}
}
@ -4894,11 +4895,11 @@ void MacroAssembler::AssertName(Register object) {
if (emit_debug_code()) {
STATIC_ASSERT(kSmiTag == 0);
And(t0, object, Operand(kSmiTagMask));
Check(ne, "Operand is a smi and not a name", t0, Operand(zero_reg));
Check(ne, kOperandIsASmiAndNotAName, t0, Operand(zero_reg));
push(object);
lw(object, FieldMemOperand(object, HeapObject::kMapOffset));
lbu(object, FieldMemOperand(object, Map::kInstanceTypeOffset));
Check(le, "Operand is not a name", object, Operand(LAST_NAME_TYPE));
Check(le, kOperandIsNotAName, object, Operand(LAST_NAME_TYPE));
pop(object);
}
}
@ -4906,11 +4907,11 @@ void MacroAssembler::AssertName(Register object) {

void MacroAssembler::AssertRootValue(Register src,
Heap::RootListIndex root_value_index,
const char* message) {
BailoutReason reason) {
if (emit_debug_code()) {
ASSERT(!src.is(at));
LoadRoot(at, root_value_index);
Check(eq, message, src, Operand(at));
Check(eq, reason, src, Operand(at));
}
}

@ -5127,7 +5128,7 @@ void MacroAssembler::PatchRelocatedValue(Register li_location,
// At this point scratch is a lui(at, ...) instruction.
if (emit_debug_code()) {
And(scratch, scratch, kOpcodeMask);
Check(eq, "The instruction to patch should be a lui.",
Check(eq, kTheInstructionToPatchShouldBeALui,
scratch, Operand(LUI));
lw(scratch, MemOperand(li_location));
}
@ -5139,7 +5140,7 @@ void MacroAssembler::PatchRelocatedValue(Register li_location,
// scratch is now ori(at, ...).
if (emit_debug_code()) {
And(scratch, scratch, kOpcodeMask);
Check(eq, "The instruction to patch should be an ori.",
Check(eq, kTheInstructionToPatchShouldBeAnOri,
scratch, Operand(ORI));
lw(scratch, MemOperand(li_location, kInstrSize));
}
@ -5156,7 +5157,7 @@ void MacroAssembler::GetRelocatedValue(Register li_location,
lw(value, MemOperand(li_location));
if (emit_debug_code()) {
And(value, value, kOpcodeMask);
Check(eq, "The instruction should be a lui.",
Check(eq, kTheInstructionShouldBeALui,
value, Operand(LUI));
lw(value, MemOperand(li_location));
}
@ -5167,7 +5168,7 @@ void MacroAssembler::GetRelocatedValue(Register li_location,
lw(scratch, MemOperand(li_location, kInstrSize));
if (emit_debug_code()) {
And(scratch, scratch, kOpcodeMask);
Check(eq, "The instruction should be an ori.",
Check(eq, kTheInstructionShouldBeAnOri,
scratch, Operand(ORI));
lw(scratch, MemOperand(li_location, kInstrSize));
}
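The Abort path above keeps its old trick: the message pointer is smuggled to the runtime as two smi-safe values, so a GC walking the stack never mistakes either for a heap pointer. A standalone sketch of the encoding, assuming V8's one-bit smi tag (tag 0 in the low bit); EncodeAbortMessage is a hypothetical helper, not part of the diff:

#include <cassert>
#include <cstdint>

const intptr_t kSmiTag = 0;
const intptr_t kSmiTagMask = 1;

void EncodeAbortMessage(const char* msg, intptr_t* p0, intptr_t* delta_smi) {
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  *p0 = (p1 & ~kSmiTagMask) + kSmiTag;  // low bit cleared: reads as a smi
  *delta_smi = (p1 - *p0) << 1;         // remainder re-tagged like Smi::FromInt
  assert((*p0 & kSmiTagMask) == kSmiTag);
}

The receiver recovers the real pointer as p0 + (delta_smi >> 1); the only change in this commit is that the char* now comes from GetBailoutReason(reason) instead of the caller.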
@ -1286,15 +1286,15 @@ class MacroAssembler: public Assembler {

// Calls Abort(msg) if the condition cc is not satisfied.
// Use --debug_code to enable.
void Assert(Condition cc, const char* msg, Register rs, Operand rt);
void Assert(Condition cc, BailoutReason reason, Register rs, Operand rt);
void AssertRegisterIsRoot(Register reg, Heap::RootListIndex index);
void AssertFastElements(Register elements);

// Like Assert(), but always enabled.
void Check(Condition cc, const char* msg, Register rs, Operand rt);
void Check(Condition cc, BailoutReason reason, Register rs, Operand rt);

// Print a message to stdout and abort execution.
void Abort(const char* msg);
void Abort(BailoutReason msg);

// Verify restrictions about code generated in stubs.
void set_generating_stub(bool value) { generating_stub_ = value; }
@ -1378,7 +1378,7 @@ class MacroAssembler: public Assembler {
// enabled via --debug-code.
void AssertRootValue(Register src,
Heap::RootListIndex root_value_index,
const char* message);
BailoutReason reason);

// ---------------------------------------------------------------------------
// HeapNumber utilities.
@ -9804,7 +9804,7 @@ void SharedFunctionInfo::EnableDeoptimizationSupport(Code* recompiled) {
}

void SharedFunctionInfo::DisableOptimization(const char* reason) {
void SharedFunctionInfo::DisableOptimization(BailoutReason reason) {
// Disable optimization for the shared function info and mark the
// code as non-optimizable. The marker on the shared function info
// is there because we flush non-optimized code thereby loosing the
@ -9822,7 +9822,7 @@ void SharedFunctionInfo::DisableOptimization(const char* reason) {
if (FLAG_trace_opt) {
PrintF("[disabled optimization for ");
ShortPrint();
PrintF(", reason: %s]\n", reason);
PrintF(", reason: %s]\n", GetBailoutReason(reason));
}
}

@ -15964,4 +15964,15 @@ void PropertyCell::AddDependentCode(Handle<Code> code) {
}

const char* GetBailoutReason(BailoutReason reason) {
ASSERT(reason < kLastErrorMessage);
#define ERROR_MESSAGES_TEXTS(C, T) T,
static const char* error_messages_[] = {
ERROR_MESSAGES_LIST(ERROR_MESSAGES_TEXTS)
};
#undef ERROR_MESSAGES_TEXTS
return error_messages_[reason];
}

} } // namespace v8::internal
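GetBailoutReason is the string half of an X-macro: ERROR_MESSAGES_LIST is expanded once with a constants adapter (in objects.h, next file) and once with a texts adapter here, so the enum and the message table can never drift apart. The same trick in miniature, with invented demo names for illustration:

// One list, two expansions; editing the list updates both in sync.
#define DEMO_MESSAGES_LIST(V)            \
  V(kDemoNoReason, "no reason")          \
  V(kDemoTooLarge, "code is too large")

#define DEMO_CONSTANTS(C, T) C,
enum DemoReason { DEMO_MESSAGES_LIST(DEMO_CONSTANTS) kDemoLast };
#undef DEMO_CONSTANTS

const char* GetDemoReason(DemoReason reason) {
#define DEMO_TEXTS(C, T) T,
  static const char* texts[] = { DEMO_MESSAGES_LIST(DEMO_TEXTS) };
#undef DEMO_TEXTS
  return texts[reason];
}
// usage: printf("reason: %s\n", GetDemoReason(kDemoTooLarge));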
src/objects.h
@ -1046,7 +1046,287 @@ class MaybeObject BASE_EMBEDDED {
V(AccessCheckNeeded) \
V(Cell) \
V(PropertyCell) \
V(ObjectHashTable) \
V(ObjectHashTable)


#define ERROR_MESSAGES_LIST(V) \
V(kNoReason, "no reason") \
\
V(k32BitValueInRegisterIsNotZeroExtended, \
"32 bit value in register is not zero-extended") \
V(kAlignmentMarkerExpected, "alignment marker expected") \
V(kAllocationIsNotDoubleAligned, "Allocation is not double aligned") \
V(kAPICallReturnedInvalidObject, "API call returned invalid object") \
V(kArgumentsObjectValueInATestContext, \
"arguments object value in a test context") \
V(kArrayBoilerplateCreationFailed, "array boilerplate creation failed") \
V(kArrayIndexConstantValueTooBig, "array index constant value too big") \
V(kAssignmentToArguments, "assignment to arguments") \
V(kAssignmentToLetVariableBeforeInitialization, \
"assignment to let variable before initialization") \
V(kAssignmentToLOOKUPVariable, "assignment to LOOKUP variable") \
V(kAssignmentToParameterFunctionUsesArgumentsObject, \
"assignment to parameter, function uses arguments object") \
V(kAssignmentToParameterInArgumentsObject, \
"assignment to parameter in arguments object") \
V(kAttemptToUseUndefinedCache, "Attempt to use undefined cache") \
V(kBadValueContextForArgumentsObjectValue, \
"bad value context for arguments object value") \
V(kBadValueContextForArgumentsValue, \
"bad value context for arguments value") \
V(kBailedOutDueToDependentMap, "bailed out due to dependent map") \
V(kBailoutWasNotPrepared, "bailout was not prepared") \
V(kBinaryStubGenerateFloatingPointCode, \
"BinaryStub_GenerateFloatingPointCode") \
V(kBothRegistersWereSmisInSelectNonSmi, \
"Both registers were smis in SelectNonSmi") \
V(kCallToAJavaScriptRuntimeFunction, \
"call to a JavaScript runtime function") \
V(kCannotTranslatePositionInChangedArea, \
"Cannot translate position in changed area") \
V(kCodeGenerationFailed, "code generation failed") \
V(kCodeObjectNotProperlyPatched, "code object not properly patched") \
V(kCompoundAssignmentToLookupSlot, "compound assignment to lookup slot") \
V(kContextAllocatedArguments, "context-allocated arguments") \
V(kDebuggerIsActive, "debugger is active") \
V(kDebuggerStatement, "DebuggerStatement") \
V(kDeclarationInCatchContext, "Declaration in catch context") \
V(kDeclarationInWithContext, "Declaration in with context") \
V(kDefaultNaNModeNotSet, "Default NaN mode not set") \
V(kDeleteWithGlobalVariable, "delete with global variable") \
V(kDeleteWithNonGlobalVariable, "delete with non-global variable") \
V(kDestinationOfCopyNotAligned, "Destination of copy not aligned") \
V(kDontDeleteCellsCannotContainTheHole, \
"DontDelete cells can't contain the hole") \
V(kDoPushArgumentNotImplementedForDoubleType, \
"DoPushArgument not implemented for double type") \
V(kEmitLoadRegisterUnsupportedDoubleImmediate, \
"EmitLoadRegister: Unsupported double immediate") \
V(kEval, "eval") \
V(kExpected0AsASmiSentinel, "Expected 0 as a Smi sentinel") \
V(kExpectedAlignmentMarker, "expected alignment marker") \
V(kExpectedPropertyCellInRegisterA2, \
"Expected property cell in register a2") \
V(kExpectedPropertyCellInRegisterEbx, \
"Expected property cell in register ebx") \
V(kExpectedPropertyCellInRegisterRbx, \
"Expected property cell in register rbx") \
V(kExpectingAlignmentForCopyBytes, \
"Expecting alignment for CopyBytes") \
V(kExternalStringExpectedButNotFound, \
"external string expected, but not found") \
V(kFailedBailedOutLastTime, "failed/bailed out last time") \
V(kForInStatementIsNotFastCase, "ForInStatement is not fast case") \
V(kForInStatementOptimizationIsDisabled, \
"ForInStatement optimization is disabled") \
V(kForInStatementWithNonLocalEachVariable, \
"ForInStatement with non-local each variable") \
V(kForOfStatement, "ForOfStatement") \
V(kFrameIsExpectedToBeAligned, "frame is expected to be aligned") \
V(kFunctionCallsEval, "function calls eval") \
V(kFunctionIsAGenerator, "function is a generator") \
V(kFunctionWithIllegalRedeclaration, "function with illegal redeclaration") \
V(kGeneratedCodeIsTooLarge, "Generated code is too large") \
V(kGeneratorFailedToResume, "Generator failed to resume") \
V(kGenerator, "generator") \
V(kGlobalFunctionsMustHaveInitialMap, \
"Global functions must have initial map") \
V(kHeapNumberMapRegisterClobbered, "HeapNumberMap register clobbered") \
V(kImproperObjectOnPrototypeChainForStore, \
"improper object on prototype chain for store") \
V(kIndexIsNegative, "Index is negative") \
V(kIndexIsTooLarge, "Index is too large") \
V(kInlinedRuntimeFunctionClassOf, "inlined runtime function: ClassOf") \
V(kInlinedRuntimeFunctionFastAsciiArrayJoin, \
"inlined runtime function: FastAsciiArrayJoin") \
V(kInlinedRuntimeFunctionGeneratorNext, \
"inlined runtime function: GeneratorNext") \
V(kInlinedRuntimeFunctionGeneratorThrow, \
"inlined runtime function: GeneratorThrow") \
V(kInlinedRuntimeFunctionGetFromCache, \
"inlined runtime function: GetFromCache") \
V(kInlinedRuntimeFunctionIsNonNegativeSmi, \
"inlined runtime function: IsNonNegativeSmi") \
V(kInlinedRuntimeFunctionIsRegExpEquivalent, \
"inlined runtime function: IsRegExpEquivalent") \
V(kInlinedRuntimeFunctionIsStringWrapperSafeForDefaultValueOf, \
"inlined runtime function: IsStringWrapperSafeForDefaultValueOf") \
V(kInliningBailedOut, "inlining bailed out") \
V(kInputGPRIsExpectedToHaveUpper32Cleared, \
"input GPR is expected to have upper32 cleared") \
V(kInstanceofStubUnexpectedCallSiteCacheCheck, \
"InstanceofStub unexpected call site cache (check)") \
V(kInstanceofStubUnexpectedCallSiteCacheCmp1, \
"InstanceofStub unexpected call site cache (cmp 1)") \
V(kInstanceofStubUnexpectedCallSiteCacheCmp2, \
"InstanceofStub unexpected call site cache (cmp 2)") \
V(kInstanceofStubUnexpectedCallSiteCacheMov, \
"InstanceofStub unexpected call site cache (mov)") \
V(kInteger32ToSmiFieldWritingToNonSmiLocation, \
"Integer32ToSmiField writing to non-smi location") \
V(kInvalidCaptureReferenced, "Invalid capture referenced") \
V(kInvalidElementsKindForInternalArrayOrInternalPackedArray, \
"Invalid ElementsKind for InternalArray or InternalPackedArray") \
V(kInvalidHandleScopeLevel, "Invalid HandleScope level") \
V(kInvalidLeftHandSideInAssignment, "invalid left-hand side in assignment") \
V(kInvalidLhsInCompoundAssignment, "invalid lhs in compound assignment") \
V(kInvalidLhsInCountOperation, "invalid lhs in count operation") \
V(kInvalidMinLength, "Invalid min_length") \
V(kJSGlobalObjectNativeContextShouldBeANativeContext, \
"JSGlobalObject::native_context should be a native context") \
V(kJSGlobalProxyContextShouldNotBeNull, \
"JSGlobalProxy::context() should not be null") \
V(kJSObjectWithFastElementsMapHasSlowElements, \
"JSObject with fast elements map has slow elements") \
V(kLetBindingReInitialization, "Let binding re-initialization") \
V(kLiveBytesCountOverflowChunkSize, "Live Bytes Count overflow chunk size") \
V(kLiveEditFrameDroppingIsNotSupportedOnArm, \
"LiveEdit frame dropping is not supported on arm") \
V(kLiveEditFrameDroppingIsNotSupportedOnMips, \
"LiveEdit frame dropping is not supported on mips") \
V(kLiveEdit, "LiveEdit") \
V(kLookupVariableInCountOperation, \
"lookup variable in count operation") \
V(kMapIsNoLongerInEax, "Map is no longer in eax") \
V(kNoCasesLeft, "no cases left") \
V(kNoEmptyArraysHereInEmitFastAsciiArrayJoin, \
"No empty arrays here in EmitFastAsciiArrayJoin") \
V(kNonInitializerAssignmentToConst, \
"non-initializer assignment to const") \
V(kNonSmiIndex, "Non-smi index") \
V(kNonSmiKeyInArrayLiteral, "Non-smi key in array literal") \
V(kNonSmiValue, "Non-smi value") \
V(kNotEnoughVirtualRegistersForValues, \
"not enough virtual registers for values") \
V(kNotEnoughVirtualRegistersRegalloc, \
"not enough virtual registers (regalloc)") \
V(kObjectFoundInSmiOnlyArray, "object found in smi-only array") \
V(kObjectLiteralWithComplexProperty, \
"Object literal with complex property") \
V(kOddballInStringTableIsNotUndefinedOrTheHole, \
"oddball in string table is not undefined or the hole") \
V(kOperandIsASmiAndNotAName, "Operand is a smi and not a name") \
V(kOperandIsASmiAndNotAString, "Operand is a smi and not a string") \
V(kOperandIsASmi, "Operand is a smi") \
V(kOperandIsNotAName, "Operand is not a name") \
V(kOperandIsNotANumber, "Operand is not a number") \
V(kOperandIsNotASmi, "Operand is not a smi") \
V(kOperandIsNotAString, "Operand is not a string") \
V(kOperandIsNotSmi, "Operand is not smi") \
V(kOperandNotANumber, "Operand not a number") \
V(kOptimizedTooManyTimes, "optimized too many times") \
V(kOutOfVirtualRegistersWhileTryingToAllocateTempRegister, \
"Out of virtual registers while trying to allocate temp register") \
V(kParseScopeError, "parse/scope error") \
V(kPossibleDirectCallToEval, "possible direct call to eval") \
V(kPropertyAllocationCountFailed, "Property allocation count failed") \
V(kReceivedInvalidReturnAddress, "Received invalid return address") \
V(kReferenceToAVariableWhichRequiresDynamicLookup, \
"reference to a variable which requires dynamic lookup") \
V(kReferenceToGlobalLexicalVariable, \
"reference to global lexical variable") \
V(kReferenceToUninitializedVariable, "reference to uninitialized variable") \
V(kRegisterDidNotMatchExpectedRoot, "Register did not match expected root") \
V(kRegisterWasClobbered, "register was clobbered") \
V(kScopedBlock, "ScopedBlock") \
V(kSharedFunctionInfoLiteral, "SharedFunctionInfoLiteral") \
V(kSmiAdditionOverflow, "Smi addition overflow") \
V(kSmiSubtractionOverflow, "Smi subtraction overflow") \
V(kStackFrameTypesMustMatch, "stack frame types must match") \
V(kSwitchStatementMixedOrNonLiteralSwitchLabels, \
"SwitchStatement: mixed or non-literal switch labels") \
V(kSwitchStatementTooManyClauses, "SwitchStatement: too many clauses") \
V(kTheInstructionShouldBeALui, "The instruction should be a lui") \
V(kTheInstructionShouldBeAnOri, "The instruction should be an ori") \
V(kTheInstructionToPatchShouldBeALoadFromPc, \
"The instruction to patch should be a load from pc") \
V(kTheInstructionToPatchShouldBeALui, \
"The instruction to patch should be a lui") \
V(kTheInstructionToPatchShouldBeAnOri, \
"The instruction to patch should be an ori") \
V(kTooManyParametersLocals, "too many parameters/locals") \
V(kTooManyParameters, "too many parameters") \
V(kTooManySpillSlotsNeededForOSR, "Too many spill slots needed for OSR") \
V(kToOperandIsDoubleRegisterUnimplemented, \
"ToOperand IsDoubleRegister unimplemented") \
V(kToOperandUnsupportedDoubleImmediate, \
"ToOperand Unsupported double immediate") \
V(kTryCatchStatement, "TryCatchStatement") \
V(kTryFinallyStatement, "TryFinallyStatement") \
V(kUnableToEncodeValueAsSmi, "Unable to encode value as smi") \
V(kUnalignedAllocationInNewSpace, "Unaligned allocation in new space") \
V(kUndefinedValueNotLoaded, "Undefined value not loaded") \
V(kUndoAllocationOfNonAllocatedMemory, \
"Undo allocation of non allocated memory") \
V(kUnexpectedAllocationTop, "Unexpected allocation top") \
V(kUnexpectedElementsKindInArrayConstructor, \
"Unexpected ElementsKind in array constructor") \
V(kUnexpectedFallthroughFromCharCodeAtSlowCase, \
"Unexpected fallthrough from CharCodeAt slow case") \
V(kUnexpectedFallthroughFromCharFromCodeSlowCase, \
"Unexpected fallthrough from CharFromCode slow case") \
V(kUnexpectedFallThroughFromStringComparison, \
"Unexpected fall-through from string comparison") \
V(kUnexpectedFallThroughInBinaryStubGenerateFloatingPointCode, \
"Unexpected fall-through in BinaryStub_GenerateFloatingPointCode") \
V(kUnexpectedFallthroughToCharCodeAtSlowCase, \
"Unexpected fallthrough to CharCodeAt slow case") \
V(kUnexpectedFallthroughToCharFromCodeSlowCase, \
"Unexpected fallthrough to CharFromCode slow case") \
V(kUnexpectedFPUStackDepthAfterInstruction, \
"Unexpected FPU stack depth after instruction") \
V(kUnexpectedInitialMapForArrayFunction1, \
"Unexpected initial map for Array function (1)") \
V(kUnexpectedInitialMapForArrayFunction2, \
"Unexpected initial map for Array function (2)") \
V(kUnexpectedInitialMapForArrayFunction, \
"Unexpected initial map for Array function") \
V(kUnexpectedInitialMapForInternalArrayFunction, \
"Unexpected initial map for InternalArray function") \
V(kUnexpectedLevelAfterReturnFromApiCall, \
"Unexpected level after return from api call") \
V(kUnexpectedNumberOfPreAllocatedPropertyFields, \
"Unexpected number of pre-allocated property fields") \
V(kUnexpectedStringFunction, "Unexpected String function") \
V(kUnexpectedStringType, "Unexpected string type") \
V(kUnexpectedStringWrapperInstanceSize, \
"Unexpected string wrapper instance size") \
V(kUnexpectedTypeForRegExpDataFixedArrayExpected, \
"Unexpected type for RegExp data, FixedArray expected") \
V(kUnexpectedUnusedPropertiesOfStringWrapper, \
"Unexpected unused properties of string wrapper") \
V(kUninitializedKSmiConstantRegister, "Uninitialized kSmiConstantRegister") \
V(kUnknown, "unknown") \
V(kUnsupportedConstCompoundAssignment, \
"unsupported const compound assignment") \
V(kUnsupportedCountOperationWithConst, \
"unsupported count operation with const") \
V(kUnsupportedDoubleImmediate, "unsupported double immediate") \
V(kUnsupportedLetCompoundAssignment, "unsupported let compound assignment") \
V(kUnsupportedLookupSlotInDeclaration, \
"unsupported lookup slot in declaration") \
V(kUnsupportedNonPrimitiveCompare, "Unsupported non-primitive compare") \
V(kUnsupportedPhiUseOfArguments, "Unsupported phi use of arguments") \
V(kUnsupportedPhiUseOfConstVariable, \
"Unsupported phi use of const variable") \
V(kUnsupportedTaggedImmediate, "unsupported tagged immediate") \
V(kVariableResolvedToWithContext, "Variable resolved to with context") \
V(kWeShouldNotHaveAnEmptyLexicalContext, \
"we should not have an empty lexical context") \
V(kWithStatement, "WithStatement") \
V(kWrongAddressOrValuePassedToRecordWrite, \
"Wrong address or value passed to RecordWrite")


#define ERROR_MESSAGES_CONSTANTS(C, T) C,
enum BailoutReason {
ERROR_MESSAGES_LIST(ERROR_MESSAGES_CONSTANTS)
kLastErrorMessage
};
#undef ERROR_MESSAGES_CONSTANTS

const char* GetBailoutReason(BailoutReason reason);
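The commit message's one-byte claim hangs on the kLastErrorMessage sentinel above: with 184 reasons the enum stays well under 256, so SharedFunctionInfo can store the reason in a single byte field instead of an 8-byte string pointer. A guard in that spirit, using V8's own STATIC_ASSERT macro (the guard itself is hypothetical, not part of this diff):

// Hypothetical guard: fails to compile if the list outgrows one byte.
STATIC_ASSERT(kLastErrorMessage <= 0x100);
// storage sketch: shared->set_bailout_reason(static_cast<byte>(reason));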
||||
|
||||
// Object is the abstract superclass for all classes in the
|
||||
@ -6305,7 +6585,7 @@ class SharedFunctionInfo: public HeapObject {
|
||||
|
||||
// Disable (further) attempted optimization of all functions sharing this
|
||||
// shared function info.
|
||||
void DisableOptimization(const char* reason);
|
||||
void DisableOptimization(BailoutReason reason);
|
||||
|
||||
// Lookup the bailout ID and ASSERT that it exists in the non-optimized
|
||||
// code, returns whether it asserted (i.e., always true if assertions are
|
||||
|
@ -249,7 +249,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
|
||||
if (FLAG_debug_code) {
|
||||
__ cmpq(rsi, rdi);
|
||||
__ Assert(less_equal,
|
||||
"Unexpected number of pre-allocated property fields.");
|
||||
kUnexpectedNumberOfPreAllocatedPropertyFields);
|
||||
}
|
||||
__ InitializeFieldsWithFiller(rcx, rsi, rdx);
|
||||
__ LoadRoot(rdx, Heap::kOnePointerFillerMapRootIndex);
|
||||
@ -280,7 +280,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
|
||||
__ subq(rdx, rcx);
|
||||
// Done if no extra properties are to be allocated.
|
||||
__ j(zero, &allocated);
|
||||
__ Assert(positive, "Property allocation count failed.");
|
||||
__ Assert(positive, kPropertyAllocationCountFailed);
|
||||
|
||||
// Scale the number of elements by pointer size and add the header for
|
||||
// FixedArrays to the start of the next object calculation from above.
|
||||
@ -723,7 +723,7 @@ static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
|
||||
__ ret(2 * kPointerSize); // Remove state, rax.
|
||||
|
||||
__ bind(¬_tos_rax);
|
||||
__ Abort("no cases left");
|
||||
__ Abort(kNoCasesLeft);
|
||||
}
|
||||
|
||||
|
||||
@ -1113,9 +1113,9 @@ void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
|
||||
// Will both indicate a NULL and a Smi.
|
||||
STATIC_ASSERT(kSmiTag == 0);
|
||||
Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
|
||||
__ Check(not_smi, "Unexpected initial map for InternalArray function");
|
||||
__ Check(not_smi, kUnexpectedInitialMapForInternalArrayFunction);
|
||||
__ CmpObjectType(rbx, MAP_TYPE, rcx);
|
||||
__ Check(equal, "Unexpected initial map for InternalArray function");
|
||||
__ Check(equal, kUnexpectedInitialMapForInternalArrayFunction);
|
||||
}
|
||||
|
||||
// Run the native code for the InternalArray function called as a normal
|
||||
@ -1143,9 +1143,9 @@ void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
|
||||
// Will both indicate a NULL and a Smi.
|
||||
STATIC_ASSERT(kSmiTag == 0);
|
||||
Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
|
||||
__ Check(not_smi, "Unexpected initial map for Array function");
|
||||
__ Check(not_smi, kUnexpectedInitialMapForArrayFunction);
|
||||
__ CmpObjectType(rbx, MAP_TYPE, rcx);
|
||||
__ Check(equal, "Unexpected initial map for Array function");
|
||||
__ Check(equal, kUnexpectedInitialMapForArrayFunction);
|
||||
}
|
||||
|
||||
// Run the native code for the Array function called as a normal function.
|
||||
@ -1173,7 +1173,7 @@ void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
|
||||
if (FLAG_debug_code) {
|
||||
__ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, rcx);
|
||||
__ cmpq(rdi, rcx);
|
||||
__ Assert(equal, "Unexpected String function");
|
||||
__ Assert(equal, kUnexpectedStringFunction);
|
||||
}
|
||||
|
||||
// Load the first argument into rax and get rid of the rest
|
||||
@ -1219,9 +1219,9 @@ void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
|
||||
if (FLAG_debug_code) {
|
||||
__ cmpb(FieldOperand(rcx, Map::kInstanceSizeOffset),
|
||||
Immediate(JSValue::kSize >> kPointerSizeLog2));
|
||||
__ Assert(equal, "Unexpected string wrapper instance size");
|
||||
__ Assert(equal, kUnexpectedStringWrapperInstanceSize);
|
||||
__ cmpb(FieldOperand(rcx, Map::kUnusedPropertyFieldsOffset), Immediate(0));
|
||||
__ Assert(equal, "Unexpected unused properties of string wrapper");
|
||||
__ Assert(equal, kUnexpectedUnusedPropertiesOfStringWrapper);
|
||||
}
|
||||
__ movq(FieldOperand(rax, HeapObject::kMapOffset), rcx);
|
||||
|
||||
|
@ -511,9 +511,8 @@ void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
|
||||
Label after_sentinel;
|
||||
__ JumpIfNotSmi(rcx, &after_sentinel, Label::kNear);
|
||||
if (FLAG_debug_code) {
|
||||
const char* message = "Expected 0 as a Smi sentinel";
|
||||
__ cmpq(rcx, Immediate(0));
|
||||
__ Assert(equal, message);
|
||||
__ Assert(equal, kExpected0AsASmiSentinel);
|
||||
}
|
||||
__ movq(rcx, GlobalObjectOperand());
|
||||
__ movq(rcx, FieldOperand(rcx, GlobalObject::kNativeContextOffset));
|
||||
@ -954,7 +953,7 @@ static void BinaryOpStub_GenerateFloatingPointCode(MacroAssembler* masm,
|
||||
// Set the map.
|
||||
__ AssertRootValue(heap_number_map,
|
||||
Heap::kHeapNumberMapRootIndex,
|
||||
"HeapNumberMap register clobbered.");
|
||||
kHeapNumberMapRegisterClobbered);
|
||||
__ movq(FieldOperand(rax, HeapObject::kMapOffset),
|
||||
heap_number_map);
|
||||
__ cvtqsi2sd(xmm0, rbx);
|
||||
@ -974,8 +973,7 @@ static void BinaryOpStub_GenerateFloatingPointCode(MacroAssembler* masm,
|
||||
}
|
||||
// No fall-through from this generated code.
|
||||
if (FLAG_debug_code) {
|
||||
__ Abort("Unexpected fall-through in "
|
||||
"BinaryStub_GenerateFloatingPointCode.");
|
||||
__ Abort(kUnexpectedFallThroughInBinaryStubGenerateFloatingPointCode);
|
||||
}
|
||||
}
|
||||
|
||||
@ -2616,9 +2614,9 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
|
||||
if (FLAG_debug_code) {
|
||||
Condition is_smi = masm->CheckSmi(rax);
|
||||
__ Check(NegateCondition(is_smi),
|
||||
"Unexpected type for RegExp data, FixedArray expected");
|
||||
kUnexpectedTypeForRegExpDataFixedArrayExpected);
|
||||
__ CmpObjectType(rax, FIXED_ARRAY_TYPE, kScratchRegister);
|
||||
__ Check(equal, "Unexpected type for RegExp data, FixedArray expected");
|
||||
__ Check(equal, kUnexpectedTypeForRegExpDataFixedArrayExpected);
|
||||
}
|
||||
|
||||
// rax: RegExp data (FixedArray)
|
||||
@ -2984,7 +2982,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
|
||||
// Assert that we do not have a cons or slice (indirect strings) here.
|
||||
// Sequential strings have already been ruled out.
|
||||
__ testb(rbx, Immediate(kIsIndirectStringMask));
|
||||
__ Assert(zero, "external string expected, but not found");
|
||||
__ Assert(zero, kExternalStringExpectedButNotFound);
|
||||
}
|
||||
__ movq(rdi, FieldOperand(rdi, ExternalString::kResourceDataOffset));
|
||||
// Move the pointer so that offset-wise, it looks like a sequential string.
|
||||
@ -3448,7 +3446,7 @@ void ICCompareStub::GenerateGeneric(MacroAssembler* masm) {
|
||||
}
|
||||
|
||||
#ifdef DEBUG
|
||||
__ Abort("Unexpected fall-through from string comparison");
|
||||
__ Abort(kUnexpectedFallThroughFromStringComparison);
|
||||
#endif
|
||||
|
||||
__ bind(&check_unequal_objects);
|
||||
@ -4275,7 +4273,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
|
||||
if (FLAG_debug_code) {
|
||||
__ movl(rdi, Immediate(kWordBeforeMapCheckValue));
|
||||
__ cmpl(Operand(kScratchRegister, kOffsetToMapCheckValue - 4), rdi);
|
||||
__ Assert(equal, "InstanceofStub unexpected call site cache (check).");
|
||||
__ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheCheck);
|
||||
}
|
||||
__ movq(kScratchRegister,
|
||||
Operand(kScratchRegister, kOffsetToMapCheckValue));
|
||||
@ -4317,7 +4315,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
|
||||
if (FLAG_debug_code) {
|
||||
__ movl(rax, Immediate(kWordBeforeResultValue));
|
||||
__ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax);
|
||||
__ Assert(equal, "InstanceofStub unexpected call site cache (mov).");
|
||||
__ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheMov);
|
||||
}
|
||||
__ Set(rax, 0);
|
||||
}
|
||||
@ -4340,7 +4338,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
|
||||
if (FLAG_debug_code) {
|
||||
__ movl(rax, Immediate(kWordBeforeResultValue));
|
||||
__ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax);
|
||||
__ Assert(equal, "InstanceofStub unexpected call site cache (mov)");
|
||||
__ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheMov);
|
||||
}
|
||||
}
|
||||
__ ret(2 * kPointerSize + extra_stack_space);
|
||||
@ -4404,7 +4402,7 @@ void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
|
||||
void StringCharCodeAtGenerator::GenerateSlow(
|
||||
MacroAssembler* masm,
|
||||
const RuntimeCallHelper& call_helper) {
|
||||
__ Abort("Unexpected fallthrough to CharCodeAt slow case");
|
||||
__ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase);
|
||||
|
||||
Factory* factory = masm->isolate()->factory();
|
||||
// Index is not a smi.
|
||||
@ -4454,7 +4452,7 @@ void StringCharCodeAtGenerator::GenerateSlow(
|
||||
call_helper.AfterCall(masm);
|
||||
__ jmp(&exit_);
|
||||
|
||||
__ Abort("Unexpected fallthrough from CharCodeAt slow case");
|
||||
__ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase);
|
||||
}
|
||||
|
||||
|
||||
@ -4480,7 +4478,7 @@ void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
|
||||
void StringCharFromCodeGenerator::GenerateSlow(
|
||||
MacroAssembler* masm,
|
||||
const RuntimeCallHelper& call_helper) {
|
||||
__ Abort("Unexpected fallthrough to CharFromCode slow case");
|
||||
__ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase);
|
||||
|
||||
__ bind(&slow_case_);
|
||||
call_helper.BeforeCall(masm);
|
||||
@ -4492,7 +4490,7 @@ void StringCharFromCodeGenerator::GenerateSlow(
|
||||
call_helper.AfterCall(masm);
|
||||
__ jmp(&exit_);
|
||||
|
||||
__ Abort("Unexpected fallthrough from CharFromCode slow case");
|
||||
__ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase);
|
||||
}
|
||||
|
||||
|
||||
@ -5040,7 +5038,7 @@ void StringHelper::GenerateTwoCharacterStringTableProbe(MacroAssembler* masm,
|
||||
if (FLAG_debug_code) {
|
||||
__ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
|
||||
__ cmpq(kScratchRegister, candidate);
|
||||
__ Assert(equal, "oddball in string table is not undefined or the hole");
|
||||
__ Assert(equal, kOddballInStringTableIsNotUndefinedOrTheHole);
|
||||
}
|
||||
__ jmp(&next_probe[i]);
|
||||
|
||||
@ -6539,7 +6537,7 @@ static void CreateArrayDispatch(MacroAssembler* masm) {
|
||||
}
|
||||
|
||||
// If we reached this point there is a problem.
|
||||
__ Abort("Unexpected ElementsKind in array constructor");
|
||||
__ Abort(kUnexpectedElementsKindInArrayConstructor);
|
||||
}
|
||||
|
||||
|
||||
@ -6602,7 +6600,7 @@ static void CreateArrayDispatchOneArgument(MacroAssembler* masm) {
|
||||
}
|
||||
|
||||
// If we reached this point there is a problem.
|
||||
__ Abort("Unexpected ElementsKind in array constructor");
|
||||
__ Abort(kUnexpectedElementsKindInArrayConstructor);
|
||||
}
|
||||
|
||||
|
||||
@ -6668,9 +6666,9 @@ void ArrayConstructorStub::Generate(MacroAssembler* masm) {
|
||||
// Will both indicate a NULL and a Smi.
|
||||
STATIC_ASSERT(kSmiTag == 0);
|
||||
Condition not_smi = NegateCondition(masm->CheckSmi(rcx));
|
||||
__ Check(not_smi, "Unexpected initial map for Array function");
|
||||
__ Check(not_smi, kUnexpectedInitialMapForArrayFunction);
|
||||
__ CmpObjectType(rcx, MAP_TYPE, rcx);
|
||||
__ Check(equal, "Unexpected initial map for Array function");
|
||||
__ Check(equal, kUnexpectedInitialMapForArrayFunction);
|
||||
|
||||
// We should either have undefined in rbx or a valid cell
|
||||
Label okay_here;
|
||||
@ -6678,7 +6676,7 @@ void ArrayConstructorStub::Generate(MacroAssembler* masm) {
|
||||
__ Cmp(rbx, undefined_sentinel);
|
||||
__ j(equal, &okay_here);
|
||||
__ Cmp(FieldOperand(rbx, 0), cell_map);
|
||||
__ Assert(equal, "Expected property cell in register rbx");
|
||||
__ Assert(equal, kExpectedPropertyCellInRegisterRbx);
|
||||
__ bind(&okay_here);
|
||||
}
|
||||
|
||||
@ -6783,9 +6781,9 @@ void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
|
||||
// Will both indicate a NULL and a Smi.
|
||||
STATIC_ASSERT(kSmiTag == 0);
|
||||
Condition not_smi = NegateCondition(masm->CheckSmi(rcx));
|
||||
__ Check(not_smi, "Unexpected initial map for Array function");
|
||||
__ Check(not_smi, kUnexpectedInitialMapForArrayFunction);
|
||||
__ CmpObjectType(rcx, MAP_TYPE, rcx);
|
||||
__ Check(equal, "Unexpected initial map for Array function");
|
||||
__ Check(equal, kUnexpectedInitialMapForArrayFunction);
|
||||
}
|
||||
|
||||
// Figure out the right elements kind
|
||||
@ -6804,7 +6802,7 @@ void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
|
||||
__ j(equal, &done);
|
||||
__ cmpl(rcx, Immediate(FAST_HOLEY_ELEMENTS));
|
||||
__ Assert(equal,
|
||||
"Invalid ElementsKind for InternalArray or InternalPackedArray");
|
||||
kInvalidElementsKindForInternalArrayOrInternalPackedArray);
|
||||
__ bind(&done);
|
||||
}
|
||||
|
||||
|
@ -394,7 +394,7 @@ void ElementsTransitionGenerator::GenerateSmiToDouble(
|
||||
|
||||
if (FLAG_debug_code) {
|
||||
__ CompareRoot(rbx, Heap::kTheHoleValueRootIndex);
|
||||
__ Assert(equal, "object found in smi-only array");
|
||||
__ Assert(equal, kObjectFoundInSmiOnlyArray);
|
||||
}
|
||||
|
||||
__ movq(FieldOperand(r14, r9, times_8, FixedDoubleArray::kHeaderSize), r15);
|
||||
@ -577,7 +577,7 @@ void StringCharLoadGenerator::Generate(MacroAssembler* masm,
|
||||
// Assert that we do not have a cons or slice (indirect strings) here.
|
||||
// Sequential strings have already been ruled out.
|
||||
__ testb(result, Immediate(kIsIndirectStringMask));
|
||||
__ Assert(zero, "external string expected, but not found");
|
||||
__ Assert(zero, kExternalStringExpectedButNotFound);
|
||||
}
|
||||
// Rule out short external strings.
|
||||
STATIC_CHECK(kShortExternalStringTag != 0);
|
||||
|
@ -753,9 +753,9 @@ void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
|
||||
// Check that we're not inside a with or catch context.
|
||||
__ movq(rbx, FieldOperand(rsi, HeapObject::kMapOffset));
|
||||
__ CompareRoot(rbx, Heap::kWithContextMapRootIndex);
|
||||
__ Check(not_equal, "Declaration in with context.");
|
||||
__ Check(not_equal, kDeclarationInWithContext);
|
||||
__ CompareRoot(rbx, Heap::kCatchContextMapRootIndex);
|
||||
__ Check(not_equal, "Declaration in catch context.");
|
||||
__ Check(not_equal, kDeclarationInCatchContext);
|
||||
}
|
||||
}
|
||||
|
||||
@ -2192,7 +2192,7 @@ void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
|
||||
__ Push(Smi::FromInt(resume_mode));
|
||||
__ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
|
||||
// Not reached: the runtime call returns elsewhere.
|
||||
__ Abort("Generator failed to resume.");
|
||||
__ Abort(kGeneratorFailedToResume);
|
||||
|
||||
// Throw error if we attempt to operate on a running generator.
|
||||
__ bind(&wrong_state);
|
||||
@ -2456,7 +2456,7 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
|
||||
// Check for an uninitialized let binding.
|
||||
__ movq(rdx, location);
|
||||
__ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
|
||||
__ Check(equal, "Let binding re-initialization.");
|
||||
__ Check(equal, kLetBindingReInitialization);
|
||||
}
|
||||
// Perform the assignment.
|
||||
__ movq(location, rax);
|
||||
@ -3398,14 +3398,14 @@ void FullCodeGenerator::EmitSeqStringSetCharCheck(Register string,
|
||||
Register index,
|
||||
Register value,
|
||||
uint32_t encoding_mask) {
|
||||
__ Check(masm()->CheckSmi(index), "Non-smi index");
|
||||
__ Check(masm()->CheckSmi(value), "Non-smi value");
|
||||
__ Check(masm()->CheckSmi(index), kNonSmiIndex);
|
||||
__ Check(masm()->CheckSmi(value), kNonSmiValue);
|
||||
|
||||
__ SmiCompare(index, FieldOperand(string, String::kLengthOffset));
|
||||
__ Check(less, "Index is too large");
|
||||
__ Check(less, kIndexIsTooLarge);
|
||||
|
||||
__ SmiCompare(index, Smi::FromInt(0));
|
||||
__ Check(greater_equal, "Index is negative");
|
||||
__ Check(greater_equal, kIndexIsNegative);
|
||||
|
||||
__ push(value);
|
||||
__ movq(value, FieldOperand(string, HeapObject::kMapOffset));
|
||||
@ -3413,7 +3413,7 @@ void FullCodeGenerator::EmitSeqStringSetCharCheck(Register string,
|
||||
|
||||
__ andb(value, Immediate(kStringRepresentationMask | kStringEncodingMask));
|
||||
__ cmpq(value, Immediate(encoding_mask));
|
||||
__ Check(equal, "Unexpected string type");
|
||||
__ Check(equal, kUnexpectedStringType);
|
||||
__ pop(value);
|
||||
}
|
||||
|
||||
@ -3777,7 +3777,7 @@ void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
|
||||
Handle<FixedArray> jsfunction_result_caches(
|
||||
isolate()->native_context()->jsfunction_result_caches());
|
||||
if (jsfunction_result_caches->length() <= cache_id) {
|
||||
__ Abort("Attempt to use undefined cache.");
|
||||
__ Abort(kAttemptToUseUndefinedCache);
|
||||
__ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
|
||||
context()->Plug(rax);
|
||||
return;
|
||||
@ -3971,7 +3971,7 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
|
||||
// scratch, string_length(int32), elements(FixedArray*).
|
||||
if (generate_debug_code_) {
|
||||
__ cmpq(index, array_length);
|
||||
__ Assert(below, "No empty arrays here in EmitFastAsciiArrayJoin");
|
||||
__ Assert(below, kNoEmptyArraysHereInEmitFastAsciiArrayJoin);
|
||||
}
|
||||
__ bind(&loop);
|
||||
__ movq(string, FieldOperand(elements,
|
||||
|
@ -96,7 +96,7 @@ void LCodeGen::FinishCode(Handle<Code> code) {
|
||||
}
|
||||
|
||||
|
||||
void LChunkBuilder::Abort(const char* reason) {
|
||||
void LChunkBuilder::Abort(BailoutReason reason) {
|
||||
info()->set_bailout_reason(reason);
|
||||
status_ = ABORTED;
|
||||
}
|
||||
@ -661,7 +661,7 @@ void LCodeGen::DeoptimizeIf(Condition cc,
Address entry =
Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type);
if (entry == NULL) {
Abort("bailout was not prepared");
Abort(kBailoutWasNotPrepared);
return;
}

@ -1642,7 +1642,7 @@ void LCodeGen::DoSeqStringSetChar(LSeqStringSetChar* instr) {
static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
__ cmpq(value, Immediate(encoding == String::ONE_BYTE_ENCODING
? one_byte_seq_type : two_byte_seq_type));
__ Check(equal, "Unexpected string type");
__ Check(equal, kUnexpectedStringType);
__ pop(value);
}

@ -3088,7 +3088,7 @@ Operand LCodeGen::BuildFastArrayOperand(
if (key->IsConstantOperand()) {
int constant_value = ToInteger32(LConstantOperand::cast(key));
if (constant_value & 0xF0000000) {
Abort("array index constant value too big");
Abort(kArrayIndexConstantValueTooBig);
}
return Operand(elements_pointer_reg,
((constant_value + additional_index) << shift_size)

@ -179,7 +179,7 @@ class LCodeGen BASE_EMBEDDED {

int GetStackSlotCount() const { return chunk()->spill_slot_count(); }

void Abort(const char* reason);
void Abort(BailoutReason reason);
void FPRINTF_CHECKING Comment(const char* format, ...);

void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }

@ -443,7 +443,7 @@ LPlatformChunk* LChunkBuilder::Build() {
}


void LChunkBuilder::Abort(const char* reason) {
void LChunkBuilder::Abort(BailoutReason reason) {
info()->set_bailout_reason(reason);
status_ = ABORTED;
}
@ -654,7 +654,7 @@ LUnallocated* LChunkBuilder::TempRegister() {
new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER);
int vreg = allocator_->GetVirtualRegister();
if (!allocator_->AllocationOk()) {
Abort("Out of virtual registers while trying to allocate temp register.");
Abort(kOutOfVirtualRegistersWhileTryingToAllocateTempRegister);
vreg = 0;
}
operand->set_virtual_register(vreg);
@ -2368,7 +2368,7 @@ LInstruction* LChunkBuilder::DoParameter(HParameter* instr) {
LInstruction* LChunkBuilder::DoUnknownOSRValue(HUnknownOSRValue* instr) {
int spill_index = chunk()->GetNextSpillIndex(false); // Not double-width.
if (spill_index > LUnallocated::kMaxFixedSlotIndex) {
Abort("Too many spill slots needed for OSR");
Abort(kTooManySpillSlotsNeededForOSR);
spill_index = 0;
}
return DefineAsSpilled(new(zone()) LUnknownOSRValue, spill_index);

@ -2573,7 +2573,7 @@ class LChunkBuilder BASE_EMBEDDED {
bool is_done() const { return status_ == DONE; }
bool is_aborted() const { return status_ == ABORTED; }

void Abort(const char* reason);
void Abort(BailoutReason reason);

// Methods for getting operands for Use / Define / Temp.
LUnallocated* ToUnallocated(Register reg);

@ -449,8 +449,8 @@ void MacroAssembler::RecordWrite(Register object,
}


void MacroAssembler::Assert(Condition cc, const char* msg) {
if (emit_debug_code()) Check(cc, msg);
void MacroAssembler::Assert(Condition cc, BailoutReason reason) {
if (emit_debug_code()) Check(cc, reason);
}


@ -466,16 +466,16 @@ void MacroAssembler::AssertFastElements(Register elements) {
CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
Heap::kFixedCOWArrayMapRootIndex);
j(equal, &ok, Label::kNear);
Abort("JSObject with fast elements map has slow elements");
Abort(kJSObjectWithFastElementsMapHasSlowElements);
bind(&ok);
}
}


void MacroAssembler::Check(Condition cc, const char* msg) {
void MacroAssembler::Check(Condition cc, BailoutReason reason) {
Label L;
j(cc, &L, Label::kNear);
Abort(msg);
Abort(reason);
// Control will not return here.
bind(&L);
}
@ -508,12 +508,13 @@ void MacroAssembler::NegativeZeroTest(Register result,
}


void MacroAssembler::Abort(const char* msg) {
void MacroAssembler::Abort(BailoutReason reason) {
// We want to pass the msg string like a smi to avoid GC
// problems, however msg is not guaranteed to be aligned
// properly. Instead, we pass an aligned pointer that is
// a proper v8 smi, but also pass the alignment difference
// from the real pointer as a smi.
const char* msg = GetBailoutReason(reason);
intptr_t p1 = reinterpret_cast<intptr_t>(msg);
intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
// Note: p0 might not be a valid Smi _value_, but it has a valid Smi tag.
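
A note on the pointer-as-smi trick described in the comment above: the aligned base p0 carries a valid smi tag, and the small difference p1 - p0 is itself a trivially valid smi, so both survive a GC-safe trip through the runtime and the original message pointer can be rebuilt on the other side. A minimal sketch of the round trip, using the variable names from the snippet above (on this port kSmiTag is 0 and kSmiTagMask is 1):

    // Encode: split the possibly unaligned msg pointer into a
    // smi-tagged base and an alignment delta of at most kSmiTagMask.
    intptr_t p1 = reinterpret_cast<intptr_t>(msg);
    intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;  // smi-tagged base
    intptr_t delta = p1 - p0;                     // 0 or 1, fits in a smi

    // Decode (receiving side): adding the delta back recovers msg exactly.
    const char* recovered = reinterpret_cast<const char*>(p0 + delta);

Since GetBailoutReason() returns a pointer into a static table, the string's lifetime is never an issue.
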
@ -838,7 +839,7 @@ void MacroAssembler::CallApiFunctionAndReturn(Address function_address,
CompareRoot(return_value, Heap::kNullValueRootIndex);
j(equal, &ok, Label::kNear);

Abort("API call returned invalid object");
Abort(kAPICallReturnedInvalidObject);

bind(&ok);
#endif
@ -1038,7 +1039,7 @@ void MacroAssembler::LoadSmiConstant(Register dst, Smi* source) {
RelocInfo::NONE64);
cmpq(dst, kSmiConstantRegister);
if (allow_stub_calls()) {
Assert(equal, "Uninitialized kSmiConstantRegister");
Assert(equal, kUninitializedKSmiConstantRegister);
} else {
Label ok;
j(equal, &ok, Label::kNear);
@ -1106,7 +1107,7 @@ void MacroAssembler::Integer32ToSmiField(const Operand& dst, Register src) {
Label ok;
j(zero, &ok, Label::kNear);
if (allow_stub_calls()) {
Abort("Integer32ToSmiField writing to non-smi location");
Abort(kInteger32ToSmiFieldWritingToNonSmiLocation);
} else {
int3();
}
@ -1689,12 +1690,12 @@ void MacroAssembler::SmiAdd(Register dst,
if (emit_debug_code()) {
movq(kScratchRegister, src1);
addq(kScratchRegister, src2);
Check(no_overflow, "Smi addition overflow");
Check(no_overflow, kSmiAdditionOverflow);
}
lea(dst, Operand(src1, src2, times_1, 0));
} else {
addq(dst, src2);
Assert(no_overflow, "Smi addition overflow");
Assert(no_overflow, kSmiAdditionOverflow);
}
}

@ -1726,7 +1727,7 @@ void MacroAssembler::SmiSub(Register dst, Register src1, Register src2) {
movq(dst, src1);
}
subq(dst, src2);
Assert(no_overflow, "Smi subtraction overflow");
Assert(no_overflow, kSmiSubtractionOverflow);
}


@ -1758,7 +1759,7 @@ void MacroAssembler::SmiSub(Register dst,
movq(dst, src1);
}
subq(dst, src2);
Assert(no_overflow, "Smi subtraction overflow");
Assert(no_overflow, kSmiSubtractionOverflow);
}


@ -2155,7 +2156,7 @@ void MacroAssembler::SelectNonSmi(Register dst,
#ifdef DEBUG
if (allow_stub_calls()) { // Check contains a stub call.
Condition not_both_smis = NegateCondition(CheckBothSmi(src1, src2));
Check(not_both_smis, "Both registers were smis in SelectNonSmi.");
Check(not_both_smis, kBothRegistersWereSmisInSelectNonSmi);
}
#endif
STATIC_ASSERT(kSmiTag == 0);
@ -2984,7 +2985,7 @@ void MacroAssembler::LoadUint32(XMMRegister dst,
XMMRegister scratch) {
if (FLAG_debug_code) {
cmpq(src, Immediate(0xffffffff));
Assert(below_equal, "input GPR is expected to have upper32 cleared");
Assert(below_equal, kInputGPRIsExpectedToHaveUpper32Cleared);
}
cvtqsi2sd(dst, src);
}
@ -3033,7 +3034,7 @@ void MacroAssembler::AssertNumber(Register object) {
j(is_smi, &ok, Label::kNear);
Cmp(FieldOperand(object, HeapObject::kMapOffset),
isolate()->factory()->heap_number_map());
Check(equal, "Operand is not a number");
Check(equal, kOperandIsNotANumber);
bind(&ok);
}
}
@ -3042,7 +3043,7 @@ void MacroAssembler::AssertNumber(Register object) {
void MacroAssembler::AssertNotSmi(Register object) {
if (emit_debug_code()) {
Condition is_smi = CheckSmi(object);
Check(NegateCondition(is_smi), "Operand is a smi");
Check(NegateCondition(is_smi), kOperandIsASmi);
}
}

@ -3050,7 +3051,7 @@ void MacroAssembler::AssertNotSmi(Register object) {
void MacroAssembler::AssertSmi(Register object) {
if (emit_debug_code()) {
Condition is_smi = CheckSmi(object);
Check(is_smi, "Operand is not a smi");
Check(is_smi, kOperandIsNotASmi);
}
}

@ -3058,7 +3059,7 @@ void MacroAssembler::AssertSmi(Register object) {
void MacroAssembler::AssertSmi(const Operand& object) {
if (emit_debug_code()) {
Condition is_smi = CheckSmi(object);
Check(is_smi, "Operand is not a smi");
Check(is_smi, kOperandIsNotASmi);
}
}

@ -3068,7 +3069,7 @@ void MacroAssembler::AssertZeroExtended(Register int32_register) {
ASSERT(!int32_register.is(kScratchRegister));
movq(kScratchRegister, 0x100000000l, RelocInfo::NONE64);
cmpq(kScratchRegister, int32_register);
Check(above_equal, "32 bit value in register is not zero-extended");
Check(above_equal, k32BitValueInRegisterIsNotZeroExtended);
}
}

@ -3076,12 +3077,12 @@ void MacroAssembler::AssertZeroExtended(Register int32_register) {
void MacroAssembler::AssertString(Register object) {
if (emit_debug_code()) {
testb(object, Immediate(kSmiTagMask));
Check(not_equal, "Operand is a smi and not a string");
Check(not_equal, kOperandIsASmiAndNotAString);
push(object);
movq(object, FieldOperand(object, HeapObject::kMapOffset));
CmpInstanceType(object, FIRST_NONSTRING_TYPE);
pop(object);
Check(below, "Operand is not a string");
Check(below, kOperandIsNotAString);
}
}

@ -3089,24 +3090,24 @@ void MacroAssembler::AssertString(Register object) {
void MacroAssembler::AssertName(Register object) {
if (emit_debug_code()) {
testb(object, Immediate(kSmiTagMask));
Check(not_equal, "Operand is a smi and not a name");
Check(not_equal, kOperandIsASmiAndNotAName);
push(object);
movq(object, FieldOperand(object, HeapObject::kMapOffset));
CmpInstanceType(object, LAST_NAME_TYPE);
pop(object);
Check(below_equal, "Operand is not a name");
Check(below_equal, kOperandIsNotAName);
}
}


void MacroAssembler::AssertRootValue(Register src,
Heap::RootListIndex root_value_index,
const char* message) {
BailoutReason reason) {
if (emit_debug_code()) {
ASSERT(!src.is(kScratchRegister));
LoadRoot(kScratchRegister, root_value_index);
cmpq(src, kScratchRegister);
Check(equal, message);
Check(equal, reason);
}
}

@ -3457,7 +3458,7 @@ void MacroAssembler::EnterFrame(StackFrame::Type type) {
isolate()->factory()->undefined_value(),
RelocInfo::EMBEDDED_OBJECT);
cmpq(Operand(rsp, 0), kScratchRegister);
Check(not_equal, "code object not properly patched");
Check(not_equal, kCodeObjectNotProperlyPatched);
}
}

@ -3466,7 +3467,7 @@ void MacroAssembler::LeaveFrame(StackFrame::Type type) {
if (emit_debug_code()) {
Move(kScratchRegister, Smi::FromInt(type));
cmpq(Operand(rbp, StandardFrameConstants::kMarkerOffset), kScratchRegister);
Check(equal, "stack frame types must match");
Check(equal, kStackFrameTypesMustMatch);
}
movq(rsp, rbp);
pop(rbp);
@ -3612,7 +3613,7 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
// When generating debug code, make sure the lexical context is set.
if (emit_debug_code()) {
cmpq(scratch, Immediate(0));
Check(not_equal, "we should not have an empty lexical context");
Check(not_equal, kWeShouldNotHaveAnEmptyLexicalContext);
}
// Load the native context of the current context.
int offset =
@ -3624,7 +3625,7 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
if (emit_debug_code()) {
Cmp(FieldOperand(scratch, HeapObject::kMapOffset),
isolate()->factory()->native_context_map());
Check(equal, "JSGlobalObject::native_context should be a native context.");
Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext);
}

// Check if both contexts are the same.
@ -3643,12 +3644,12 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
movq(holder_reg,
FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
CompareRoot(holder_reg, Heap::kNullValueRootIndex);
Check(not_equal, "JSGlobalProxy::context() should not be null.");
Check(not_equal, kJSGlobalProxyContextShouldNotBeNull);

// Read the first word and compare to native_context_map(),
movq(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
CompareRoot(holder_reg, Heap::kNativeContextMapRootIndex);
Check(equal, "JSGlobalObject::native_context should be a native context.");
Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext);
pop(holder_reg);
}

@ -3794,7 +3795,7 @@ void MacroAssembler::LoadAllocationTopHelper(Register result,
// Assert that result actually contains top on entry.
Operand top_operand = ExternalOperand(allocation_top);
cmpq(result, top_operand);
Check(equal, "Unexpected allocation top");
Check(equal, kUnexpectedAllocationTop);
#endif
return;
}
@ -3815,7 +3816,7 @@ void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
AllocationFlags flags) {
if (emit_debug_code()) {
testq(result_end, Immediate(kObjectAlignmentMask));
Check(zero, "Unaligned allocation in new space");
Check(zero, kUnalignedAllocationInNewSpace);
}

ExternalReference allocation_top =
@ -3862,7 +3863,7 @@ void MacroAssembler::Allocate(int object_size,
// always safe because the limit of the heap is always aligned.
if (((flags & DOUBLE_ALIGNMENT) != 0) && FLAG_debug_code) {
testq(result, Immediate(kDoubleAlignmentMask));
Check(zero, "Allocation is not double aligned");
Check(zero, kAllocationIsNotDoubleAligned);
}

// Calculate new top and bail out if new space is exhausted.
@ -3941,7 +3942,7 @@ void MacroAssembler::Allocate(Register object_size,
// always safe because the limit of the heap is always aligned.
if (((flags & DOUBLE_ALIGNMENT) != 0) && FLAG_debug_code) {
testq(result, Immediate(kDoubleAlignmentMask));
Check(zero, "Allocation is not double aligned");
Check(zero, kAllocationIsNotDoubleAligned);
}

// Calculate new top and bail out if new space is exhausted.
@ -3975,7 +3976,7 @@ void MacroAssembler::UndoAllocationInNewSpace(Register object) {
Operand top_operand = ExternalOperand(new_space_allocation_top);
#ifdef DEBUG
cmpq(object, top_operand);
Check(below, "Undo allocation of non allocated memory");
Check(below, kUndoAllocationOfNonAllocatedMemory);
#endif
movq(top_operand, object);
}
@ -4165,7 +4166,7 @@ void MacroAssembler::CopyBytes(Register destination,
ASSERT(min_length >= 0);
if (emit_debug_code()) {
cmpl(length, Immediate(min_length));
Assert(greater_equal, "Invalid min_length");
Assert(greater_equal, kInvalidMinLength);
}
Label loop, done, short_string, short_loop;

@ -4249,7 +4250,7 @@ void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
if (emit_debug_code()) {
CompareRoot(FieldOperand(dst, HeapObject::kMapOffset),
Heap::kWithContextMapRootIndex);
Check(not_equal, "Variable resolved to with context.");
Check(not_equal, kVariableResolvedToWithContext);
}
}

@ -4340,7 +4341,7 @@ void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
CheckMap(map, isolate()->factory()->meta_map(), &fail, DO_SMI_CHECK);
jmp(&ok);
bind(&fail);
Abort("Global functions must have initial map");
Abort(kGlobalFunctionsMustHaveInitialMap);
bind(&ok);
}
}

@ -1002,7 +1002,7 @@ class MacroAssembler: public Assembler {
// enabled via --debug-code.
void AssertRootValue(Register src,
Heap::RootListIndex root_value_index,
const char* message);
BailoutReason reason);

// ---------------------------------------------------------------------------
// Exception handling
@ -1319,15 +1319,15 @@ class MacroAssembler: public Assembler {

// Calls Abort(msg) if the condition cc is not satisfied.
// Use --debug_code to enable.
void Assert(Condition cc, const char* msg);
void Assert(Condition cc, BailoutReason reason);

void AssertFastElements(Register elements);

// Like Assert(), but always enabled.
void Check(Condition cc, const char* msg);
void Check(Condition cc, BailoutReason reason);

// Print a message to stdout and abort execution.
void Abort(const char* msg);
void Abort(BailoutReason msg);

// Check that the stack is aligned.
void CheckStackAlignment();
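
With the const char* overloads of Assert, Check, and Abort gone, adding a new debug check becomes a two-step edit: first add an entry to the central message list, then use the generated constant at the call site. A sketch with an invented entry name:

    // 1. In the central message list (invented example entry):
    //      V(kMyNewInvariantFailed, "My new invariant failed")
    // 2. At the call site, debug-only or unconditional as needed:
    __ Assert(equal, kMyNewInvariantFailed);  // emitted only with --debug-code
    __ Check(equal, kMyNewInvariantFailed);   // always emitted

The comment block above keeps the distinction: Assert() compiles to nothing unless --debug-code is on, while Check() always emits the test and the abort path.
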
@ -397,7 +397,7 @@ void RegExpMacroAssemblerX64::CheckNotBackReference(
// Fail on partial or illegal capture (start of capture after end of capture).
// This must not happen (no back-reference can reference a capture that wasn't
// closed before in the reg-exp).
__ Check(greater_equal, "Invalid capture referenced");
__ Check(greater_equal, kInvalidCaptureReferenced);

// Succeed on empty capture (including non-participating capture)
__ j(equal, &fallthrough);

@ -2938,7 +2938,7 @@ Handle<Code> LoadStubCompiler::CompileLoadGlobal(
__ j(equal, &miss);
} else if (FLAG_debug_code) {
__ CompareRoot(rbx, Heap::kTheHoleValueRootIndex);
__ Check(not_equal, "DontDelete cells can't contain the hole");
__ Check(not_equal, kDontDeleteCellsCannotContainTheHole);
}

HandlerFrontendFooter(name, &success, &miss);

@ -94,7 +94,7 @@ ConvertDToIFunc MakeConvertDToIFuncTrampoline(Isolate* isolate,
Register reg = Register::from_code(reg_num);
if (!reg.is(esp) && !reg.is(ebp) && !reg.is(destination_reg)) {
__ cmp(reg, MemOperand(esp, 0));
__ Assert(equal, "register was clobbered");
__ Assert(equal, kRegisterWasClobbered);
__ add(esp, Immediate(kPointerSize));
}
}

@ -93,7 +93,7 @@ ConvertDToIFunc MakeConvertDToIFuncTrampoline(Isolate* isolate,
Register reg = Register::from_code(reg_num);
if (!reg.is(rsp) && !reg.is(rbp) && !reg.is(destination_reg)) {
__ cmpq(reg, MemOperand(rsp, 0));
__ Assert(equal, "register was clobbered");
__ Assert(equal, kRegisterWasClobbered);
__ addq(rsp, Immediate(kPointerSize));
}
}