Fix materialization of accessor frames with captured receivers
I have fixed the skipping of the receiver object when materializing captured objects. This is done with a new DoTranslateObjectAndSkip method. We should consider unifying DoTranslateObjectAndSkip, DoTranslateObject and DoTranslateCommand, as they do almost the same thing - they only differ in where they store the result.

The change also turns a bunch of ASSERTs into CHECKs.

R=mstarzinger@chromium.org
BUG=359441
TEST=test/mjsunit/regress/regress-359441.js
LOG=N

Review URL: https://codereview.chromium.org/225283006

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@20978 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
This commit is contained in: parent 19aa122d87, commit ff884e06ae
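
For context, the heart of the change is in DoComputeAccessorStubFrame: instead of skipping the receiver's translation entries outright, the deoptimizer now runs them through the object-translation machinery, so a captured receiver is still registered for deferred materialization. The excerpt below is condensed from the diff that follows; it is an illustration, not the complete function:

    // Before: the receiver entry was skipped wholesale, so a captured
    // (dematerialized) receiver was never recorded for materialization.
    Translation::Opcode opcode =
        static_cast<Translation::Opcode>(iterator->Next());
    iterator->Skip(Translation::NumberOfOperandsFor(opcode));

    // After: the receiver is still not written into the output frame, but its
    // object description is translated, so it can be materialized later.
    DoTranslateObjectAndSkip(iterator);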
@@ -87,7 +87,7 @@ Code* Deoptimizer::FindDeoptimizingCode(Address addr) {
     Object* element = native_context->DeoptimizedCodeListHead();
     while (!element->IsUndefined()) {
       Code* code = Code::cast(element);
-      ASSERT(code->kind() == Code::OPTIMIZED_FUNCTION);
+      CHECK(code->kind() == Code::OPTIMIZED_FUNCTION);
       if (code->contains(addr)) return code;
       element = code->next_code_link();
     }
@@ -111,7 +111,7 @@ Deoptimizer* Deoptimizer::New(JSFunction* function,
                                              from,
                                              fp_to_sp_delta,
                                              NULL);
-  ASSERT(isolate->deoptimizer_data()->current_ == NULL);
+  CHECK(isolate->deoptimizer_data()->current_ == NULL);
   isolate->deoptimizer_data()->current_ = deoptimizer;
   return deoptimizer;
 }
@@ -133,7 +133,7 @@ size_t Deoptimizer::GetMaxDeoptTableSize() {
 
 Deoptimizer* Deoptimizer::Grab(Isolate* isolate) {
   Deoptimizer* result = isolate->deoptimizer_data()->current_;
-  ASSERT(result != NULL);
+  CHECK_NE(result, NULL);
   result->DeleteFrameDescriptions();
   isolate->deoptimizer_data()->current_ = NULL;
   return result;
@@ -160,8 +160,8 @@ DeoptimizedFrameInfo* Deoptimizer::DebuggerInspectableFrame(
     JavaScriptFrame* frame,
     int jsframe_index,
     Isolate* isolate) {
-  ASSERT(frame->is_optimized());
-  ASSERT(isolate->deoptimizer_data()->deoptimized_frame_info_ == NULL);
+  CHECK(frame->is_optimized());
+  CHECK(isolate->deoptimizer_data()->deoptimized_frame_info_ == NULL);
 
   // Get the function and code from the frame.
   JSFunction* function = frame->function();
@@ -171,7 +171,7 @@ DeoptimizedFrameInfo* Deoptimizer::DebuggerInspectableFrame(
   // return address must be at a place in the code with deoptimization support.
   SafepointEntry safepoint_entry = code->GetSafepointEntry(frame->pc());
   int deoptimization_index = safepoint_entry.deoptimization_index();
-  ASSERT(deoptimization_index != Safepoint::kNoDeoptimizationIndex);
+  CHECK_NE(deoptimization_index, Safepoint::kNoDeoptimizationIndex);
 
   // Always use the actual stack slots when calculating the fp to sp
   // delta adding two for the function and context.
@@ -194,7 +194,7 @@ DeoptimizedFrameInfo* Deoptimizer::DebuggerInspectableFrame(
 
   // Create the GC safe output frame information and register it for GC
   // handling.
-  ASSERT_LT(jsframe_index, deoptimizer->jsframe_count());
+  CHECK_LT(jsframe_index, deoptimizer->jsframe_count());
 
   // Convert JS frame index into frame index.
   int frame_index = deoptimizer->ConvertJSFrameIndexToFrameIndex(jsframe_index);
@@ -246,7 +246,7 @@ DeoptimizedFrameInfo* Deoptimizer::DebuggerInspectableFrame(
 
 void Deoptimizer::DeleteDebuggerInspectableFrame(DeoptimizedFrameInfo* info,
                                                  Isolate* isolate) {
-  ASSERT(isolate->deoptimizer_data()->deoptimized_frame_info_ == info);
+  CHECK_EQ(isolate->deoptimizer_data()->deoptimized_frame_info_, info);
   delete info;
   isolate->deoptimizer_data()->deoptimized_frame_info_ = NULL;
 }
@@ -264,7 +264,7 @@ void Deoptimizer::VisitAllOptimizedFunctionsForContext(
     Context* context, OptimizedFunctionVisitor* visitor) {
   DisallowHeapAllocation no_allocation;
 
-  ASSERT(context->IsNativeContext());
+  CHECK(context->IsNativeContext());
 
   visitor->EnterContext(context);
 
@@ -287,13 +287,13 @@ void Deoptimizer::VisitAllOptimizedFunctionsForContext(
         context->SetOptimizedFunctionsListHead(next);
       }
       // The visitor should not alter the link directly.
-      ASSERT(function->next_function_link() == next);
+      CHECK_EQ(function->next_function_link(), next);
      // Set the next function link to undefined to indicate it is no longer
      // in the optimized functions list.
      function->set_next_function_link(context->GetHeap()->undefined_value());
    } else {
      // The visitor should not alter the link directly.
-      ASSERT(function->next_function_link() == next);
+      CHECK_EQ(function->next_function_link(), next);
      // preserve this element.
      prev = function;
    }
@@ -395,7 +395,7 @@ void Deoptimizer::DeoptimizeMarkedCodeForContext(Context* context) {
   Object* element = context->OptimizedCodeListHead();
   while (!element->IsUndefined()) {
     Code* code = Code::cast(element);
-    ASSERT(code->kind() == Code::OPTIMIZED_FUNCTION);
+    CHECK_EQ(code->kind(), Code::OPTIMIZED_FUNCTION);
     Object* next = code->next_code_link();
     if (code->marked_for_deoptimization()) {
       // Put the code into the list for later patching.
@@ -483,7 +483,7 @@ void Deoptimizer::DeoptimizeGlobalObject(JSObject* object) {
   }
   if (object->IsJSGlobalProxy()) {
     Object* proto = object->GetPrototype();
-    ASSERT(proto->IsJSGlobalObject());
+    CHECK(proto->IsJSGlobalObject());
     Context* native_context = GlobalObject::cast(proto)->native_context();
     MarkAllCodeForContext(native_context);
     DeoptimizeMarkedCodeForContext(native_context);
@@ -499,7 +499,7 @@ void Deoptimizer::MarkAllCodeForContext(Context* context) {
   Object* element = context->OptimizedCodeListHead();
   while (!element->IsUndefined()) {
     Code* code = Code::cast(element);
-    ASSERT(code->kind() == Code::OPTIMIZED_FUNCTION);
+    CHECK_EQ(code->kind(), Code::OPTIMIZED_FUNCTION);
     code->set_marked_for_deoptimization(true);
     element = code->next_code_link();
   }
@@ -534,7 +534,7 @@ bool Deoptimizer::TraceEnabledFor(BailoutType deopt_type,
        ? FLAG_trace_stub_failures
        : FLAG_trace_deopt;
   }
-  UNREACHABLE();
+  FATAL("Unsupported deopt type");
   return false;
 }
 
@@ -546,7 +546,7 @@ const char* Deoptimizer::MessageFor(BailoutType type) {
     case LAZY: return "lazy";
     case DEBUGGER: return "debugger";
   }
-  UNREACHABLE();
+  FATAL("Unsupported deopt type");
   return NULL;
 }
 
@@ -636,7 +636,7 @@ Code* Deoptimizer::FindOptimizedCode(JSFunction* function,
       ASSERT(optimized_code->contains(from_));
       return optimized_code;
   }
-  UNREACHABLE();
+  FATAL("Could not find code for optimized function");
   return NULL;
 }
 
@@ -679,15 +679,15 @@ Address Deoptimizer::GetDeoptimizationEntry(Isolate* isolate,
                                             int id,
                                             BailoutType type,
                                             GetEntryMode mode) {
-  ASSERT(id >= 0);
+  CHECK_GE(id, 0);
   if (id >= kMaxNumberOfEntries) return NULL;
   if (mode == ENSURE_ENTRY_CODE) {
     EnsureCodeForDeoptimizationEntry(isolate, type, id);
   } else {
-    ASSERT(mode == CALCULATE_ENTRY_ADDRESS);
+    CHECK_EQ(mode, CALCULATE_ENTRY_ADDRESS);
   }
   DeoptimizerData* data = isolate->deoptimizer_data();
-  ASSERT(type < kBailoutTypesWithCodeEntry);
+  CHECK_LT(type, kBailoutTypesWithCodeEntry);
   MemoryChunk* base = data->deopt_entry_code_[type];
   return base->area_start() + (id * table_entry_size_);
 }
@@ -850,7 +850,7 @@ void Deoptimizer::DoComputeOutputFrames() {
       case Translation::LITERAL:
       case Translation::ARGUMENTS_OBJECT:
       default:
-        UNREACHABLE();
+        FATAL("Unsupported translation");
        break;
    }
  }
@@ -889,7 +889,7 @@ void Deoptimizer::DoComputeJSFrame(TranslationIterator* iterator,
   } else {
     int closure_id = iterator->Next();
     USE(closure_id);
-    ASSERT_EQ(Translation::kSelfLiteralId, closure_id);
+    CHECK_EQ(Translation::kSelfLiteralId, closure_id);
     function = function_;
   }
   unsigned height = iterator->Next();
@@ -914,8 +914,8 @@ void Deoptimizer::DoComputeJSFrame(TranslationIterator* iterator,
 
   bool is_bottommost = (0 == frame_index);
   bool is_topmost = (output_count_ - 1 == frame_index);
-  ASSERT(frame_index >= 0 && frame_index < output_count_);
-  ASSERT(output_[frame_index] == NULL);
+  CHECK(frame_index >= 0 && frame_index < output_count_);
+  CHECK_EQ(output_[frame_index], NULL);
   output_[frame_index] = output_frame;
 
   // The top address for the bottommost output frame can be computed from
@@ -1059,7 +1059,7 @@ void Deoptimizer::DoComputeJSFrame(TranslationIterator* iterator,
     output_offset -= kPointerSize;
     DoTranslateCommand(iterator, frame_index, output_offset);
   }
-  ASSERT(0 == output_offset);
+  CHECK_EQ(0, output_offset);
 
   // Compute this frame's PC, state, and continuation.
   Code* non_optimized_code = function->shared()->code();
@@ -1096,7 +1096,7 @@ void Deoptimizer::DoComputeJSFrame(TranslationIterator* iterator,
     } else if (bailout_type_ == SOFT) {
       continuation = builtins->builtin(Builtins::kNotifySoftDeoptimized);
     } else {
-      ASSERT(bailout_type_ == EAGER);
+      CHECK_EQ(bailout_type_, EAGER);
     }
     output_frame->SetContinuation(
         reinterpret_cast<intptr_t>(continuation->entry()));
@@ -1123,8 +1123,8 @@ void Deoptimizer::DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator,
   output_frame->SetFrameType(StackFrame::ARGUMENTS_ADAPTOR);
 
   // Arguments adaptor can not be topmost or bottommost.
-  ASSERT(frame_index > 0 && frame_index < output_count_ - 1);
-  ASSERT(output_[frame_index] == NULL);
+  CHECK(frame_index > 0 && frame_index < output_count_ - 1);
+  CHECK(output_[frame_index] == NULL);
   output_[frame_index] = output_frame;
 
   // The top address of the frame is computed from the previous
@@ -1270,7 +1270,7 @@ void Deoptimizer::DoComputeConstructStubFrame(TranslationIterator* iterator,
     // receiver parameter through the translation. It might be encoding
     // a captured object, patch the slot address for a captured object.
     if (i == 0 && deferred_objects_.length() > deferred_object_index) {
-      ASSERT(!deferred_objects_[deferred_object_index].is_arguments());
+      CHECK(!deferred_objects_[deferred_object_index].is_arguments());
       deferred_objects_[deferred_object_index].patch_slot_address(top_address);
     }
   }
@@ -1381,7 +1381,7 @@ void Deoptimizer::DoComputeConstructStubFrame(TranslationIterator* iterator,
            top_address + output_offset, output_offset, value);
   }
 
-  ASSERT(0 == output_offset);
+  CHECK_EQ(0, output_offset);
 
   intptr_t pc = reinterpret_cast<intptr_t>(
       construct_stub->instruction_start() +
@@ -1427,8 +1427,8 @@ void Deoptimizer::DoComputeAccessorStubFrame(TranslationIterator* iterator,
   output_frame->SetFrameType(StackFrame::INTERNAL);
 
   // A frame for an accessor stub can not be the topmost or bottommost one.
-  ASSERT(frame_index > 0 && frame_index < output_count_ - 1);
-  ASSERT(output_[frame_index] == NULL);
+  CHECK(frame_index > 0 && frame_index < output_count_ - 1);
+  CHECK_EQ(output_[frame_index], NULL);
   output_[frame_index] = output_frame;
 
   // The top address of the frame is computed from the previous frame's top and
@@ -1512,9 +1512,7 @@ void Deoptimizer::DoComputeAccessorStubFrame(TranslationIterator* iterator,
   }
 
   // Skip receiver.
-  Translation::Opcode opcode =
-      static_cast<Translation::Opcode>(iterator->Next());
-  iterator->Skip(Translation::NumberOfOperandsFor(opcode));
+  DoTranslateObjectAndSkip(iterator);
 
   if (is_setter_stub_frame) {
     // The implicit return value was part of the artificial setter stub
@@ -1523,7 +1521,7 @@ void Deoptimizer::DoComputeAccessorStubFrame(TranslationIterator* iterator,
     DoTranslateCommand(iterator, frame_index, output_offset);
   }
 
-  ASSERT(0 == output_offset);
+  CHECK_EQ(output_offset, 0);
 
   Smi* offset = is_setter_stub_frame ?
       isolate_->heap()->setter_stub_deopt_pc_offset() :
@@ -1573,8 +1571,8 @@ void Deoptimizer::DoComputeCompiledStubFrame(TranslationIterator* iterator,
   //               reg = JSFunction context
   //
 
-  ASSERT(compiled_code_->is_crankshafted() &&
-         compiled_code_->kind() != Code::OPTIMIZED_FUNCTION);
+  CHECK(compiled_code_->is_crankshafted() &&
+        compiled_code_->kind() != Code::OPTIMIZED_FUNCTION);
   int major_key = compiled_code_->major_key();
   CodeStubInterfaceDescriptor* descriptor =
       isolate_->code_stub_interface_descriptor(major_key);
@@ -1583,7 +1581,7 @@ void Deoptimizer::DoComputeCompiledStubFrame(TranslationIterator* iterator,
   // and the standard stack frame slots. Include space for an argument
   // object to the callee and optionally the space to pass the argument
   // object to the stub failure handler.
-  ASSERT(descriptor->register_param_count_ >= 0);
+  CHECK_GE(descriptor->register_param_count_, 0);
   int height_in_bytes = kPointerSize * descriptor->register_param_count_ +
       sizeof(Arguments) + kPointerSize;
   int fixed_frame_size = StandardFrameConstants::kFixedFrameSize;
@@ -1600,7 +1598,7 @@ void Deoptimizer::DoComputeCompiledStubFrame(TranslationIterator* iterator,
   FrameDescription* output_frame =
       new(output_frame_size) FrameDescription(output_frame_size, NULL);
   output_frame->SetFrameType(StackFrame::STUB_FAILURE_TRAMPOLINE);
-  ASSERT(frame_index == 0);
+  CHECK_EQ(frame_index, 0);
   output_[frame_index] = output_frame;
 
   // The top address for the output frame can be computed from the input
@@ -1658,7 +1656,7 @@ void Deoptimizer::DoComputeCompiledStubFrame(TranslationIterator* iterator,
   output_frame->SetRegister(context_reg.code(), value);
   output_frame_offset -= kPointerSize;
   output_frame->SetFrameSlot(output_frame_offset, value);
-  ASSERT(reinterpret_cast<Object*>(value)->IsContext());
+  CHECK(reinterpret_cast<Object*>(value)->IsContext());
   if (trace_scope_ != NULL) {
     PrintF(trace_scope_->file(),
            " 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
@@ -1736,10 +1734,10 @@ void Deoptimizer::DoComputeCompiledStubFrame(TranslationIterator* iterator,
     }
   }
 
-  ASSERT(0 == output_frame_offset);
+  CHECK_EQ(output_frame_offset, 0);
 
   if (!arg_count_known) {
-    ASSERT(arguments_length_offset >= 0);
+    CHECK_GE(arguments_length_offset, 0);
     // We know it's a smi because 1) the code stub guarantees the stack
     // parameter count is in smi range, and 2) the DoTranslateCommand in the
     // parameter loop above translated that to a tagged value.
@@ -1823,7 +1821,7 @@ Handle<Object> Deoptimizer::MaterializeNextHeapObject() {
     Handle<JSObject> arguments =
        isolate_->factory()->NewArgumentsObject(function, length);
     Handle<FixedArray> array = isolate_->factory()->NewFixedArray(length);
-    ASSERT(array->length() == length);
+    ASSERT_EQ(array->length(), length);
     arguments->set_elements(*array);
     materialized_objects_->Add(arguments);
     for (int i = 0; i < length; ++i) {
@@ -1889,7 +1887,7 @@ Handle<Object> Deoptimizer::MaterializeNextHeapObject() {
      default:
        PrintF(stderr,
               "[couldn't handle instance type %d]\n", map->instance_type());
-        UNREACHABLE();
+        FATAL("Unsupported instance type");
    }
  }
 
@@ -1983,7 +1981,9 @@ void Deoptimizer::MaterializeHeapObjects(JavaScriptFrameIterator* it) {
     // materialize a new instance of the object if necessary. Store
     // the materialized object into the frame slot.
     Handle<Object> object = MaterializeNextHeapObject();
-    Memory::Object_at(descriptor.slot_address()) = *object;
+    if (descriptor.slot_address() != NULL) {
+      Memory::Object_at(descriptor.slot_address()) = *object;
+    }
     if (trace_scope_ != NULL) {
       if (descriptor.is_arguments()) {
         PrintF(trace_scope_->file(),
@@ -2002,8 +2002,8 @@ void Deoptimizer::MaterializeHeapObjects(JavaScriptFrameIterator* it) {
      }
    }
 
-    ASSERT(materialization_object_index_ == materialized_objects_->length());
-    ASSERT(materialization_value_index_ == materialized_values_->length());
+    CHECK_EQ(materialization_object_index_, materialized_objects_->length());
+    CHECK_EQ(materialization_value_index_, materialized_values_->length());
   }
 
   if (prev_materialized_count_ > 0) {
@@ -2018,7 +2018,7 @@ void Deoptimizer::MaterializeHeapNumbersForDebuggerInspectableFrame(
     Address expressions_top,
     uint32_t expressions_size,
     DeoptimizedFrameInfo* info) {
-  ASSERT_EQ(DEBUGGER, bailout_type_);
+  CHECK_EQ(DEBUGGER, bailout_type_);
   Address parameters_bottom = parameters_top + parameters_size;
   Address expressions_bottom = expressions_top + expressions_size;
   for (int i = 0; i < deferred_heap_numbers_.length(); i++) {
@@ -2075,6 +2075,73 @@ static const char* TraceValueType(bool is_smi) {
 }
 
 
+void Deoptimizer::DoTranslateObjectAndSkip(TranslationIterator* iterator) {
+  Translation::Opcode opcode =
+      static_cast<Translation::Opcode>(iterator->Next());
+
+  switch (opcode) {
+    case Translation::BEGIN:
+    case Translation::JS_FRAME:
+    case Translation::ARGUMENTS_ADAPTOR_FRAME:
+    case Translation::CONSTRUCT_STUB_FRAME:
+    case Translation::GETTER_STUB_FRAME:
+    case Translation::SETTER_STUB_FRAME:
+    case Translation::COMPILED_STUB_FRAME: {
+      FATAL("Unexpected frame start translation opcode");
+      return;
+    }
+
+    case Translation::REGISTER:
+    case Translation::INT32_REGISTER:
+    case Translation::UINT32_REGISTER:
+    case Translation::DOUBLE_REGISTER:
+    case Translation::STACK_SLOT:
+    case Translation::INT32_STACK_SLOT:
+    case Translation::UINT32_STACK_SLOT:
+    case Translation::DOUBLE_STACK_SLOT:
+    case Translation::LITERAL: {
+      // The value is not part of any materialized object, so we can ignore it.
+      iterator->Skip(Translation::NumberOfOperandsFor(opcode));
+      return;
+    }
+
+    case Translation::DUPLICATED_OBJECT: {
+      int object_index = iterator->Next();
+      if (trace_scope_ != NULL) {
+        PrintF(trace_scope_->file(), " skipping object ");
+        PrintF(trace_scope_->file(),
+               " ; duplicate of object #%d\n", object_index);
+      }
+      AddObjectDuplication(0, object_index);
+      return;
+    }
+
+    case Translation::ARGUMENTS_OBJECT:
+    case Translation::CAPTURED_OBJECT: {
+      int length = iterator->Next();
+      bool is_args = opcode == Translation::ARGUMENTS_OBJECT;
+      if (trace_scope_ != NULL) {
+        PrintF(trace_scope_->file(), " skipping object ");
+        PrintF(trace_scope_->file(),
+               " ; object (length = %d, is_args = %d)\n", length, is_args);
+      }
+
+      AddObjectStart(0, length, is_args);
+
+      // We save the object values on the side and materialize the actual
+      // object after the deoptimized frame is built.
+      int object_index = deferred_objects_.length() - 1;
+      for (int i = 0; i < length; i++) {
+        DoTranslateObject(iterator, object_index, i);
+      }
+      return;
+    }
+  }
+
+  FATAL("Unexpected translation opcode");
+}
+
+
 void Deoptimizer::DoTranslateObject(TranslationIterator* iterator,
                                     int object_index,
                                     int field_index) {
@@ -2092,7 +2159,7 @@ void Deoptimizer::DoTranslateObject(TranslationIterator* iterator,
     case Translation::GETTER_STUB_FRAME:
     case Translation::SETTER_STUB_FRAME:
     case Translation::COMPILED_STUB_FRAME:
-      UNREACHABLE();
+      FATAL("Unexpected frame start translation opcode");
       return;
 
     case Translation::REGISTER: {
@@ -2332,6 +2399,8 @@ void Deoptimizer::DoTranslateObject(TranslationIterator* iterator,
       return;
     }
   }
+
+  FATAL("Unexpected translation opcode");
 }
 
 
@@ -2353,7 +2422,7 @@ void Deoptimizer::DoTranslateCommand(TranslationIterator* iterator,
     case Translation::GETTER_STUB_FRAME:
     case Translation::SETTER_STUB_FRAME:
     case Translation::COMPILED_STUB_FRAME:
-      UNREACHABLE();
+      FATAL("Unexpected translation opcode");
       return;
 
     case Translation::REGISTER: {
@@ -2646,7 +2715,7 @@ unsigned Deoptimizer::ComputeIncomingArgumentSize(JSFunction* function) const {
   // The incoming arguments is the values for formal parameters and
   // the receiver. Every slot contains a pointer.
   if (function->IsSmi()) {
-    ASSERT(Smi::cast(function) == Smi::FromInt(StackFrame::STUB));
+    CHECK_EQ(Smi::cast(function), Smi::FromInt(StackFrame::STUB));
     return 0;
   }
   unsigned arguments = function->shared()->formal_parameter_count() + 1;
@@ -2711,13 +2780,13 @@ void Deoptimizer::EnsureCodeForDeoptimizationEntry(Isolate* isolate,
   // cause us to emit relocation information for the external
   // references. This is fine because the deoptimizer's code section
   // isn't meant to be serialized at all.
-  ASSERT(type == EAGER || type == SOFT || type == LAZY);
+  CHECK(type == EAGER || type == SOFT || type == LAZY);
   DeoptimizerData* data = isolate->deoptimizer_data();
   int entry_count = data->deopt_entry_code_entries_[type];
   if (max_entry_id < entry_count) return;
   entry_count = Max(entry_count, Deoptimizer::kMinNumberOfEntries);
   while (max_entry_id >= entry_count) entry_count *= 2;
-  ASSERT(entry_count <= Deoptimizer::kMaxNumberOfEntries);
+  CHECK(entry_count <= Deoptimizer::kMaxNumberOfEntries);
 
   MacroAssembler masm(isolate, NULL, 16 * KB);
   masm.set_emit_debug_code(false);
@@ -2727,8 +2796,8 @@ void Deoptimizer::EnsureCodeForDeoptimizationEntry(Isolate* isolate,
   ASSERT(!RelocInfo::RequiresRelocation(desc));
 
   MemoryChunk* chunk = data->deopt_entry_code_[type];
-  ASSERT(static_cast<int>(Deoptimizer::GetMaxDeoptTableSize()) >=
-         desc.instr_size);
+  CHECK(static_cast<int>(Deoptimizer::GetMaxDeoptTableSize()) >=
+        desc.instr_size);
   chunk->CommitArea(desc.instr_size);
   CopyBytes(chunk->area_start(), desc.buffer,
             static_cast<size_t>(desc.instr_size));
@@ -2795,15 +2864,15 @@ int FrameDescription::ComputeParametersCount() {
     case StackFrame::STUB:
       return -1;  // Minus receiver.
     default:
-      UNREACHABLE();
+      FATAL("Unexpected stack frame type");
      return 0;
  }
 }
 
 
 Object* FrameDescription::GetParameter(int index) {
-  ASSERT(index >= 0);
-  ASSERT(index < ComputeParametersCount());
+  CHECK_GE(index, 0);
+  CHECK_LT(index, ComputeParametersCount());
   // The slot indexes for incoming arguments are negative.
   unsigned offset = GetOffsetFromSlotIndex(index - ComputeParametersCount());
   return reinterpret_cast<Object*>(*GetFrameSlotPointer(offset));
@@ -2811,7 +2880,7 @@ Object* FrameDescription::GetParameter(int index) {
 
 
 unsigned FrameDescription::GetExpressionCount() {
-  ASSERT_EQ(StackFrame::JAVA_SCRIPT, type_);
+  CHECK_EQ(StackFrame::JAVA_SCRIPT, type_);
   unsigned size = GetFrameSize() - ComputeFixedSize();
   return size / kPointerSize;
 }
@@ -3013,7 +3082,7 @@ int Translation::NumberOfOperandsFor(Opcode opcode) {
     case JS_FRAME:
       return 3;
   }
-  UNREACHABLE();
+  FATAL("Unexpected translation type");
   return -1;
 }
 
@@ -3123,14 +3192,13 @@ SlotRefValueBuilder::SlotRefValueBuilder(JavaScriptFrame* frame,
   TranslationIterator it(data->TranslationByteArray(),
                          data->TranslationIndex(deopt_index)->value());
   Translation::Opcode opcode = static_cast<Translation::Opcode>(it.Next());
-  ASSERT(opcode == Translation::BEGIN);
+  CHECK_EQ(opcode, Translation::BEGIN);
   it.Next();  // Drop frame count.
 
   stack_frame_id_ = frame->fp();
 
   int jsframe_count = it.Next();
-  USE(jsframe_count);
-  ASSERT(jsframe_count > inlined_jsframe_index);
+  CHECK_GT(jsframe_count, inlined_jsframe_index);
   int jsframes_to_skip = inlined_jsframe_index;
   int number_of_slots = -1;  // Number of slots inside our frame (yet unknown)
   bool should_deopt = false;
@@ -3139,7 +3207,7 @@ SlotRefValueBuilder::SlotRefValueBuilder(JavaScriptFrame* frame,
     bool processed = false;
     if (opcode == Translation::ARGUMENTS_ADAPTOR_FRAME) {
       if (jsframes_to_skip == 0) {
-        ASSERT(Translation::NumberOfOperandsFor(opcode) == 2);
+        CHECK_EQ(Translation::NumberOfOperandsFor(opcode), 2);
 
        it.Skip(1);  // literal id
        int height = it.Next();
@@ -3186,7 +3254,7 @@ SlotRefValueBuilder::SlotRefValueBuilder(JavaScriptFrame* frame,
       // the nested slots of captured objects
       number_of_slots--;
       SlotRef& slot = slot_refs_.last();
-      ASSERT(slot.Representation() != SlotRef::ARGUMENTS_OBJECT);
+      CHECK_NE(slot.Representation(), SlotRef::ARGUMENTS_OBJECT);
       number_of_slots += slot.GetChildrenCount();
       if (slot.Representation() == SlotRef::DEFERRED_OBJECT ||
           slot.Representation() == SlotRef::DUPLICATE_OBJECT) {
@@ -3260,7 +3328,7 @@ void SlotRefValueBuilder::Prepare(Isolate* isolate) {
   while (current_slot_ < first_slot_index_) {
     GetNext(isolate, 0);
   }
-  ASSERT(current_slot_ == first_slot_index_);
+  CHECK_EQ(current_slot_, first_slot_index_);
 }
 
 
@@ -3322,8 +3390,8 @@ Handle<Object> SlotRefValueBuilder::GetNext(Isolate* isolate, int lvl) {
    }
    case SlotRef::DEFERRED_OBJECT: {
      int length = slot.GetChildrenCount();
-      ASSERT(slot_refs_[current_slot_].Representation() == SlotRef::LITERAL ||
-             slot_refs_[current_slot_].Representation() == SlotRef::TAGGED);
+      CHECK(slot_refs_[current_slot_].Representation() == SlotRef::LITERAL ||
+            slot_refs_[current_slot_].Representation() == SlotRef::TAGGED);
 
      int object_index = materialized_objects_.length();
      if (object_index < prev_materialized_count_) {
@@ -3384,6 +3452,7 @@ Handle<Object> SlotRefValueBuilder::GetNext(Isolate* isolate, int lvl) {
          break;
      }
      UNREACHABLE();
+      break;
    }
 
    case SlotRef::DUPLICATE_OBJECT: {
@@ -3404,7 +3473,7 @@
 
 void SlotRefValueBuilder::Finish(Isolate* isolate) {
   // We should have processed all the slots
-  ASSERT(slot_refs_.length() == current_slot_);
+  CHECK_EQ(slot_refs_.length(), current_slot_);
 
   if (materialized_objects_.length() > prev_materialized_count_) {
     // We have materialized some new objects, so we have to store them
@@ -3425,7 +3494,7 @@ Handle<FixedArray> MaterializedObjectStore::Get(Address fp) {
    return Handle<FixedArray>::null();
  }
  Handle<FixedArray> array = GetStackEntries();
-  ASSERT(array->length() > index);
+  CHECK_GT(array->length(), index);
  return Handle<FixedArray>::cast(Handle<Object>(array->get(index),
                                                 isolate()));
 }
@@ -3446,11 +3515,11 @@ void MaterializedObjectStore::Set(Address fp,
 
 void MaterializedObjectStore::Remove(Address fp) {
   int index = StackIdToIndex(fp);
-  ASSERT(index >= 0);
+  CHECK_GE(index, 0);
 
   frame_fps_.Remove(index);
   Handle<FixedArray> array = GetStackEntries();
-  ASSERT(array->length() > index);
+  CHECK_LT(index, array->length());
   for (int i = index; i < frame_fps_.length(); i++) {
     array->set(i, array->get(i + 1));
   }
@@ -3517,7 +3586,7 @@ DeoptimizedFrameInfo::DeoptimizedFrameInfo(Deoptimizer* deoptimizer,
 
   if (has_arguments_adaptor) {
     output_frame = deoptimizer->output_[frame_index - 1];
-    ASSERT(output_frame->GetFrameType() == StackFrame::ARGUMENTS_ADAPTOR);
+    CHECK_EQ(output_frame->GetFrameType(), StackFrame::ARGUMENTS_ADAPTOR);
  }
 
  parameters_count_ = output_frame->ComputeParametersCount();
@@ -326,14 +326,21 @@ class Deoptimizer : public Malloced {
   void DoComputeCompiledStubFrame(TranslationIterator* iterator,
                                   int frame_index);
 
+  // Translate object, store the result into an auxiliary array
+  // (deferred_objects_tagged_values_).
   void DoTranslateObject(TranslationIterator* iterator,
                          int object_index,
                          int field_index);
 
+  // Translate value, store the result into the given frame slot.
   void DoTranslateCommand(TranslationIterator* iterator,
                           int frame_index,
                           unsigned output_offset);
 
+  // Translate object, do not store the result anywhere (but do update
+  // the deferred materialization array).
+  void DoTranslateObjectAndSkip(TranslationIterator* iterator);
+
   unsigned ComputeInputFrameSize() const;
   unsigned ComputeFixedSize(JSFunction* function) const;
 

test/mjsunit/regress/regress-359441.js (new file, 23 lines)
@@ -0,0 +1,23 @@
+// Copyright 2014 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Flags: --allow-natives-syntax
+
+function g() {
+  this.x = {};
+}
+
+function f() {
+  new g();
+}
+
+function deopt(x) {
+  %DeoptimizeFunction(f);
+}
+
+f();
+f();
+%OptimizeFunctionOnNextCall(f);
+Object.prototype.__defineSetter__('x', deopt);
+f();
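
For anyone reproducing the bug locally, the regression test can be run directly under d8 with the flag from its Flags comment; the binary path below is an assumption that depends on your build configuration:

    # Path to d8 is an example; adjust for your build output directory.
    out/x64.release/d8 --allow-natives-syntax test/mjsunit/regress/regress-359441.js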