Store OSR'd optimized code on the native context.

Since we rarely OSR code, it makes sense to store it on — and look it up from — the native context rather than the SharedFunctionInfo. This also makes the OptimizedCodeMap data structure more space efficient, since it no longer has to store an AST ID for the OSR entry point.

BUG=

Review-Url: https://codereview.chromium.org/2549753002
Cr-Commit-Position: refs/heads/master@{#41554}
This commit is contained in:
mvstanton 2016-12-07 07:10:45 -08:00 committed by Commit bot
parent fd12750209
commit 378b6b22fb
17 changed files with 272 additions and 91 deletions

View File

@ -1128,6 +1128,8 @@ void Genesis::InitializeGlobal(Handle<JSGlobalObject> global_object,
Isolate* isolate = global_object->GetIsolate();
Factory* factory = isolate->factory();
native_context()->set_osr_code_table(*factory->empty_fixed_array());
Handle<ScriptContextTable> script_context_table =
factory->NewScriptContextTable();
native_context()->set_script_context_table(*script_context_table);

View File

@ -1370,12 +1370,6 @@ void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
__ ldr(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
__ cmp(temp, native_context);
__ b(ne, &loop_bottom);
// OSR id set to none?
__ ldr(temp, FieldMemOperand(array_pointer,
SharedFunctionInfo::kOffsetToPreviousOsrAstId));
const int bailout_id = BailoutId::None().ToInt();
__ cmp(temp, Operand(Smi::FromInt(bailout_id)));
__ b(ne, &loop_bottom);
// Literals available?
__ ldr(temp, FieldMemOperand(array_pointer,
SharedFunctionInfo::kOffsetToPreviousLiterals));

View File

@ -1374,12 +1374,6 @@ void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
__ Ldr(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
__ Cmp(temp, native_context);
__ B(ne, &loop_bottom);
// OSR id set to none?
__ Ldr(temp, FieldMemOperand(array_pointer,
SharedFunctionInfo::kOffsetToPreviousOsrAstId));
const int bailout_id = BailoutId::None().ToInt();
__ Cmp(temp, Operand(Smi::FromInt(bailout_id)));
__ B(ne, &loop_bottom);
// Literals available?
__ Ldr(temp, FieldMemOperand(array_pointer,
SharedFunctionInfo::kOffsetToPreviousLiterals));

View File

@ -1055,12 +1055,6 @@ void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
__ mov(temp, FieldOperand(temp, WeakCell::kValueOffset));
__ cmp(temp, native_context);
__ j(not_equal, &loop_bottom);
// OSR id set to none?
__ mov(temp, FieldOperand(map, index, times_half_pointer_size,
SharedFunctionInfo::kOffsetToPreviousOsrAstId));
const int bailout_id = BailoutId::None().ToInt();
__ cmp(temp, Immediate(Smi::FromInt(bailout_id)));
__ j(not_equal, &loop_bottom);
// Literals available?
__ mov(temp, FieldOperand(map, index, times_half_pointer_size,
SharedFunctionInfo::kOffsetToPreviousLiterals));

View File

@ -1380,11 +1380,6 @@ void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
SharedFunctionInfo::kOffsetToPreviousContext));
__ lw(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
__ Branch(&loop_bottom, ne, temp, Operand(native_context));
// OSR id set to none?
__ lw(temp, FieldMemOperand(array_pointer,
SharedFunctionInfo::kOffsetToPreviousOsrAstId));
const int bailout_id = BailoutId::None().ToInt();
__ Branch(&loop_bottom, ne, temp, Operand(Smi::FromInt(bailout_id)));
// Literals available?
__ lw(temp, FieldMemOperand(array_pointer,
SharedFunctionInfo::kOffsetToPreviousLiterals));

View File

@ -1371,11 +1371,6 @@ void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
SharedFunctionInfo::kOffsetToPreviousContext));
__ ld(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
__ Branch(&loop_bottom, ne, temp, Operand(native_context));
// OSR id set to none?
__ ld(temp, FieldMemOperand(array_pointer,
SharedFunctionInfo::kOffsetToPreviousOsrAstId));
const int bailout_id = BailoutId::None().ToInt();
__ Branch(&loop_bottom, ne, temp, Operand(Smi::FromInt(bailout_id)));
// Literals available?
__ ld(temp, FieldMemOperand(array_pointer,
SharedFunctionInfo::kOffsetToPreviousLiterals));

View File

@ -1019,13 +1019,6 @@ void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
__ movp(temp, FieldOperand(temp, WeakCell::kValueOffset));
__ cmpp(temp, native_context);
__ j(not_equal, &loop_bottom);
// OSR id set to none?
__ movp(temp, FieldOperand(map, index, times_pointer_size,
SharedFunctionInfo::kOffsetToPreviousOsrAstId));
__ SmiToInteger32(temp, temp);
const int bailout_id = BailoutId::None().ToInt();
__ cmpl(temp, Immediate(bailout_id));
__ j(not_equal, &loop_bottom);
// Literals available?
__ movp(temp, FieldOperand(map, index, times_pointer_size,
SharedFunctionInfo::kOffsetToPreviousLiterals));

View File

@ -124,6 +124,9 @@ bool Context::IsScriptContext() {
return map == map->GetHeap()->script_context_map();
}
bool Context::OptimizedCodeMapIsCleared() {
return osr_code_table() == GetHeap()->empty_fixed_array();
}
bool Context::HasSameSecurityTokenAs(Context* that) {
return this->native_context()->security_token() ==

View File

@ -408,6 +408,182 @@ Handle<Object> Context::Lookup(Handle<String> name, ContextLookupFlags flags,
return Handle<Object>::null();
}
static const int kSharedOffset = 0;
static const int kCachedCodeOffset = 1;
static const int kLiteralsOffset = 2;
static const int kOsrAstIdOffset = 3;
static const int kEntryLength = 4;
static const int kInitialLength = kEntryLength;
// Scans the native context's OSR code table for the entry keyed by
// (shared, osr_ast_id). Returns the start index of the matching entry,
// or -1 when the table is cleared or holds no such entry.
int Context::SearchOptimizedCodeMapEntry(SharedFunctionInfo* shared,
                                         BailoutId osr_ast_id) {
  DisallowHeapAllocation no_gc;
  DCHECK(this->IsNativeContext());
  if (OptimizedCodeMapIsCleared()) return -1;
  FixedArray* table = this->osr_code_table();
  Smi* id_as_smi = Smi::FromInt(osr_ast_id.ToInt());
  int table_length = table->length();
  for (int index = 0; index < table_length; index += kEntryLength) {
    WeakCell* shared_cell = WeakCell::cast(table->get(index + kSharedOffset));
    bool same_shared = shared_cell->value() == shared;
    bool same_id = table->get(index + kOsrAstIdOffset) == id_as_smi;
    if (same_shared && same_id) return index;
  }
  return -1;
}
// Looks up cached OSR code and literals for (shared, osr_ast_id).
// On a miss — or when the weak cells have been cleared by the GC —
// the corresponding out-parameter is set to nullptr.
void Context::SearchOptimizedCodeMap(SharedFunctionInfo* shared,
                                     BailoutId osr_ast_id, Code** pcode,
                                     LiteralsArray** pliterals) {
  DCHECK(this->IsNativeContext());
  *pcode = nullptr;
  *pliterals = nullptr;
  int entry = SearchOptimizedCodeMapEntry(shared, osr_ast_id);
  if (entry == -1) return;
  FixedArray* table = osr_code_table();
  DCHECK_LE(entry + kEntryLength, table->length());
  WeakCell* code_cell = WeakCell::cast(table->get(entry + kCachedCodeOffset));
  if (!code_cell->cleared()) {
    *pcode = Code::cast(code_cell->value());
  }
  WeakCell* literals_cell =
      WeakCell::cast(table->get(entry + kLiteralsOffset));
  if (!literals_cell->cleared()) {
    *pliterals = LiteralsArray::cast(literals_cell->value());
  }
}
// Inserts (or refreshes) an entry mapping (shared, osr_ast_id) to
// (code, literals) in |native_context|'s OSR code table. Code and
// literals are held via WeakCells so the GC can reclaim them; a
// reclaimed entry's slot is reused before the table is grown.
void Context::AddToOptimizedCodeMap(Handle<Context> native_context,
                                    Handle<SharedFunctionInfo> shared,
                                    Handle<Code> code,
                                    Handle<LiteralsArray> literals,
                                    BailoutId osr_ast_id) {
  DCHECK(native_context->IsNativeContext());
  Isolate* isolate = native_context->GetIsolate();
  // Don't cache while building a snapshot: the serializer must not
  // see weak references to code.
  if (isolate->serializer_enabled()) return;
  STATIC_ASSERT(kEntryLength == 4);
  Handle<FixedArray> new_code_map;
  int entry;
  if (native_context->OptimizedCodeMapIsCleared()) {
    // First entry ever: allocate a table with room for exactly one entry.
    new_code_map = isolate->factory()->NewFixedArray(kInitialLength, TENURED);
    entry = 0;
  } else {
    Handle<FixedArray> old_code_map(native_context->osr_code_table(), isolate);
    entry = native_context->SearchOptimizedCodeMapEntry(*shared, osr_ast_id);
    if (entry >= 0) {
      // Entry already exists for this key; just refresh its code and
      // literals cells in place and we are done.
      Handle<WeakCell> code_cell = isolate->factory()->NewWeakCell(code);
      old_code_map->set(entry + kCachedCodeOffset, *code_cell);
      Handle<WeakCell> literals_cell =
          isolate->factory()->NewWeakCell(literals);
      old_code_map->set(entry + kLiteralsOffset, *literals_cell);
      return;
    }
    // Can we reuse an entry whose SharedFunctionInfo weak cell was
    // cleared by the GC?
    DCHECK(entry < 0);
    int length = old_code_map->length();
    for (int i = 0; i < length; i += kEntryLength) {
      if (WeakCell::cast(old_code_map->get(i + kSharedOffset))->cleared()) {
        new_code_map = old_code_map;
        entry = i;
        break;
      }
    }
    if (entry < 0) {
      // No dead entry to reuse: copy the old map and append room for
      // one new entry at the end.
      new_code_map = isolate->factory()->CopyFixedArrayAndGrow(
          old_code_map, kEntryLength, TENURED);
      entry = old_code_map->length();
    }
  }
  // Allocate all weak cells before writing the entry so a GC triggered
  // by these allocations never observes a partially written entry.
  Handle<WeakCell> code_cell = isolate->factory()->NewWeakCell(code);
  Handle<WeakCell> literals_cell = isolate->factory()->NewWeakCell(literals);
  Handle<WeakCell> shared_cell = isolate->factory()->NewWeakCell(shared);
  new_code_map->set(entry + kSharedOffset, *shared_cell);
  new_code_map->set(entry + kCachedCodeOffset, *code_cell);
  new_code_map->set(entry + kLiteralsOffset, *literals_cell);
  new_code_map->set(entry + kOsrAstIdOffset, Smi::FromInt(osr_ast_id.ToInt()));
#ifdef DEBUG
  // Verify the shape of every entry in the (possibly new) table.
  for (int i = 0; i < new_code_map->length(); i += kEntryLength) {
    WeakCell* cell = WeakCell::cast(new_code_map->get(i + kSharedOffset));
    DCHECK(cell->cleared() || cell->value()->IsSharedFunctionInfo());
    cell = WeakCell::cast(new_code_map->get(i + kCachedCodeOffset));
    DCHECK(cell->cleared() ||
           (cell->value()->IsCode() &&
            Code::cast(cell->value())->kind() == Code::OPTIMIZED_FUNCTION));
    cell = WeakCell::cast(new_code_map->get(i + kLiteralsOffset));
    DCHECK(cell->cleared() || cell->value()->IsFixedArray());
    DCHECK(new_code_map->get(i + kOsrAstIdOffset)->IsSmi());
  }
#endif
  // Install the table only if it was (re)allocated above; otherwise the
  // entry was written into the already-installed array.
  FixedArray* old_code_map = native_context->osr_code_table();
  if (old_code_map != *new_code_map) {
    native_context->set_osr_code_table(*new_code_map);
  }
}
// Removes every entry whose cached code is |optimized_code| from this
// native context's OSR code table, compacting the surviving entries to
// the front and right-trimming the array. Clears the table entirely
// when no entries survive. |reason| is used for --trace-opt output only.
void Context::EvictFromOptimizedCodeMap(Code* optimized_code,
                                        const char* reason) {
  DCHECK(IsNativeContext());
  DisallowHeapAllocation no_gc;
  if (OptimizedCodeMapIsCleared()) return;
  Heap* heap = GetHeap();
  FixedArray* code_map = osr_code_table();
  int dst = 0;
  int length = code_map->length();
  for (int src = 0; src < length; src += kEntryLength) {
    if (WeakCell::cast(code_map->get(src + kCachedCodeOffset))->value() ==
        optimized_code) {
      BailoutId osr(Smi::cast(code_map->get(src + kOsrAstIdOffset))->value());
      // Every entry in the native-context table is an OSR entry, so the
      // ast id must never be None. Check this unconditionally — in the
      // original placement inside the FLAG_trace_opt block the invariant
      // went unchecked in ordinary debug runs.
      DCHECK(!osr.IsNone());
      if (FLAG_trace_opt) {
        PrintF(
            "[evicting entry from native context optimizing code map (%s) for ",
            reason);
        ShortPrint();
        PrintF(" (osr ast id %d)]\n", osr.ToInt());
      }
      // Evict the src entry by not copying it to the dst entry.
      continue;
    }
    // Keep the src entry by copying it to the dst entry.
    if (dst != src) {
      code_map->set(dst + kSharedOffset, code_map->get(src + kSharedOffset));
      code_map->set(dst + kCachedCodeOffset,
                    code_map->get(src + kCachedCodeOffset));
      code_map->set(dst + kLiteralsOffset,
                    code_map->get(src + kLiteralsOffset));
      code_map->set(dst + kOsrAstIdOffset,
                    code_map->get(src + kOsrAstIdOffset));
    }
    dst += kEntryLength;
  }
  if (dst != length) {
    // Always trim even when array is cleared because of heap verifier.
    heap->RightTrimFixedArray(code_map, length - dst);
    if (code_map->length() == 0) {
      ClearOptimizedCodeMap();
    }
  }
}
// Drops all cached OSR code by resetting the table to the canonical
// empty FixedArray, the state OptimizedCodeMapIsCleared() recognizes.
void Context::ClearOptimizedCodeMap() {
  DCHECK(IsNativeContext());
  set_osr_code_table(GetHeap()->empty_fixed_array());
}
void Context::AddOptimizedFunction(JSFunction* function) {
DCHECK(IsNativeContext());

View File

@ -282,6 +282,7 @@ enum ContextLookupFlags {
V(OBJECT_FUNCTION_INDEX, JSFunction, object_function) \
V(OBJECT_FUNCTION_PROTOTYPE_MAP_INDEX, Map, object_function_prototype_map) \
V(OPAQUE_REFERENCE_FUNCTION_INDEX, JSFunction, opaque_reference_function) \
V(OSR_CODE_TABLE_INDEX, FixedArray, osr_code_table) \
V(PROXY_CALLABLE_MAP_INDEX, Map, proxy_callable_map) \
V(PROXY_CONSTRUCTOR_MAP_INDEX, Map, proxy_constructor_map) \
V(PROXY_FUNCTION_INDEX, JSFunction, proxy_function) \
@ -557,6 +558,27 @@ class Context: public FixedArray {
inline bool HasSameSecurityTokenAs(Context* that);
// Removes a specific optimized code object from the optimized code map.
// In case of non-OSR the code reference is cleared from the cache entry but
// the entry itself is left in the map in order to proceed sharing literals.
void EvictFromOptimizedCodeMap(Code* optimized_code, const char* reason);
// Clear optimized code map.
void ClearOptimizedCodeMap();
// A native context keeps track of all OSR'd optimized functions.
inline bool OptimizedCodeMapIsCleared();
void SearchOptimizedCodeMap(SharedFunctionInfo* shared, BailoutId osr_ast_id,
Code** pcode, LiteralsArray** pliterals);
int SearchOptimizedCodeMapEntry(SharedFunctionInfo* shared,
BailoutId osr_ast_id);
static void AddToOptimizedCodeMap(Handle<Context> native_context,
Handle<SharedFunctionInfo> shared,
Handle<Code> code,
Handle<LiteralsArray> literals,
BailoutId osr_ast_id);
// A native context holds a list of all functions with optimized code.
void AddOptimizedFunction(JSFunction* function);
void RemoveOptimizedFunction(JSFunction* function);

View File

@ -1250,6 +1250,9 @@ bool Debug::PrepareFunctionForBreakPoints(Handle<SharedFunctionInfo> shared) {
}
}
// The native context also has a list of OSR'd optimized code. Clear it.
isolate_->ClearOSROptimizedCode();
// Make sure we abort incremental marking.
isolate_->heap()->CollectAllGarbage(Heap::kMakeHeapIterableMask,
GarbageCollectionReason::kDebugger);

View File

@ -2822,6 +2822,26 @@ bool Isolate::IsArrayOrObjectPrototype(Object* object) {
return false;
}
void Isolate::ClearOSROptimizedCode() {
DisallowHeapAllocation no_gc;
Object* context = heap()->native_contexts_list();
while (!context->IsUndefined(this)) {
Context* current_context = Context::cast(context);
current_context->ClearOptimizedCodeMap();
context = current_context->next_context_link();
}
}
// Evicts |code| from the OSR code table of every live native context;
// |reason| is forwarded for --trace-opt output.
void Isolate::EvictOSROptimizedCode(Code* code, const char* reason) {
  DisallowHeapAllocation no_gc;
  for (Object* current = heap()->native_contexts_list();
       !current->IsUndefined(this);
       current = Context::cast(current)->next_context_link()) {
    Context::cast(current)->EvictFromOptimizedCodeMap(code, reason);
  }
}
bool Isolate::IsInAnyContext(Object* object, uint32_t index) {
DisallowHeapAllocation no_gc;
Object* context = heap()->native_contexts_list();

View File

@ -1164,6 +1164,12 @@ class Isolate {
return compiler_dispatcher_tracer_;
}
// Clear all optimized code stored in native contexts.
void ClearOSROptimizedCode();
// Ensure that a particular optimized code is evicted.
void EvictOSROptimizedCode(Code* code, const char* reason);
bool IsInAnyContext(Object* object, uint32_t index);
void SetRAILMode(RAILMode rail_mode);

View File

@ -12403,16 +12403,23 @@ void SharedFunctionInfo::AddToOptimizedCodeMap(
DCHECK(code.is_null() ||
code.ToHandleChecked()->kind() == Code::OPTIMIZED_FUNCTION);
DCHECK(native_context->IsNativeContext());
STATIC_ASSERT(kEntryLength == 4);
STATIC_ASSERT(kEntryLength == 3);
Handle<FixedArray> new_code_map;
int entry;
if (!osr_ast_id.IsNone()) {
Context::AddToOptimizedCodeMap(
native_context, shared, code.ToHandleChecked(), literals, osr_ast_id);
return;
}
DCHECK(osr_ast_id.IsNone());
if (shared->OptimizedCodeMapIsCleared()) {
new_code_map = isolate->factory()->NewFixedArray(kInitialLength, TENURED);
entry = kEntriesStart;
} else {
Handle<FixedArray> old_code_map(shared->optimized_code_map(), isolate);
entry = shared->SearchOptimizedCodeMapEntry(*native_context, osr_ast_id);
entry = shared->SearchOptimizedCodeMapEntry(*native_context);
if (entry >= kEntriesStart) {
// Just set the code and literals of the entry.
if (!code.is_null()) {
@ -12459,7 +12466,6 @@ void SharedFunctionInfo::AddToOptimizedCodeMap(
new_code_map->set(entry + kContextOffset, context_cell);
new_code_map->set(entry + kCachedCodeOffset, *code_cell);
new_code_map->set(entry + kLiteralsOffset, *literals_cell);
new_code_map->set(entry + kOsrAstIdOffset, Smi::FromInt(osr_ast_id.ToInt()));
#ifdef DEBUG
for (int i = kEntriesStart; i < new_code_map->length(); i += kEntryLength) {
@ -12471,7 +12477,6 @@ void SharedFunctionInfo::AddToOptimizedCodeMap(
Code::cast(cell->value())->kind() == Code::OPTIMIZED_FUNCTION));
cell = WeakCell::cast(new_code_map->get(i + kLiteralsOffset));
DCHECK(cell->cleared() || cell->value()->IsFixedArray());
DCHECK(new_code_map->get(i + kOsrAstIdOffset)->IsSmi());
}
#endif
@ -12493,53 +12498,31 @@ void SharedFunctionInfo::EvictFromOptimizedCodeMap(Code* optimized_code,
DisallowHeapAllocation no_gc;
if (OptimizedCodeMapIsCleared()) return;
Heap* heap = GetHeap();
Isolate* isolate = GetIsolate();
Heap* heap = isolate->heap();
FixedArray* code_map = optimized_code_map();
int dst = kEntriesStart;
int length = code_map->length();
bool found = false;
for (int src = kEntriesStart; src < length; src += kEntryLength) {
DCHECK(WeakCell::cast(code_map->get(src))->cleared() ||
WeakCell::cast(code_map->get(src))->value()->IsNativeContext());
if (WeakCell::cast(code_map->get(src + kCachedCodeOffset))->value() ==
optimized_code) {
BailoutId osr(Smi::cast(code_map->get(src + kOsrAstIdOffset))->value());
found = WeakCell::cast(code_map->get(src + kCachedCodeOffset))->value() ==
optimized_code;
if (found) {
if (FLAG_trace_opt) {
PrintF("[evicting entry from optimizing code map (%s) for ", reason);
ShortPrint();
if (osr.IsNone()) {
PrintF("]\n");
} else {
PrintF(" (osr ast id %d)]\n", osr.ToInt());
}
}
if (!osr.IsNone()) {
// Evict the src entry by not copying it to the dst entry.
continue;
}
// In case of non-OSR entry just clear the code in order to proceed
// sharing literals.
// Just clear the code in order to continue sharing literals.
code_map->set(src + kCachedCodeOffset, heap->empty_weak_cell(),
SKIP_WRITE_BARRIER);
}
}
// Keep the src entry by copying it to the dst entry.
if (dst != src) {
code_map->set(dst + kContextOffset, code_map->get(src + kContextOffset));
code_map->set(dst + kCachedCodeOffset,
code_map->get(src + kCachedCodeOffset));
code_map->set(dst + kLiteralsOffset,
code_map->get(src + kLiteralsOffset));
code_map->set(dst + kOsrAstIdOffset,
code_map->get(src + kOsrAstIdOffset));
}
dst += kEntryLength;
}
if (dst != length) {
// Always trim even when array is cleared because of heap verifier.
heap->RightTrimFixedArray(code_map, length - dst);
if (code_map->length() == kEntriesStart) {
ClearOptimizedCodeMap();
}
if (!found) {
// We didn't find the code in here. It must be osr'd code.
isolate->EvictOSROptimizedCode(optimized_code, reason);
}
}
@ -14076,19 +14059,15 @@ void SharedFunctionInfo::ResetForNewContext(int new_ic_age) {
}
}
int SharedFunctionInfo::SearchOptimizedCodeMapEntry(Context* native_context,
BailoutId osr_ast_id) {
int SharedFunctionInfo::SearchOptimizedCodeMapEntry(Context* native_context) {
DisallowHeapAllocation no_gc;
DCHECK(native_context->IsNativeContext());
if (!OptimizedCodeMapIsCleared()) {
FixedArray* optimized_code_map = this->optimized_code_map();
int length = optimized_code_map->length();
Smi* osr_ast_id_smi = Smi::FromInt(osr_ast_id.ToInt());
for (int i = kEntriesStart; i < length; i += kEntryLength) {
if (WeakCell::cast(optimized_code_map->get(i + kContextOffset))
->value() == native_context &&
optimized_code_map->get(i + kOsrAstIdOffset) == osr_ast_id_smi) {
->value() == native_context) {
return i;
}
}
@ -14111,7 +14090,16 @@ void SharedFunctionInfo::ClearCodeFromOptimizedCodeMap() {
CodeAndLiterals SharedFunctionInfo::SearchOptimizedCodeMap(
Context* native_context, BailoutId osr_ast_id) {
CodeAndLiterals result = {nullptr, nullptr};
int entry = SearchOptimizedCodeMapEntry(native_context, osr_ast_id);
if (!osr_ast_id.IsNone()) {
Code* code;
LiteralsArray* literals;
native_context->SearchOptimizedCodeMap(this, osr_ast_id, &code, &literals);
result = {code, literals};
return result;
}
DCHECK(osr_ast_id.IsNone());
int entry = SearchOptimizedCodeMapEntry(native_context);
if (entry != kNotFound) {
FixedArray* code_map = optimized_code_map();
DCHECK_LE(entry + kEntryLength, code_map->length());

View File

@ -7484,8 +7484,7 @@ class SharedFunctionInfo: public HeapObject {
static const int kContextOffset = 0;
static const int kCachedCodeOffset = 1;
static const int kLiteralsOffset = 2;
static const int kOsrAstIdOffset = 3;
static const int kEntryLength = 4;
static const int kEntryLength = 3;
static const int kInitialLength = kEntriesStart + kEntryLength;
static const int kNotFound = -1;
@ -7499,8 +7498,6 @@ class SharedFunctionInfo: public HeapObject {
kPointerSize * (kCachedCodeOffset - kEntryLength);
static const int kOffsetToPreviousLiterals =
FixedArray::kHeaderSize + kPointerSize * (kLiteralsOffset - kEntryLength);
static const int kOffsetToPreviousOsrAstId =
FixedArray::kHeaderSize + kPointerSize * (kOsrAstIdOffset - kEntryLength);
// [scope_info]: Scope info.
DECL_ACCESSORS(scope_info, ScopeInfo)
@ -8096,11 +8093,10 @@ class SharedFunctionInfo: public HeapObject {
#undef BYTE_OFFSET
private:
// Returns entry from optimized code map for specified context and OSR entry.
// Returns entry from optimized code map for specified context.
// The result is either kNotFound, or a start index of the context-dependent
// entry.
int SearchOptimizedCodeMapEntry(Context* native_context,
BailoutId osr_ast_id);
int SearchOptimizedCodeMapEntry(Context* native_context);
DISALLOW_IMPLICIT_CONSTRUCTORS(SharedFunctionInfo);
};

View File

@ -79,7 +79,7 @@ bytecodes: [
B(Star), R(0),
B(CreateArrayLiteral), U8(0), U8(0), U8(9),
B(Star), R(1),
B(CallJSRuntime), U8(155), R(0), U8(2),
B(CallJSRuntime), U8(156), R(0), U8(2),
/* 44 S> */ B(Return),
]
constant pool: [

View File

@ -126,14 +126,14 @@ bytecodes: [
B(LdaUndefined),
B(Star), R(11),
B(Mov), R(2), R(12),
/* 152 E> */ B(CallJSRuntime), U8(155), R(11), U8(2),
/* 152 E> */ B(CallJSRuntime), U8(156), R(11), U8(2),
B(Star), R(9),
B(CreateArrayLiteral), U8(1), U8(1), U8(9),
B(Star), R(10),
B(CallJSRuntime), U8(154), R(7), U8(4),
B(CallJSRuntime), U8(155), R(7), U8(4),
B(Star), R(5),
B(Mov), R(0), R(6),
/* 140 E> */ B(CallJSRuntime), U8(151), R(3), U8(4),
/* 140 E> */ B(CallJSRuntime), U8(152), R(3), U8(4),
B(Star), R(3),
B(Ldar), R(this),
B(JumpIfNotHole), U8(4),