Reland^2: Remove SFI code field

This is a reland of d8f564eac6

TBR=mstarzinger@chromium.org,yangguo@chromium.org,jgruber@chromium.org

Original change's description:
> Reland: Remove SFI code field
>
> Remove the SharedFunctionInfo code field, inferring the code object
> from the function_data field instead. In some cases, the function_data
> field can now hold a Code object (e.g. some WASM cases).
>
> (Reland of https://chromium-review.googlesource.com/952452)
>
> TBR=mstarzinger@chromium.org
>
> Bug: chromium:783853
> Cq-Include-Trybots: luci.chromium.try:linux_chromium_rel_ng
> Change-Id: I10ea5be7ceed1b51362a2fad9be7397624d69343
> Reviewed-on: https://chromium-review.googlesource.com/970649
> Commit-Queue: Leszek Swirski <leszeks@chromium.org>
> Reviewed-by: Yang Guo <yangguo@chromium.org>
> Reviewed-by: Jakob Gruber <jgruber@chromium.org>
> Reviewed-by: Leszek Swirski <leszeks@chromium.org>
> Cr-Commit-Position: refs/heads/master@{#52136}

Bug: chromium:783853
Cq-Include-Trybots: luci.chromium.try:linux_chromium_rel_ng;master.tryserver.blink:linux_trusty_blink_rel
Change-Id: I5187851b923e9a92f43daf8cb99e662786cbb839
Reviewed-on: https://chromium-review.googlesource.com/975942
Commit-Queue: Leszek Swirski <leszeks@chromium.org>
Reviewed-by: Leszek Swirski <leszeks@chromium.org>
Cr-Commit-Position: refs/heads/master@{#52159}
This commit is contained in:
Leszek Swirski 2018-03-22 16:09:55 +00:00 committed by Commit Bot
parent 111048619d
commit 51ded9d3c0
59 changed files with 908 additions and 526 deletions

View File

@ -732,15 +732,33 @@ StartupData SnapshotCreator::CreateBlob(
i::SerializedHandleChecker handle_checker(isolate, &contexts);
CHECK(handle_checker.CheckGlobalAndEternalHandles());
// Complete in-object slack tracking for all functions.
i::HeapIterator heap_iterator(isolate->heap());
while (i::HeapObject* current_obj = heap_iterator.next()) {
if (!current_obj->IsJSFunction()) continue;
i::JSFunction* fun = i::JSFunction::cast(current_obj);
fun->CompleteInobjectSlackTrackingIfActive();
// Complete in-object slack tracking for all functions.
if (current_obj->IsJSFunction()) {
i::JSFunction* fun = i::JSFunction::cast(current_obj);
fun->CompleteInobjectSlackTrackingIfActive();
}
// Clear out re-compilable data from all shared function infos. Any
// JSFunctions using these SFIs will have their code pointers reset by the
// partial serializer.
if (current_obj->IsSharedFunctionInfo() &&
function_code_handling == FunctionCodeHandling::kClear) {
i::SharedFunctionInfo* shared = i::SharedFunctionInfo::cast(current_obj);
if (shared->HasBytecodeArray()) {
shared->ClearBytecodeArray();
} else if (shared->HasAsmWasmData()) {
shared->ClearAsmWasmData();
} else if (shared->HasPreParsedScopeData()) {
shared->ClearPreParsedScopeData();
}
DCHECK(shared->HasCodeObject() || shared->HasBuiltinId() ||
shared->IsApiFunction());
}
}
i::StartupSerializer startup_serializer(isolate, function_code_handling);
i::StartupSerializer startup_serializer(isolate);
startup_serializer.SerializeStrongReferences();
// Serialize each context with a new partial serializer.
@ -9692,9 +9710,8 @@ Local<Function> debug::GetBuiltin(Isolate* v8_isolate, Builtin builtin) {
}
i::Handle<i::String> name = isolate->factory()->empty_string();
i::Handle<i::Code> code(isolate->builtins()->builtin(builtin_id));
i::NewFunctionArgs args = i::NewFunctionArgs::ForBuiltinWithoutPrototype(
name, code, builtin_id, i::LanguageMode::kSloppy);
name, builtin_id, i::LanguageMode::kSloppy);
i::Handle<i::JSFunction> fun = isolate->factory()->NewFunction(args);
fun->shared()->DontAdaptArguments();

View File

@ -65,21 +65,20 @@ bool AreStdlibMembersValid(Isolate* isolate, Handle<JSReceiver> stdlib,
Handle<Object> value = JSReceiver::GetDataProperty(stdlib, name);
if (!value->IsNaN()) return false;
}
#define STDLIB_MATH_FUNC(fname, FName, ignore1, ignore2) \
if (members.Contains(wasm::AsmJsParser::StandardMember::kMath##FName)) { \
members.Remove(wasm::AsmJsParser::StandardMember::kMath##FName); \
Handle<Name> name(isolate->factory()->InternalizeOneByteString( \
STATIC_CHAR_VECTOR(#fname))); \
Handle<Object> value = StdlibMathMember(isolate, stdlib, name); \
if (!value->IsJSFunction()) return false; \
SharedFunctionInfo* shared = Handle<JSFunction>::cast(value)->shared(); \
if (shared->HasLazyDeserializationBuiltinId()) { \
if (shared->lazy_deserialization_builtin_id() != Builtins::kMath##FName) \
return false; \
} else if (shared->code() != \
isolate->builtins()->builtin(Builtins::kMath##FName)) { \
return false; \
} \
#define STDLIB_MATH_FUNC(fname, FName, ignore1, ignore2) \
if (members.Contains(wasm::AsmJsParser::StandardMember::kMath##FName)) { \
members.Remove(wasm::AsmJsParser::StandardMember::kMath##FName); \
Handle<Name> name(isolate->factory()->InternalizeOneByteString( \
STATIC_CHAR_VECTOR(#fname))); \
Handle<Object> value = StdlibMathMember(isolate, stdlib, name); \
if (!value->IsJSFunction()) return false; \
SharedFunctionInfo* shared = Handle<JSFunction>::cast(value)->shared(); \
if (!shared->HasBuiltinId() || \
shared->builtin_id() != Builtins::kMath##FName) { \
return false; \
} \
DCHECK_EQ(shared->GetCode(), \
isolate->builtins()->builtin(Builtins::kMath##FName)); \
}
STDLIB_MATH_FUNCTION_LIST(STDLIB_MATH_FUNC)
#undef STDLIB_MATH_FUNC
@ -302,7 +301,6 @@ CompilationJob::Status AsmJsCompilationJob::FinalizeJobImpl(Isolate* isolate) {
result->set(kWasmDataCompiledModule, *compiled);
result->set(kWasmDataUsesBitSet, *uses_bitset);
compilation_info()->SetAsmWasmData(result);
compilation_info()->SetCode(BUILTIN_CODE(isolate, InstantiateAsmJs));
RecordHistograms(isolate);
ReportCompilationSuccess(parse_info()->script(),

View File

@ -32,6 +32,7 @@ namespace internal {
V(kInvalidHandleScopeLevel, "Invalid HandleScope level") \
V(kInvalidJumpTableIndex, "Invalid jump table index") \
V(kInvalidRegisterFileInGenerator, "invalid register file in generator") \
V(kInvalidSharedFunctionInfoData, "Invalid SharedFunctionInfo data") \
V(kMissingBytecodeArray, "Missing bytecode array from function") \
V(kObjectNotTagged, "The object is not tagged") \
V(kObjectTagged, "The object is tagged") \

View File

@ -359,10 +359,10 @@ namespace {
// Non-construct case.
V8_NOINLINE Handle<SharedFunctionInfo> SimpleCreateSharedFunctionInfo(
Isolate* isolate, Builtins::Name builtin_id, Handle<String> name, int len) {
Handle<Code> code = isolate->builtins()->builtin_handle(builtin_id);
const bool kNotConstructor = false;
Handle<SharedFunctionInfo> shared = isolate->factory()->NewSharedFunctionInfo(
name, code, kNotConstructor, kNormalFunction, builtin_id);
Handle<SharedFunctionInfo> shared =
isolate->factory()->NewSharedFunctionInfoForBuiltin(
name, builtin_id, kNotConstructor, kNormalFunction);
shared->set_internal_formal_parameter_count(len);
shared->set_length(len);
return shared;
@ -373,10 +373,10 @@ V8_NOINLINE Handle<SharedFunctionInfo>
SimpleCreateConstructorSharedFunctionInfo(Isolate* isolate,
Builtins::Name builtin_id,
Handle<String> name, int len) {
Handle<Code> code = isolate->builtins()->builtin_handle(builtin_id);
const bool kIsConstructor = true;
Handle<SharedFunctionInfo> shared = isolate->factory()->NewSharedFunctionInfo(
name, code, kIsConstructor, kNormalFunction, builtin_id);
Handle<SharedFunctionInfo> shared =
isolate->factory()->NewSharedFunctionInfoForBuiltin(
name, builtin_id, kIsConstructor, kNormalFunction);
shared->SetConstructStub(*BUILTIN_CODE(isolate, JSBuiltinsConstructStub));
shared->set_internal_formal_parameter_count(len);
shared->set_length(len);
@ -402,14 +402,13 @@ V8_NOINLINE Handle<JSFunction> CreateFunction(
Isolate* isolate, Handle<String> name, InstanceType type, int instance_size,
int inobject_properties, MaybeHandle<Object> maybe_prototype,
Builtins::Name builtin_id) {
Handle<Code> code(isolate->builtins()->builtin(builtin_id));
Handle<Object> prototype;
Handle<JSFunction> result;
if (maybe_prototype.ToHandle(&prototype)) {
NewFunctionArgs args = NewFunctionArgs::ForBuiltinWithPrototype(
name, code, prototype, type, instance_size, inobject_properties,
builtin_id, IMMUTABLE);
name, prototype, type, instance_size, inobject_properties, builtin_id,
IMMUTABLE);
result = isolate->factory()->NewFunction(args);
// Make the JSFunction's prototype object fast.
@ -417,7 +416,7 @@ V8_NOINLINE Handle<JSFunction> CreateFunction(
kStartAtReceiver, isolate);
} else {
NewFunctionArgs args = NewFunctionArgs::ForBuiltinWithoutPrototype(
name, code, builtin_id, LanguageMode::kStrict);
name, builtin_id, LanguageMode::kStrict);
result = isolate->factory()->NewFunction(args);
}
@ -608,10 +607,8 @@ Handle<JSFunction> Genesis::CreateEmptyFunction(Isolate* isolate) {
// Allocate the empty function as the prototype for function according to
// ES#sec-properties-of-the-function-prototype-object
Handle<Code> code(BUILTIN_CODE(isolate, EmptyFunction));
NewFunctionArgs args =
NewFunctionArgs::ForBuiltin(factory->empty_string(), code,
empty_function_map, Builtins::kEmptyFunction);
NewFunctionArgs args = NewFunctionArgs::ForBuiltin(
factory->empty_string(), empty_function_map, Builtins::kEmptyFunction);
Handle<JSFunction> empty_function = factory->NewFunction(args);
// --- E m p t y ---
@ -663,9 +660,8 @@ Handle<JSFunction> Genesis::GetThrowTypeErrorIntrinsic() {
return restricted_properties_thrower_;
}
Handle<String> name(factory()->empty_string());
Handle<Code> code = BUILTIN_CODE(isolate(), StrictPoisonPillThrower);
NewFunctionArgs args = NewFunctionArgs::ForBuiltinWithoutPrototype(
name, code, Builtins::kStrictPoisonPillThrower, i::LanguageMode::kStrict);
name, Builtins::kStrictPoisonPillThrower, i::LanguageMode::kStrict);
Handle<JSFunction> function = factory()->NewFunction(args);
function->shared()->DontAdaptArguments();
@ -1228,11 +1224,10 @@ Handle<JSGlobalObject> Genesis::CreateNewGlobals(
if (js_global_object_template.is_null()) {
Handle<String> name(factory()->empty_string());
Handle<Code> code = BUILTIN_CODE(isolate(), Illegal);
Handle<JSObject> prototype =
factory()->NewFunctionPrototype(isolate()->object_function());
NewFunctionArgs args = NewFunctionArgs::ForBuiltinWithPrototype(
name, code, prototype, JS_GLOBAL_OBJECT_TYPE, JSGlobalObject::kSize, 0,
name, prototype, JS_GLOBAL_OBJECT_TYPE, JSGlobalObject::kSize, 0,
Builtins::kIllegal, MUTABLE);
js_global_object_function = factory()->NewFunction(args);
#ifdef DEBUG
@ -1261,9 +1256,8 @@ Handle<JSGlobalObject> Genesis::CreateNewGlobals(
Handle<JSFunction> global_proxy_function;
if (global_proxy_template.IsEmpty()) {
Handle<String> name(factory()->empty_string());
Handle<Code> code = BUILTIN_CODE(isolate(), Illegal);
NewFunctionArgs args = NewFunctionArgs::ForBuiltinWithPrototype(
name, code, factory()->the_hole_value(), JS_GLOBAL_PROXY_TYPE,
name, factory()->the_hole_value(), JS_GLOBAL_PROXY_TYPE,
JSGlobalProxy::SizeWithEmbedderFields(0), 0, Builtins::kIllegal,
MUTABLE);
global_proxy_function = factory()->NewFunction(args);
@ -1399,11 +1393,9 @@ static void InstallError(Isolate* isolate, Handle<JSObject> global,
namespace {
void InstallMakeError(Isolate* isolate, int builtin_id, int context_index) {
Handle<Code> code(isolate->builtins()->builtin(builtin_id));
NewFunctionArgs args = NewFunctionArgs::ForBuiltinWithPrototype(
isolate->factory()->empty_string(), code,
isolate->factory()->the_hole_value(), JS_OBJECT_TYPE,
JSObject::kHeaderSize, 0, builtin_id, MUTABLE);
isolate->factory()->empty_string(), isolate->factory()->the_hole_value(),
JS_OBJECT_TYPE, JSObject::kHeaderSize, 0, builtin_id, MUTABLE);
Handle<JSFunction> function = isolate->factory()->NewFunction(args);
function->shared()->DontAdaptArguments();
@ -3281,10 +3273,9 @@ void Genesis::InitializeGlobal(Handle<JSGlobalObject> global_object,
proxy_function_map->SetInObjectUnusedPropertyFields(unused_property_fields);
Handle<String> name = factory->Proxy_string();
Handle<Code> code(BUILTIN_CODE(isolate, ProxyConstructor));
NewFunctionArgs args = NewFunctionArgs::ForBuiltin(
name, code, proxy_function_map, Builtins::kProxyConstructor);
name, proxy_function_map, Builtins::kProxyConstructor);
Handle<JSFunction> proxy_function = factory->NewFunction(args);
JSFunction::SetInitialMap(proxy_function, isolate->proxy_map(),
@ -3388,9 +3379,9 @@ void Genesis::InitializeGlobal(Handle<JSGlobalObject> global_object,
{ // --- sloppy arguments map
Handle<String> arguments_string = factory->Arguments_string();
NewFunctionArgs args = NewFunctionArgs::ForBuiltinWithPrototype(
arguments_string, BUILTIN_CODE(isolate, Illegal),
isolate->initial_object_prototype(), JS_ARGUMENTS_TYPE,
JSSloppyArgumentsObject::kSize, 2, Builtins::kIllegal, MUTABLE);
arguments_string, isolate->initial_object_prototype(),
JS_ARGUMENTS_TYPE, JSSloppyArgumentsObject::kSize, 2,
Builtins::kIllegal, MUTABLE);
Handle<JSFunction> function = factory->NewFunction(args);
Handle<Map> map(function->initial_map());

View File

@ -1245,10 +1245,8 @@ void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
}
void Builtins::Generate_CompileLazyDeoptimizedCode(MacroAssembler* masm) {
// Set the code slot inside the JSFunction to the trampoline to the
// interpreter entry.
__ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
__ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kCodeOffset));
// Set the code slot inside the JSFunction to CompileLazy.
__ Move(r2, BUILTIN_CODE(masm->isolate(), CompileLazy));
__ str(r2, FieldMemOperand(r1, JSFunction::kCodeOffset));
__ RecordWriteField(r1, JSFunction::kCodeOffset, r2, r4, kLRHasNotBeenSaved,
kDontSaveFPRegs, OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
@ -1256,6 +1254,66 @@ void Builtins::Generate_CompileLazyDeoptimizedCode(MacroAssembler* masm) {
Generate_CompileLazy(masm);
}
// Computes the Code object to execute for a SharedFunctionInfo, given the
// SFI's function_data field in |sfi_data| (the SFI no longer carries a
// dedicated code field; the code is inferred from the type of the data).
// On return |sfi_data| holds the resulting Code object; |scratch1| is
// clobbered.
static void GetSharedFunctionInfoCode(MacroAssembler* masm, Register sfi_data,
                                      Register scratch1) {
  // Figure out the SFI's code object.
  Label done;
  Label check_is_bytecode_array;
  Label check_is_code;
  Label check_is_fixed_array;
  Label check_is_pre_parsed_scope_data;
  Label check_is_function_template_info;

  Register data_type = scratch1;

  // IsSmi: Is builtin. The Smi is used as an index into the isolate's
  // builtins table to load the builtin's Code object.
  __ JumpIfNotSmi(sfi_data, &check_is_bytecode_array);
  __ Move(scratch1,
          Operand(ExternalReference::builtins_address(masm->isolate())));
  __ ldr(sfi_data, MemOperand::PointerAddressFromSmiKey(scratch1, sfi_data));
  __ b(&done);

  // Get map for subsequent checks.
  __ bind(&check_is_bytecode_array);
  __ ldr(data_type, FieldMemOperand(sfi_data, HeapObject::kMapOffset));
  __ ldrh(data_type, FieldMemOperand(data_type, Map::kInstanceTypeOffset));

  // IsBytecodeArray: Interpret bytecode
  __ cmp(data_type, Operand(BYTECODE_ARRAY_TYPE));
  __ b(ne, &check_is_code);
  __ Move(sfi_data, BUILTIN_CODE(masm->isolate(), InterpreterEntryTrampoline));
  __ b(&done);

  // IsCode: Run code
  __ bind(&check_is_code);
  __ cmp(data_type, Operand(CODE_TYPE));
  __ b(eq, &done);
  // Not Code: fall through to the FixedArray check.

  // IsFixedArray: Instantiate using AsmWasmData
  __ bind(&check_is_fixed_array);
  __ cmp(data_type, Operand(FIXED_ARRAY_TYPE));
  __ b(ne, &check_is_pre_parsed_scope_data);
  __ Move(sfi_data, BUILTIN_CODE(masm->isolate(), InstantiateAsmJs));
  __ b(&done);

  // IsPreParsedScopeData: Compile lazy
  __ bind(&check_is_pre_parsed_scope_data);
  __ cmp(data_type, Operand(TUPLE2_TYPE));
  __ b(ne, &check_is_function_template_info);
  __ Move(sfi_data, BUILTIN_CODE(masm->isolate(), CompileLazy));
  __ b(&done);

  // IsFunctionTemplateInfo: API call. In debug builds, assert that this is
  // the only remaining possibility for the data type.
  __ bind(&check_is_function_template_info);
  if (FLAG_debug_code) {
    __ cmp(data_type, Operand(FUNCTION_TEMPLATE_INFO_TYPE));
    __ Assert(eq, AbortReason::kInvalidSharedFunctionInfoData);
  }
  __ Move(sfi_data, BUILTIN_CODE(masm->isolate(), HandleApiCall));

  __ bind(&done);
}
void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- r0 : argument count (preserved for callee)
@ -1278,13 +1336,15 @@ void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
// Is there an optimization marker or optimized code in the feedback vector?
MaybeTailCallOptimizedCodeSlot(masm, feedback_vector, r4, r6, r5);
// We found no optimized code.
// We found no optimized code. Infer the code object needed for the SFI.
Register entry = r4;
__ ldr(entry,
FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
__ ldr(entry,
FieldMemOperand(entry, SharedFunctionInfo::kFunctionDataOffset));
GetSharedFunctionInfoCode(masm, entry, r5);
// If SFI points to anything other than CompileLazy, install that.
__ ldr(entry, FieldMemOperand(entry, SharedFunctionInfo::kCodeOffset));
// If code entry points to anything other than CompileLazy, install that.
__ Move(r5, masm->CodeObject());
__ cmp(entry, r5);
__ b(eq, &gotta_call_runtime);
@ -1354,25 +1414,9 @@ void Builtins::Generate_DeserializeLazy(MacroAssembler* masm) {
{
// If we've reached this spot, the target builtin has been deserialized and
// we simply need to copy it over. First to the shared function info.
// we simply need to copy it over to the target function.
Register target_builtin = scratch1;
Register shared = scratch0;
__ ldr(shared,
FieldMemOperand(target, JSFunction::kSharedFunctionInfoOffset));
CHECK(r5 != target && r5 != scratch0 && r5 != scratch1);
CHECK(r9 != target && r9 != scratch0 && r9 != scratch1);
__ str(target_builtin,
FieldMemOperand(shared, SharedFunctionInfo::kCodeOffset));
__ mov(r9, target_builtin); // Write barrier clobbers r9 below.
__ RecordWriteField(shared, SharedFunctionInfo::kCodeOffset, r9, r5,
kLRHasNotBeenSaved, kDontSaveFPRegs,
OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
// And second to the target function.
__ str(target_builtin, FieldMemOperand(target, JSFunction::kCodeOffset));
__ mov(r9, target_builtin); // Write barrier clobbers r9 below.

View File

@ -1365,10 +1365,8 @@ void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
}
void Builtins::Generate_CompileLazyDeoptimizedCode(MacroAssembler* masm) {
// Set the code slot inside the JSFunction to the trampoline to the
// interpreter entry.
__ Ldr(x2, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
__ Ldr(x2, FieldMemOperand(x2, SharedFunctionInfo::kCodeOffset));
// Set the code slot inside the JSFunction to CompileLazy.
__ Mov(x2, BUILTIN_CODE(masm->isolate(), CompileLazy));
__ Str(x2, FieldMemOperand(x1, JSFunction::kCodeOffset));
__ RecordWriteField(x1, JSFunction::kCodeOffset, x2, x5, kLRHasNotBeenSaved,
kDontSaveFPRegs, OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
@ -1376,6 +1374,66 @@ void Builtins::Generate_CompileLazyDeoptimizedCode(MacroAssembler* masm) {
Generate_CompileLazy(masm);
}
// Computes the Code object to execute for a SharedFunctionInfo, given the
// SFI's function_data field in |sfi_data| (the SFI no longer carries a
// dedicated code field; the code is inferred from the type of the data).
// On return |sfi_data| holds the resulting Code object; |scratch1| is
// clobbered.
static void GetSharedFunctionInfoCode(MacroAssembler* masm, Register sfi_data,
                                      Register scratch1) {
  // Figure out the SFI's code object.
  Label done;
  Label check_is_bytecode_array;
  Label check_is_code;
  Label check_is_fixed_array;
  Label check_is_pre_parsed_scope_data;
  Label check_is_function_template_info;

  Register data_type = scratch1;

  // IsSmi: Is builtin. The Smi is untagged, scaled to a pointer offset, and
  // used to load the builtin's Code object from the isolate's builtins table.
  __ JumpIfNotSmi(sfi_data, &check_is_bytecode_array);
  __ Mov(scratch1, ExternalReference::builtins_address(masm->isolate()));
  __ Mov(sfi_data, Operand::UntagSmiAndScale(sfi_data, kPointerSizeLog2));
  __ Ldr(sfi_data, MemOperand(scratch1, sfi_data));
  __ B(&done);

  // Get map for subsequent checks.
  __ Bind(&check_is_bytecode_array);
  __ Ldr(data_type, FieldMemOperand(sfi_data, HeapObject::kMapOffset));
  __ Ldrh(data_type, FieldMemOperand(data_type, Map::kInstanceTypeOffset));

  // IsBytecodeArray: Interpret bytecode
  __ Cmp(data_type, Operand(BYTECODE_ARRAY_TYPE));
  __ B(ne, &check_is_code);
  __ Move(sfi_data, BUILTIN_CODE(masm->isolate(), InterpreterEntryTrampoline));
  __ B(&done);

  // IsCode: Run code
  __ Bind(&check_is_code);
  __ Cmp(data_type, Operand(CODE_TYPE));
  __ B(eq, &done);
  // Not Code: fall through to the FixedArray check.

  // IsFixedArray: Instantiate using AsmWasmData
  __ Bind(&check_is_fixed_array);
  __ Cmp(data_type, Operand(FIXED_ARRAY_TYPE));
  __ B(ne, &check_is_pre_parsed_scope_data);
  __ Move(sfi_data, BUILTIN_CODE(masm->isolate(), InstantiateAsmJs));
  __ B(&done);

  // IsPreParsedScopeData: Compile lazy
  __ Bind(&check_is_pre_parsed_scope_data);
  __ Cmp(data_type, Operand(TUPLE2_TYPE));
  __ B(ne, &check_is_function_template_info);
  __ Move(sfi_data, BUILTIN_CODE(masm->isolate(), CompileLazy));
  __ B(&done);

  // IsFunctionTemplateInfo: API call. In debug builds, assert that this is
  // the only remaining possibility for the data type.
  __ Bind(&check_is_function_template_info);
  if (FLAG_debug_code) {
    __ Cmp(data_type, Operand(FUNCTION_TEMPLATE_INFO_TYPE));
    __ Assert(eq, AbortReason::kInvalidSharedFunctionInfoData);
  }
  __ Move(sfi_data, BUILTIN_CODE(masm->isolate(), HandleApiCall));

  __ Bind(&done);
}
void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- x0 : argument count (preserved for callee)
@ -1398,13 +1456,15 @@ void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
// Is there an optimization marker or optimized code in the feedback vector?
MaybeTailCallOptimizedCodeSlot(masm, feedback_vector, x7, x4, x5);
// We found no optimized code.
// We found no optimized code. Infer the code object needed for the SFI.
Register entry = x7;
__ Ldr(entry,
FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
__ Ldr(entry,
FieldMemOperand(entry, SharedFunctionInfo::kFunctionDataOffset));
GetSharedFunctionInfoCode(masm, entry, x5);
// If SFI points to anything other than CompileLazy, install that.
__ Ldr(entry, FieldMemOperand(entry, SharedFunctionInfo::kCodeOffset));
// If code entry points to anything other than CompileLazy, install that.
__ Move(x5, masm->CodeObject());
__ Cmp(entry, x5);
__ B(eq, &gotta_call_runtime);
@ -1473,25 +1533,9 @@ void Builtins::Generate_DeserializeLazy(MacroAssembler* masm) {
{
// If we've reached this spot, the target builtin has been deserialized and
// we simply need to copy it over. First to the shared function info.
// we simply need to copy it over to the target function.
Register target_builtin = scratch1;
Register shared = scratch0;
__ Ldr(shared,
FieldMemOperand(target, JSFunction::kSharedFunctionInfoOffset));
CHECK(!x5.is(target) && !x5.is(scratch0) && !x5.is(scratch1));
CHECK(!x9.is(target) && !x9.is(scratch0) && !x9.is(scratch1));
__ Str(target_builtin,
FieldMemOperand(shared, SharedFunctionInfo::kCodeOffset));
__ Mov(x9, target_builtin); // Write barrier clobbers x9 below.
__ RecordWriteField(shared, SharedFunctionInfo::kCodeOffset, x9, x5,
kLRHasNotBeenSaved, kDontSaveFPRegs,
OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
// And second to the target function.
__ Str(target_builtin, FieldMemOperand(target, JSFunction::kCodeOffset));
__ Mov(x9, target_builtin); // Write barrier clobbers x9 below.

View File

@ -112,13 +112,11 @@ void InstallContextFunction(Handle<JSObject> target, const char* name,
Handle<Object> context_name) {
Factory* const factory = target->GetIsolate()->factory();
Handle<Code> code(target->GetIsolate()->builtins()->builtin(builtin_id));
Handle<String> name_string =
Name::ToFunctionName(factory->InternalizeUtf8String(name))
.ToHandleChecked();
NewFunctionArgs args = NewFunctionArgs::ForBuiltinWithoutPrototype(
name_string, code, builtin_id, i::LanguageMode::kSloppy);
name_string, builtin_id, i::LanguageMode::kSloppy);
Handle<JSFunction> fun = factory->NewFunction(args);
fun->shared()->set_native(true);

View File

@ -203,8 +203,7 @@ TF_BUILTIN(DebugBreakTrampoline, CodeStubAssembler) {
BIND(&tailcall_to_shared);
// Tail call into code object on the SharedFunctionInfo.
TNode<Code> code =
CAST(LoadObjectField(shared, SharedFunctionInfo::kCodeOffset));
TNode<Code> code = GetSharedFunctionInfoCode(shared);
// Use the ConstructTrampolineDescriptor because it passes new.target too in
// case this is called during construct.
CSA_ASSERT(this, IsCode(code));

View File

@ -260,6 +260,7 @@ bool Builtins::IsLazy(int index) {
case kFunctionPrototypeHasInstance: // https://crbug.com/v8/6786.
case kHandleApiCall:
case kIllegal:
case kInstantiateAsmJs:
case kInterpreterEnterBytecodeAdvance:
case kInterpreterEnterBytecodeDispatch:
case kInterpreterEntryTrampoline:

View File

@ -1303,10 +1303,8 @@ void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
}
void Builtins::Generate_CompileLazyDeoptimizedCode(MacroAssembler* masm) {
// Set the code slot inside the JSFunction to the trampoline to the
// interpreter entry.
__ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
__ mov(ecx, FieldOperand(ecx, SharedFunctionInfo::kCodeOffset));
// Set the code slot inside the JSFunction to CompileLazy.
__ Move(ecx, BUILTIN_CODE(masm->isolate(), CompileLazy));
__ mov(FieldOperand(edi, JSFunction::kCodeOffset), ecx);
__ RecordWriteField(edi, JSFunction::kCodeOffset, ecx, ebx, kDontSaveFPRegs,
OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
@ -1314,6 +1312,68 @@ void Builtins::Generate_CompileLazyDeoptimizedCode(MacroAssembler* masm) {
Generate_CompileLazy(masm);
}
// Computes the Code object to execute for a SharedFunctionInfo, given the
// SFI's function_data field in |sfi_data| (the SFI no longer carries a
// dedicated code field; the code is inferred from the type of the data).
// On return |sfi_data| holds the resulting Code object; |scratch1| is
// clobbered.
static void GetSharedFunctionInfoCode(MacroAssembler* masm, Register sfi_data,
                                      Register scratch1) {
  // Figure out the SFI's code object.
  Label done;
  Label check_is_bytecode_array;
  Label check_is_code;
  Label check_is_fixed_array;
  Label check_is_pre_parsed_scope_data;
  Label check_is_function_template_info;

  Register data_type = scratch1;

  // IsSmi: Is builtin. The Smi is used as an index into the isolate's
  // builtins table to load the builtin's Code object.
  __ JumpIfNotSmi(sfi_data, &check_is_bytecode_array);
  __ mov(scratch1,
         Immediate(ExternalReference::builtins_address(masm->isolate())));
  // Avoid untagging the Smi unnecessarily: a tagged Smi already carries a
  // factor of 2, so scaling by times_2 yields the pointer-size offset.
  STATIC_ASSERT(times_2 == times_pointer_size - kSmiTagSize);
  __ mov(sfi_data, Operand(scratch1, sfi_data, times_2, 0));
  __ jmp(&done);

  // Get map for subsequent checks.
  __ bind(&check_is_bytecode_array);
  __ mov(data_type, FieldOperand(sfi_data, HeapObject::kMapOffset));
  __ mov(data_type, FieldOperand(data_type, Map::kInstanceTypeOffset));

  // IsBytecodeArray: Interpret bytecode
  __ cmpw(data_type, Immediate(BYTECODE_ARRAY_TYPE));
  __ j(not_equal, &check_is_code);
  __ Move(sfi_data, BUILTIN_CODE(masm->isolate(), InterpreterEntryTrampoline));
  __ jmp(&done);

  // IsCode: Run code
  __ bind(&check_is_code);
  __ cmpw(data_type, Immediate(CODE_TYPE));
  __ j(equal, &done);
  // Not Code: fall through to the FixedArray check.

  // IsFixedArray: Instantiate using AsmWasmData
  __ bind(&check_is_fixed_array);
  __ cmpw(data_type, Immediate(FIXED_ARRAY_TYPE));
  __ j(not_equal, &check_is_pre_parsed_scope_data);
  __ Move(sfi_data, BUILTIN_CODE(masm->isolate(), InstantiateAsmJs));
  __ jmp(&done);

  // IsPreParsedScopeData: Compile lazy
  __ bind(&check_is_pre_parsed_scope_data);
  __ cmpw(data_type, Immediate(TUPLE2_TYPE));
  __ j(not_equal, &check_is_function_template_info);
  __ Move(sfi_data, BUILTIN_CODE(masm->isolate(), CompileLazy));
  __ jmp(&done);

  // IsFunctionTemplateInfo: API call. In debug builds, check that this is
  // the only remaining possibility for the data type.
  __ bind(&check_is_function_template_info);
  if (FLAG_debug_code) {
    __ cmpw(data_type, Immediate(FUNCTION_TEMPLATE_INFO_TYPE));
    __ Check(equal, AbortReason::kInvalidSharedFunctionInfoData);
  }
  __ Move(sfi_data, BUILTIN_CODE(masm->isolate(), HandleApiCall));

  __ bind(&done);
}
void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- eax : argument count (preserved for callee)
@ -1336,12 +1396,13 @@ void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
// Is there an optimization marker or optimized code in the feedback vector?
MaybeTailCallOptimizedCodeSlot(masm, feedback_vector, ecx);
// We found no optimized code.
// We found no optimized code. Infer the code object needed for the SFI.
Register entry = ecx;
__ mov(entry, FieldOperand(closure, JSFunction::kSharedFunctionInfoOffset));
__ mov(entry, FieldOperand(entry, SharedFunctionInfo::kFunctionDataOffset));
GetSharedFunctionInfoCode(masm, entry, ebx);
// If SFI points to anything other than CompileLazy, install that.
__ mov(entry, FieldOperand(entry, SharedFunctionInfo::kCodeOffset));
// If code entry points to anything other than CompileLazy, install that.
__ Move(ebx, masm->CodeObject());
__ cmp(entry, ebx);
__ j(equal, &gotta_call_runtime);
@ -1409,26 +1470,13 @@ void Builtins::Generate_DeserializeLazy(MacroAssembler* masm) {
{
// If we've reached this spot, the target builtin has been deserialized and
// we simply need to copy it over. First to the shared function info.
// we simply need to copy it over to the target function.
Register target_builtin = scratch1;
Register shared = scratch0;
__ mov(shared, FieldOperand(target, JSFunction::kSharedFunctionInfoOffset));
__ mov(FieldOperand(shared, SharedFunctionInfo::kCodeOffset),
target_builtin);
__ push(eax); // Write barrier clobbers these below.
__ push(target_builtin);
__ RecordWriteField(shared, SharedFunctionInfo::kCodeOffset, target_builtin,
eax, kDontSaveFPRegs, OMIT_REMEMBERED_SET,
OMIT_SMI_CHECK);
__ pop(target_builtin); // eax is popped later, shared is now available.
// And second to the target function.
__ mov(FieldOperand(target, JSFunction::kCodeOffset), target_builtin);
__ push(target_builtin); // Write barrier clobbers these below.
__ push(eax); // Write barrier clobbers these below.
__ push(target_builtin);
__ RecordWriteField(target, JSFunction::kCodeOffset, target_builtin, eax,
kDontSaveFPRegs, OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
__ pop(target_builtin);

View File

@ -1252,10 +1252,8 @@ void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
}
void Builtins::Generate_CompileLazyDeoptimizedCode(MacroAssembler* masm) {
// Set the code slot inside the JSFunction to the trampoline to the
// interpreter entry.
__ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
__ lw(a2, FieldMemOperand(a2, SharedFunctionInfo::kCodeOffset));
// Set the code slot inside the JSFunction to CompileLazy.
__ Move(a2, BUILTIN_CODE(masm->isolate(), CompileLazy));
__ sw(a2, FieldMemOperand(a1, JSFunction::kCodeOffset));
__ RecordWriteField(a1, JSFunction::kCodeOffset, a2, t0, kRAHasNotBeenSaved,
kDontSaveFPRegs, OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
@ -1263,6 +1261,68 @@ void Builtins::Generate_CompileLazyDeoptimizedCode(MacroAssembler* masm) {
Generate_CompileLazy(masm);
}
// Computes the Code object to execute for a SharedFunctionInfo, given the
// SFI's function_data field in |sfi_data| (the SFI no longer carries a
// dedicated code field; the code is inferred from the type of the data).
// On return |sfi_data| holds the resulting Code object; |scratch1| is
// clobbered.
static void GetSharedFunctionInfoCode(MacroAssembler* masm, Register sfi_data,
                                      Register scratch1) {
  // Figure out the SFI's code object.
  Label done;
  Label check_is_bytecode_array;
  Label check_is_code;
  Label check_is_fixed_array;
  Label check_is_pre_parsed_scope_data;
  Label check_is_function_template_info;

  Register data_type = scratch1;

  // IsSmi: Is builtin. The tagged Smi is shifted into a pointer-size offset
  // into the isolate's builtins table to load the builtin's Code object.
  __ JumpIfNotSmi(sfi_data, &check_is_bytecode_array);
  __ li(scratch1,
        Operand(ExternalReference::builtins_address(masm->isolate())));
  // Avoid untagging the Smi: shifting the tagged value left by
  // (kPointerSizeLog2 - kSmiTagSize) produces the correct byte offset.
  STATIC_ASSERT(kPointerSizeLog2 > kSmiTagSize);
  STATIC_ASSERT(kSmiShiftSize == 0);
  __ Lsa(scratch1, scratch1, sfi_data, kPointerSizeLog2 - kSmiTagSize);
  __ lw(sfi_data, MemOperand(scratch1));
  __ Branch(&done);

  // Get map for subsequent checks.
  __ bind(&check_is_bytecode_array);
  __ lw(data_type, FieldMemOperand(sfi_data, HeapObject::kMapOffset));
  __ lhu(data_type, FieldMemOperand(data_type, Map::kInstanceTypeOffset));

  // IsBytecodeArray: Interpret bytecode
  __ Branch(&check_is_code, ne, data_type, Operand(BYTECODE_ARRAY_TYPE));
  __ Move(sfi_data, BUILTIN_CODE(masm->isolate(), InterpreterEntryTrampoline));
  __ Branch(&done);

  // IsCode: Run code
  __ bind(&check_is_code);
  __ Branch(&done, eq, data_type, Operand(CODE_TYPE));
  // Not Code: fall through to the FixedArray check.

  // IsFixedArray: Instantiate using AsmWasmData
  __ bind(&check_is_fixed_array);
  __ Branch(&check_is_pre_parsed_scope_data, ne, data_type,
            Operand(FIXED_ARRAY_TYPE));
  __ Move(sfi_data, BUILTIN_CODE(masm->isolate(), InstantiateAsmJs));
  __ Branch(&done);

  // IsPreParsedScopeData: Compile lazy
  __ bind(&check_is_pre_parsed_scope_data);
  __ Branch(&check_is_function_template_info, ne, data_type,
            Operand(TUPLE2_TYPE));
  __ Move(sfi_data, BUILTIN_CODE(masm->isolate(), CompileLazy));
  __ Branch(&done);

  // IsFunctionTemplateInfo: API call. In debug builds, assert that this is
  // the only remaining possibility for the data type.
  __ bind(&check_is_function_template_info);
  if (FLAG_debug_code) {
    __ Assert(eq, AbortReason::kInvalidSharedFunctionInfoData, data_type,
              Operand(FUNCTION_TEMPLATE_INFO_TYPE));
  }
  __ Move(sfi_data, BUILTIN_CODE(masm->isolate(), HandleApiCall));

  __ bind(&done);
}
}
void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- a0 : argument count (preserved for callee)
@ -1285,12 +1345,13 @@ void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
// Is there an optimization marker or optimized code in the feedback vector?
MaybeTailCallOptimizedCodeSlot(masm, feedback_vector, t0, t3, t1);
// We found no optimized code.
// We found no optimized code. Infer the code object needed for the SFI.
Register entry = t0;
__ lw(entry, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
__ lw(entry, FieldMemOperand(entry, SharedFunctionInfo::kFunctionDataOffset));
GetSharedFunctionInfoCode(masm, entry, t1);
// If SFI points to anything other than CompileLazy, install that.
__ lw(entry, FieldMemOperand(entry, SharedFunctionInfo::kCodeOffset));
// If code entry points to anything other than CompileLazy, install that.
__ Move(t1, masm->CodeObject());
__ Branch(&gotta_call_runtime, eq, entry, Operand(t1));
@ -1358,25 +1419,9 @@ void Builtins::Generate_DeserializeLazy(MacroAssembler* masm) {
{
// If we've reached this spot, the target builtin has been deserialized and
// we simply need to copy it over. First to the shared function info.
// we simply need to copy it over to the target function.
Register target_builtin = scratch1;
Register shared = scratch0;
__ lw(shared,
FieldMemOperand(target, JSFunction::kSharedFunctionInfoOffset));
CHECK(t1 != target && t1 != scratch0 && t1 != scratch1);
CHECK(t3 != target && t3 != scratch0 && t3 != scratch1);
__ sw(target_builtin,
FieldMemOperand(shared, SharedFunctionInfo::kCodeOffset));
__ mov(t3, target_builtin); // Write barrier clobbers t3 below.
__ RecordWriteField(shared, SharedFunctionInfo::kCodeOffset, t3, t1,
kRAHasNotBeenSaved, kDontSaveFPRegs,
OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
// And second to the target function.
__ sw(target_builtin, FieldMemOperand(target, JSFunction::kCodeOffset));
__ mov(t3, target_builtin); // Write barrier clobbers t3 below.

View File

@ -1249,10 +1249,8 @@ void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
}
void Builtins::Generate_CompileLazyDeoptimizedCode(MacroAssembler* masm) {
// Set the code slot inside the JSFunction to the trampoline to the
// interpreter entry.
__ Ld(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
__ Ld(a2, FieldMemOperand(a2, SharedFunctionInfo::kCodeOffset));
// Set the code slot inside the JSFunction to CompileLazy.
__ Move(a2, BUILTIN_CODE(masm->isolate(), CompileLazy));
__ Sd(a2, FieldMemOperand(a1, JSFunction::kCodeOffset));
__ RecordWriteField(a1, JSFunction::kCodeOffset, a2, a4, kRAHasNotBeenSaved,
kDontSaveFPRegs, OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
@ -1260,6 +1258,68 @@ void Builtins::Generate_CompileLazyDeoptimizedCode(MacroAssembler* masm) {
Generate_CompileLazy(masm);
}
// Infers the code object to run for a SharedFunctionInfo whose function_data
// field has been loaded into |sfi_data|. On return, |sfi_data| holds the Code
// object to execute; |scratch1| is clobbered. Dispatch is on the type of the
// data:
//   - Smi:                  builtin id -> load Code from the builtins table
//   - BytecodeArray:        InterpreterEntryTrampoline
//   - Code:                 the code object itself
//   - FixedArray:           asm-wasm data -> InstantiateAsmJs
//   - Tuple2:               pre-parsed scope data -> CompileLazy
//   - FunctionTemplateInfo: HandleApiCall (default case)
static void GetSharedFunctionInfoCode(MacroAssembler* masm, Register sfi_data,
                                      Register scratch1) {
  // Figure out the SFI's code object.
  Label done;
  Label check_is_bytecode_array;
  Label check_is_code;
  Label check_is_fixed_array;
  Label check_is_pre_parsed_scope_data;
  Label check_is_function_template_info;

  // |data_type| aliases |scratch1|: it holds the map/instance type of the
  // data object once we know the data is not a Smi.
  Register data_type = scratch1;

  // IsSmi: Is builtin
  __ JumpIfNotSmi(sfi_data, &check_is_bytecode_array);
  __ li(scratch1,
        Operand(ExternalReference::builtins_address(masm->isolate())));
  // Avoid untagging the Smi by merging the shift
  // (shift right by kSmiShift - kPointerSizeLog2 turns the tagged builtin id
  // directly into a byte offset into the builtins table).
  STATIC_ASSERT(kPointerSizeLog2 < kSmiShift);
  __ dsrl(sfi_data, sfi_data, kSmiShift - kPointerSizeLog2);
  __ Daddu(scratch1, scratch1, sfi_data);
  __ Ld(sfi_data, MemOperand(scratch1));
  __ Branch(&done);

  // Get map for subsequent checks.
  __ bind(&check_is_bytecode_array);
  __ Ld(data_type, FieldMemOperand(sfi_data, HeapObject::kMapOffset));
  __ Lhu(data_type, FieldMemOperand(data_type, Map::kInstanceTypeOffset));

  // IsBytecodeArray: Interpret bytecode
  __ Branch(&check_is_code, ne, data_type, Operand(BYTECODE_ARRAY_TYPE));
  __ Move(sfi_data, BUILTIN_CODE(masm->isolate(), InterpreterEntryTrampoline));
  __ Branch(&done);

  // IsCode: Run code (|sfi_data| already holds the Code object).
  __ bind(&check_is_code);
  __ Branch(&done, eq, data_type, Operand(CODE_TYPE));

  // IsFixedArray: Instantiate using AsmWasmData,
  __ bind(&check_is_fixed_array);
  __ Branch(&check_is_pre_parsed_scope_data, ne, data_type,
            Operand(FIXED_ARRAY_TYPE));
  __ Move(sfi_data, BUILTIN_CODE(masm->isolate(), InstantiateAsmJs));
  __ Branch(&done);

  // IsPreParsedScopeData: Compile lazy
  __ bind(&check_is_pre_parsed_scope_data);
  __ Branch(&check_is_function_template_info, ne, data_type,
            Operand(TUPLE2_TYPE));
  __ Move(sfi_data, BUILTIN_CODE(masm->isolate(), CompileLazy));
  __ Branch(&done);

  // IsFunctionTemplateInfo: API call
  __ bind(&check_is_function_template_info);
  if (FLAG_debug_code) {
    // Anything else must be a FunctionTemplateInfo; abort in debug builds if
    // an unexpected data type shows up here.
    __ Assert(eq, AbortReason::kInvalidSharedFunctionInfoData, data_type,
              Operand(FUNCTION_TEMPLATE_INFO_TYPE));
  }
  __ Move(sfi_data, BUILTIN_CODE(masm->isolate(), HandleApiCall));

  __ bind(&done);
}
void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- a0 : argument count (preserved for callee)
@ -1282,12 +1342,12 @@ void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
// Is there an optimization marker or optimized code in the feedback vector?
MaybeTailCallOptimizedCodeSlot(masm, feedback_vector, a4, t3, a5);
// We found no optimized code.
// We found no optimized code. Infer the code object needed for the SFI.
Register entry = a4;
__ Ld(entry, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
__ Ld(entry, FieldMemOperand(entry, SharedFunctionInfo::kFunctionDataOffset));
GetSharedFunctionInfoCode(masm, entry, t1);
// If SFI points to anything other than CompileLazy, install that.
__ Ld(entry, FieldMemOperand(entry, SharedFunctionInfo::kCodeOffset));
__ Move(t1, masm->CodeObject());
__ Branch(&gotta_call_runtime, eq, entry, Operand(t1));
@ -1356,25 +1416,9 @@ void Builtins::Generate_DeserializeLazy(MacroAssembler* masm) {
{
// If we've reached this spot, the target builtin has been deserialized and
// we simply need to copy it over. First to the shared function info.
// we simply need to copy it over to the target function.
Register target_builtin = scratch1;
Register shared = scratch0;
__ Ld(shared,
FieldMemOperand(target, JSFunction::kSharedFunctionInfoOffset));
CHECK(t1 != target && t1 != scratch0 && t1 != scratch1);
CHECK(t3 != target && t3 != scratch0 && t3 != scratch1);
__ Sd(target_builtin,
FieldMemOperand(shared, SharedFunctionInfo::kCodeOffset));
__ mov(t3, target_builtin); // Write barrier clobbers t3 below.
__ RecordWriteField(shared, SharedFunctionInfo::kCodeOffset, t3, t1,
kRAHasNotBeenSaved, kDontSaveFPRegs,
OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
// And second to the target function.
__ Sd(target_builtin, FieldMemOperand(target, JSFunction::kCodeOffset));
__ mov(t3, target_builtin); // Write barrier clobbers t3 below.

View File

@ -1279,17 +1279,75 @@ void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
// builtin does not set the code field in the JS function. If there isn't then
// we do not need this builtin and can jump directly to CompileLazy.
void Builtins::Generate_CompileLazyDeoptimizedCode(MacroAssembler* masm) {
// Set the code slot inside the JSFunction to the trampoline to the
// interpreter entry.
__ movq(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
__ movq(rcx, FieldOperand(rcx, SharedFunctionInfo::kCodeOffset));
__ movq(FieldOperand(rdi, JSFunction::kCodeOffset), rcx);
// Set the code slot inside the JSFunction to CompileLazy.
__ Move(rcx, BUILTIN_CODE(masm->isolate(), CompileLazy));
__ movp(FieldOperand(rdi, JSFunction::kCodeOffset), rcx);
__ RecordWriteField(rdi, JSFunction::kCodeOffset, rcx, r15, kDontSaveFPRegs,
OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
// Jump to compile lazy.
Generate_CompileLazy(masm);
}
// Infers the code object to run for a SharedFunctionInfo whose function_data
// field has been loaded into |sfi_data|. On return, |sfi_data| holds the Code
// object to execute; |scratch1| is clobbered. Dispatch is on the type of the
// data:
//   - Smi:                  builtin id -> load Code from the builtins table
//   - BytecodeArray:        InterpreterEntryTrampoline
//   - Code:                 the code object itself
//   - FixedArray:           asm-wasm data -> InstantiateAsmJs
//   - Tuple2:               pre-parsed scope data -> CompileLazy
//   - FunctionTemplateInfo: HandleApiCall (default case)
static void GetSharedFunctionInfoCode(MacroAssembler* masm, Register sfi_data,
                                      Register scratch1) {
  // Figure out the SFI's code object.
  Label done;
  Label check_is_bytecode_array;
  Label check_is_code;
  Label check_is_fixed_array;
  Label check_is_pre_parsed_scope_data;
  Label check_is_function_template_info;

  // |data_type| aliases |scratch1|: it holds the instance type of the data
  // object once we know the data is not a Smi.
  Register data_type = scratch1;

  // IsSmi: Is builtin
  __ JumpIfNotSmi(sfi_data, &check_is_bytecode_array);
  __ Move(scratch1, ExternalReference::builtins_address(masm->isolate()));
  // SmiToIndex untags the builtin id and scales it to a table index in one
  // step, so the Smi is never materialized as a plain integer register value.
  SmiIndex index = masm->SmiToIndex(sfi_data, sfi_data, kPointerSizeLog2);
  __ movp(sfi_data, Operand(scratch1, index.reg, index.scale, 0));
  __ j(always, &done);

  // Get map for subsequent checks.
  __ bind(&check_is_bytecode_array);
  __ movp(data_type, FieldOperand(sfi_data, HeapObject::kMapOffset));
  __ movw(data_type, FieldOperand(data_type, Map::kInstanceTypeOffset));

  // IsBytecodeArray: Interpret bytecode
  __ cmpw(data_type, Immediate(BYTECODE_ARRAY_TYPE));
  __ j(not_equal, &check_is_code);
  __ Move(sfi_data, BUILTIN_CODE(masm->isolate(), InterpreterEntryTrampoline));
  __ j(always, &done);

  // IsCode: Run code (|sfi_data| already holds the Code object).
  __ bind(&check_is_code);
  __ cmpw(data_type, Immediate(CODE_TYPE));
  __ j(equal, &done);

  // IsFixedArray: Instantiate using AsmWasmData,
  __ bind(&check_is_fixed_array);
  __ cmpw(data_type, Immediate(FIXED_ARRAY_TYPE));
  __ j(not_equal, &check_is_pre_parsed_scope_data);
  __ Move(sfi_data, BUILTIN_CODE(masm->isolate(), InstantiateAsmJs));
  __ j(always, &done);

  // IsPreParsedScopeData: Compile lazy
  __ bind(&check_is_pre_parsed_scope_data);
  __ cmpw(data_type, Immediate(TUPLE2_TYPE));
  __ j(not_equal, &check_is_function_template_info);
  __ Move(sfi_data, BUILTIN_CODE(masm->isolate(), CompileLazy));
  __ j(always, &done);

  // IsFunctionTemplateInfo: API call
  __ bind(&check_is_function_template_info);
  if (FLAG_debug_code) {
    // Anything else must be a FunctionTemplateInfo; abort in debug builds if
    // an unexpected data type shows up here.
    __ cmpw(data_type, Immediate(FUNCTION_TEMPLATE_INFO_TYPE));
    __ Check(equal, AbortReason::kInvalidSharedFunctionInfoData);
  }
  __ Move(sfi_data, BUILTIN_CODE(masm->isolate(), HandleApiCall));

  __ bind(&done);
}
void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- rax : argument count (preserved for callee)
@ -1312,12 +1370,13 @@ void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
// Is there an optimization marker or optimized code in the feedback vector?
MaybeTailCallOptimizedCodeSlot(masm, feedback_vector, rcx, r14, r15);
// We found no optimized code.
// We found no optimized code. Infer the code object needed for the SFI.
Register entry = rcx;
__ movp(entry, FieldOperand(closure, JSFunction::kSharedFunctionInfoOffset));
__ movp(entry, FieldOperand(entry, SharedFunctionInfo::kFunctionDataOffset));
GetSharedFunctionInfoCode(masm, entry, rbx);
// If SFI points to anything other than CompileLazy, install that.
__ movp(entry, FieldOperand(entry, SharedFunctionInfo::kCodeOffset));
// If code entry points to anything other than CompileLazy, install that.
__ Move(rbx, masm->CodeObject());
__ cmpp(entry, rbx);
__ j(equal, &gotta_call_runtime);
@ -1385,24 +1444,9 @@ void Builtins::Generate_DeserializeLazy(MacroAssembler* masm) {
{
// If we've reached this spot, the target builtin has been deserialized and
// we simply need to copy it over. First to the shared function info.
// we simply need to copy it over to the target function.
Register target_builtin = scratch1;
Register shared = scratch0;
__ movp(shared,
FieldOperand(target, JSFunction::kSharedFunctionInfoOffset));
CHECK(r14 != target && r14 != scratch0 && r14 != scratch1);
CHECK(r15 != target && r15 != scratch0 && r15 != scratch1);
__ movp(FieldOperand(shared, SharedFunctionInfo::kCodeOffset),
target_builtin);
__ movp(r14, target_builtin); // Write barrier clobbers r14 below.
__ RecordWriteField(shared, SharedFunctionInfo::kCodeOffset, r14, r15,
kDontSaveFPRegs, OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
// And second to the target function.
__ movp(FieldOperand(target, JSFunction::kCodeOffset), target_builtin);
__ movp(r14, target_builtin); // Write barrier clobbers r14 below.

View File

@ -10778,13 +10778,98 @@ Node* CodeStubAssembler::IsPromiseHookEnabledOrDebugIsActive() {
return Word32NotEqual(promise_hook_or_debug_is_active, Int32Constant(0));
}
// Loads a builtin's Code object from the isolate's builtins table, given a
// tagged (Smi) builtin id. The id must be a valid index into the table.
TNode<Code> CodeStubAssembler::LoadBuiltin(TNode<Smi> builtin_id) {
  // Sanity-check the id range: [0, builtin_count).
  CSA_ASSERT(this, SmiGreaterThanOrEqual(builtin_id, SmiConstant(0)));
  CSA_ASSERT(this,
             SmiLessThan(builtin_id, SmiConstant(Builtins::builtin_count)));

  // Turn the tagged id directly into a byte offset into the table by merging
  // the smi-untagging shift with the pointer-size scaling shift. Depending on
  // the smi layout this nets out to a left or a right shift.
  TNode<WordT> raw_id = BitcastTaggedToWord(builtin_id);
  int const net_shift = kPointerSizeLog2 - (kSmiShiftSize + kSmiTagSize);
  TNode<WordT> table_offset = net_shift >= 0
                                  ? WordShl(raw_id, net_shift)
                                  : WordSar(raw_id, -net_shift);

  return CAST(
      Load(MachineType::TaggedPointer(),
           ExternalConstant(ExternalReference::builtins_address(isolate())),
           table_offset));
}
// Infers the Code object to run for |shared_info| from its function_data
// field. Dispatch is on the type of the data:
//   - Smi:                  builtin id -> load Code from the builtins table
//   - BytecodeArray:        InterpreterEntryTrampoline
//   - Code:                 the code object itself
//   - FixedArray:           asm-wasm data -> InstantiateAsmJs
//   - Tuple2:               pre-parsed scope data -> CompileLazy
//   - FunctionTemplateInfo: HandleApiCall (default case)
TNode<Code> CodeStubAssembler::GetSharedFunctionInfoCode(
    SloppyTNode<SharedFunctionInfo> shared_info) {
  TNode<Object> sfi_data =
      LoadObjectField(shared_info, SharedFunctionInfo::kFunctionDataOffset);

  TYPED_VARIABLE_DEF(Code, sfi_code, this);

  Label done(this);
  Label check_instance_type(this);

  // IsSmi: Is builtin
  GotoIf(TaggedIsNotSmi(sfi_data), &check_instance_type);
  sfi_code = LoadBuiltin(CAST(sfi_data));
  Goto(&done);

  // Switch on data's instance type.
  BIND(&check_instance_type);
  TNode<Int32T> data_type = LoadInstanceType(CAST(sfi_data));

  // The case values and case labels must stay in the same order; the
  // STATIC_ASSERT below guards the array lengths matching.
  int32_t case_values[] = {BYTECODE_ARRAY_TYPE, CODE_TYPE, FIXED_ARRAY_TYPE,
                           TUPLE2_TYPE};
  Label check_is_bytecode_array(this);
  Label check_is_code(this);
  Label check_is_fixed_array(this);
  Label check_is_pre_parsed_scope_data(this);
  Label check_is_function_template_info(this);
  Label* case_labels[] = {&check_is_bytecode_array, &check_is_code,
                          &check_is_fixed_array,
                          &check_is_pre_parsed_scope_data};
  STATIC_ASSERT(arraysize(case_values) == arraysize(case_labels));
  Switch(data_type, &check_is_function_template_info, case_values, case_labels,
         arraysize(case_labels));

  // IsBytecodeArray: Interpret bytecode
  BIND(&check_is_bytecode_array);
  // The target builtins below must be eagerly deserialized, since this helper
  // returns them without going through lazy deserialization.
  DCHECK(!Builtins::IsLazy(Builtins::kInterpreterEntryTrampoline));
  sfi_code = HeapConstant(BUILTIN_CODE(isolate(), InterpreterEntryTrampoline));
  Goto(&done);

  // IsCode: Run code (the data already is the Code object).
  BIND(&check_is_code);
  sfi_code = CAST(sfi_data);
  Goto(&done);

  // IsFixedArray: Instantiate using AsmWasmData,
  BIND(&check_is_fixed_array);
  DCHECK(!Builtins::IsLazy(Builtins::kInstantiateAsmJs));
  sfi_code = HeapConstant(BUILTIN_CODE(isolate(), InstantiateAsmJs));
  Goto(&done);

  // IsPreParsedScopeData: Compile lazy
  BIND(&check_is_pre_parsed_scope_data);
  DCHECK(!Builtins::IsLazy(Builtins::kCompileLazy));
  sfi_code = HeapConstant(BUILTIN_CODE(isolate(), CompileLazy));
  Goto(&done);

  // IsFunctionTemplateInfo: API call
  BIND(&check_is_function_template_info);
  // This is the default branch, so assert that we have the expected data type.
  CSA_ASSERT(
      this, Word32Equal(data_type, Int32Constant(FUNCTION_TEMPLATE_INFO_TYPE)));
  DCHECK(!Builtins::IsLazy(Builtins::kHandleApiCall));
  sfi_code = HeapConstant(BUILTIN_CODE(isolate(), HandleApiCall));
  Goto(&done);

  BIND(&done);
  return sfi_code.value();
}
Node* CodeStubAssembler::AllocateFunctionWithMapAndContext(Node* map,
Node* shared_info,
Node* context) {
CSA_SLOW_ASSERT(this, IsMap(map));
Node* const code =
LoadObjectField(shared_info, SharedFunctionInfo::kCodeOffset);
Node* const code = GetSharedFunctionInfoCode(shared_info);
// TODO(ishell): All the callers of this function pass map loaded from
// Context::STRICT_FUNCTION_WITHOUT_PROTOTYPE_MAP_INDEX. So we can remove

View File

@ -1933,6 +1933,13 @@ class V8_EXPORT_PRIVATE CodeStubAssembler : public compiler::CodeAssembler {
Node* ElementOffsetFromIndex(Node* index, ElementsKind kind,
ParameterMode mode, int base_size = 0);
// Load a builtin's code from the builtin array in the isolate.
TNode<Code> LoadBuiltin(TNode<Smi> builtin_id);
// Figure out the SFI's code object using its data field.
TNode<Code> GetSharedFunctionInfoCode(
SloppyTNode<SharedFunctionInfo> shared_info);
Node* AllocateFunctionWithMapAndContext(Node* map, Node* shared_info,
Node* context);

View File

@ -21,7 +21,7 @@ namespace {
void DisposeCompilationJob(CompilationJob* job, bool restore_function_code) {
if (restore_function_code) {
Handle<JSFunction> function = job->compilation_info()->closure();
function->set_code(function->shared()->code());
function->set_code(function->shared()->GetCode());
if (function->IsInOptimizationQueue()) {
function->ClearOptimizationMarker();
}

View File

@ -143,7 +143,8 @@ void CompilationJob::RecordUnoptimizedCompilationStats(Isolate* isolate) const {
if (compilation_info()->has_bytecode_array()) {
code_size = compilation_info()->bytecode_array()->SizeIncludingMetadata();
} else {
code_size = compilation_info()->code()->SizeIncludingMetadata();
DCHECK(compilation_info()->has_asm_wasm_data());
code_size = compilation_info()->asm_wasm_data()->Size();
}
Counters* counters = isolate->counters();
@ -192,10 +193,17 @@ void CompilationJob::RecordFunctionCompilation(
Handle<SharedFunctionInfo> shared = compilation_info->shared_info();
Handle<Script> script = parse_info()->script();
Handle<AbstractCode> abstract_code =
compilation_info->has_bytecode_array()
? Handle<AbstractCode>::cast(compilation_info->bytecode_array())
: Handle<AbstractCode>::cast(compilation_info->code());
Handle<AbstractCode> abstract_code;
if (compilation_info->has_bytecode_array()) {
abstract_code =
Handle<AbstractCode>::cast(compilation_info->bytecode_array());
} else if (compilation_info->has_asm_wasm_data()) {
abstract_code =
Handle<AbstractCode>::cast(BUILTIN_CODE(isolate, InstantiateAsmJs));
} else {
DCHECK(!compilation_info->code().is_null());
abstract_code = Handle<AbstractCode>::cast(compilation_info->code());
}
if (abstract_code.is_identical_to(BUILTIN_CODE(isolate, CompileLazy))) {
return;
@ -296,13 +304,14 @@ void InstallUnoptimizedCode(CompilationInfo* compilation_info,
Scope* outer_scope = compilation_info->scope()->GetOuterScopeWithContext();
if (outer_scope) shared->set_outer_scope_info(*outer_scope->scope_info());
DCHECK(!compilation_info->code().is_null());
shared->set_code(*compilation_info->code());
// We shouldn't have a code object, just bytecode or asm-wasm data.
DCHECK(compilation_info->code().is_null());
if (compilation_info->has_bytecode_array()) {
DCHECK(!shared->HasBytecodeArray()); // Only compiled once.
DCHECK(!compilation_info->has_asm_wasm_data());
shared->set_bytecode_array(*compilation_info->bytecode_array());
} else if (compilation_info->has_asm_wasm_data()) {
} else {
DCHECK(compilation_info->has_asm_wasm_data());
shared->set_asm_wasm_data(*compilation_info->asm_wasm_data());
}
@ -755,7 +764,7 @@ CompilationJob::Status FinalizeOptimizedCompilationJob(CompilationJob* job,
PrintF(" because: %s]\n",
GetBailoutReason(compilation_info->bailout_reason()));
}
compilation_info->closure()->set_code(shared->code());
compilation_info->closure()->set_code(shared->GetCode());
// Clear the InOptimizationQueue marker, if it exists.
if (compilation_info->closure()->IsInOptimizationQueue()) {
compilation_info->closure()->ClearOptimizationMarker();
@ -1067,7 +1076,7 @@ bool Compiler::Compile(Handle<JSFunction> function, ClearExceptionFlag flag) {
// Ensure shared function info is compiled.
if (!shared_info->is_compiled() && !Compile(shared_info, flag)) return false;
Handle<Code> code = handle(shared_info->code(), isolate);
Handle<Code> code = handle(shared_info->GetCode(), isolate);
// Allocate FeedbackVector for the JSFunction.
JSFunction::EnsureFeedbackVector(function);
@ -1109,7 +1118,8 @@ bool Compiler::CompileOptimized(Handle<JSFunction> function,
// already if we are optimizing.
DCHECK(!isolate->has_pending_exception());
DCHECK(function->shared()->is_compiled());
code = handle(function->shared()->code(), isolate);
DCHECK(function->shared()->IsInterpreted());
code = BUILTIN_CODE(isolate, InterpreterEntryTrampoline);
}
// Install code on closure.

View File

@ -585,6 +585,10 @@ TNode<WordT> CodeAssembler::WordShr(SloppyTNode<WordT> value, int shift) {
return (shift != 0) ? WordShr(value, IntPtrConstant(shift)) : value;
}
// Arithmetic right shift by a constant amount. A zero shift is a no-op, so we
// return the input unchanged rather than materializing an IntPtrConstant.
TNode<WordT> CodeAssembler::WordSar(SloppyTNode<WordT> value, int shift) {
  if (shift == 0) return value;
  return WordSar(value, IntPtrConstant(shift));
}
TNode<Word32T> CodeAssembler::Word32Shr(SloppyTNode<Word32T> value, int shift) {
return (shift != 0) ? Word32Shr(value, Int32Constant(shift)) : value;
}

View File

@ -859,6 +859,7 @@ class V8_EXPORT_PRIVATE CodeAssembler {
TNode<WordT> WordShl(SloppyTNode<WordT> value, int shift);
TNode<WordT> WordShr(SloppyTNode<WordT> value, int shift);
TNode<WordT> WordSar(SloppyTNode<WordT> value, int shift);
TNode<IntPtrT> WordShr(TNode<IntPtrT> value, int shift) {
return UncheckedCast<IntPtrT>(WordShr(static_cast<Node*>(value), shift));
}

View File

@ -3285,7 +3285,10 @@ Reduction JSCallReducer::ReduceJSCall(Node* node,
}
// Check for known builtin functions.
switch (shared->code()->builtin_index()) {
int builtin_id =
shared->HasBuiltinId() ? shared->builtin_id() : Builtins::kNoBuiltinId;
switch (builtin_id) {
case Builtins::kArrayConstructor:
return ReduceArrayConstructor(node);
case Builtins::kBooleanConstructor:
@ -3624,7 +3627,10 @@ Reduction JSCallReducer::ReduceJSConstruct(Node* node) {
if (function->native_context() != *native_context()) return NoChange();
// Check for known builtin functions.
switch (function->shared()->code()->builtin_index()) {
int builtin_id = function->shared()->HasBuiltinId()
? function->shared()->builtin_id()
: Builtins::kNoBuiltinId;
switch (builtin_id) {
case Builtins::kArrayConstructor: {
// TODO(bmeurer): Deal with Array subclasses here.
Handle<AllocationSite> site;
@ -5372,7 +5378,7 @@ Reduction JSCallReducer::ReducePromiseConstructor(Node* node) {
Node* resolve = effect =
graph()->NewNode(javascript()->CreateClosure(
resolve_shared, factory()->many_closures_cell(),
handle(resolve_shared->code(), isolate())),
handle(resolve_shared->GetCode(), isolate())),
promise_context, effect, control);
// Allocate the closure for the reject case.
@ -5382,7 +5388,7 @@ Reduction JSCallReducer::ReducePromiseConstructor(Node* node) {
Node* reject = effect =
graph()->NewNode(javascript()->CreateClosure(
reject_shared, factory()->many_closures_cell(),
handle(reject_shared->code(), isolate())),
handle(reject_shared->GetCode(), isolate())),
promise_context, effect, control);
// Re-use the params from above, but actually set the promise parameter now.
@ -5677,7 +5683,7 @@ Reduction JSCallReducer::ReducePromisePrototypeFinally(Node* node) {
catch_true = etrue =
graph()->NewNode(javascript()->CreateClosure(
catch_finally, factory()->many_closures_cell(),
handle(catch_finally->code(), isolate())),
handle(catch_finally->GetCode(), isolate())),
context, etrue, if_true);
// Allocate the closure for the fulfill case.
@ -5686,7 +5692,7 @@ Reduction JSCallReducer::ReducePromisePrototypeFinally(Node* node) {
then_true = etrue =
graph()->NewNode(javascript()->CreateClosure(
then_finally, factory()->many_closures_cell(),
handle(then_finally->code(), isolate())),
handle(then_finally->GetCode(), isolate())),
context, etrue, if_true);
}

View File

@ -1683,9 +1683,6 @@ Reduction JSTypedLowering::ReduceJSCall(Node* node) {
return NoChange();
}
const int builtin_index = shared->code()->builtin_index();
const bool is_builtin = (builtin_index != -1);
// Class constructors are callable, but [[Call]] will raise an exception.
// See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList ).
if (IsClassConstructor(shared->kind())) return NoChange();
@ -1729,9 +1726,11 @@ Reduction JSTypedLowering::ReduceJSCall(Node* node) {
node, common()->Call(Linkage::GetStubCallDescriptor(
isolate(), graph()->zone(), callable.descriptor(),
1 + arity, flags)));
} else if (is_builtin && Builtins::HasCppImplementation(builtin_index)) {
} else if (shared->HasBuiltinId() &&
Builtins::HasCppImplementation(shared->builtin_id())) {
// Patch {node} to a direct CEntryStub call.
ReduceBuiltin(isolate(), jsgraph(), node, builtin_index, arity, flags);
ReduceBuiltin(isolate(), jsgraph(), node, shared->builtin_id(), arity,
flags);
} else {
// Patch {node} to a direct call.
node->InsertInput(graph()->zone(), arity + 2, new_target);

View File

@ -1715,10 +1715,11 @@ Node* WasmGraphBuilder::BuildI32Ctz(Node* input) {
}
Node* WasmGraphBuilder::BuildI64Ctz(Node* input) {
return Unop(wasm::kExprI64UConvertI32,
BuildBitCountingCall(input, ExternalReference::wasm_word64_ctz(
jsgraph()->isolate()),
MachineRepresentation::kWord64));
return Unop(
wasm::kExprI64UConvertI32,
BuildBitCountingCall(
input, ExternalReference::wasm_word64_ctz(jsgraph()->isolate()),
MachineRepresentation::kWord64));
}
Node* WasmGraphBuilder::BuildI32Popcnt(Node* input) {
@ -1728,10 +1729,11 @@ Node* WasmGraphBuilder::BuildI32Popcnt(Node* input) {
}
Node* WasmGraphBuilder::BuildI64Popcnt(Node* input) {
return Unop(wasm::kExprI64UConvertI32,
BuildBitCountingCall(input, ExternalReference::wasm_word64_popcnt(
jsgraph()->isolate()),
MachineRepresentation::kWord64));
return Unop(
wasm::kExprI64UConvertI32,
BuildBitCountingCall(
input, ExternalReference::wasm_word64_popcnt(jsgraph()->isolate()),
MachineRepresentation::kWord64));
}
Node* WasmGraphBuilder::BuildF32Trunc(Node* input) {
@ -3203,7 +3205,7 @@ bool WasmGraphBuilder::BuildWasmToJSWrapper(
Callable callable = CodeFactory::Call(isolate);
args[pos++] = jsgraph()->HeapConstant(callable.code());
args[pos++] = LoadImportData(index, kFunction, table); // target callable.
args[pos++] = jsgraph()->Int32Constant(wasm_count); // argument count
args[pos++] = jsgraph()->Int32Constant(wasm_count); // argument count
args[pos++] = jsgraph()->Constant(
handle(isolate->heap()->undefined_value(), isolate)); // receiver
@ -4779,40 +4781,38 @@ Handle<Code> CompileWasmToJSWrapper(
}
}
if (FLAG_trace_turbo_graph) { // Simple textual RPO.
OFStream os(stdout);
os << "-- Graph after change lowering -- " << std::endl;
os << AsRPO(graph);
}
if (FLAG_trace_turbo_graph) { // Simple textual RPO.
OFStream os(stdout);
os << "-- Graph after change lowering -- " << std::endl;
os << AsRPO(graph);
}
// Schedule and compile to machine code.
CallDescriptor* incoming = GetWasmCallDescriptor(&zone, sig);
if (machine.Is32()) {
incoming = GetI32WasmCallDescriptor(&zone, incoming);
}
// Schedule and compile to machine code.
CallDescriptor* incoming = GetWasmCallDescriptor(&zone, sig);
if (machine.Is32()) {
incoming = GetI32WasmCallDescriptor(&zone, incoming);
}
#ifdef DEBUG
EmbeddedVector<char, 32> func_name;
static unsigned id = 0;
func_name.Truncate(SNPrintF(func_name, "wasm-to-js#%d", id++));
EmbeddedVector<char, 32> func_name;
static unsigned id = 0;
func_name.Truncate(SNPrintF(func_name, "wasm-to-js#%d", id++));
#else
Vector<const char> func_name = CStrVector("wasm-to-js");
Vector<const char> func_name = CStrVector("wasm-to-js");
#endif
CompilationInfo info(func_name, &zone, Code::WASM_TO_JS_FUNCTION);
Handle<Code> code = Pipeline::GenerateCodeForTesting(
&info, isolate, incoming, &graph, nullptr, source_position_table);
ValidateImportWrapperReferencesImmovables(code);
Handle<FixedArray> deopt_data =
isolate->factory()->NewFixedArray(2, TENURED);
intptr_t loc =
reinterpret_cast<intptr_t>(global_js_imports_table.location());
Handle<Object> loc_handle = isolate->factory()->NewHeapNumberFromBits(loc);
deopt_data->set(0, *loc_handle);
Handle<Object> index_handle = isolate->factory()->NewNumberFromInt(
OffsetForImportData(index, WasmGraphBuilder::kFunction));
deopt_data->set(1, *index_handle);
code->set_deoptimization_data(*deopt_data);
CompilationInfo info(func_name, &zone, Code::WASM_TO_JS_FUNCTION);
Handle<Code> code = Pipeline::GenerateCodeForTesting(
&info, isolate, incoming, &graph, nullptr, source_position_table);
ValidateImportWrapperReferencesImmovables(code);
Handle<FixedArray> deopt_data = isolate->factory()->NewFixedArray(2, TENURED);
intptr_t loc = reinterpret_cast<intptr_t>(global_js_imports_table.location());
Handle<Object> loc_handle = isolate->factory()->NewHeapNumberFromBits(loc);
deopt_data->set(0, *loc_handle);
Handle<Object> index_handle = isolate->factory()->NewNumberFromInt(
OffsetForImportData(index, WasmGraphBuilder::kFunction));
deopt_data->set(1, *index_handle);
code->set_deoptimization_data(*deopt_data);
#ifdef ENABLE_DISASSEMBLER
if (FLAG_print_opt_code && !code.is_null()) {
CodeTracer::Scope tracing_scope(isolate->GetCodeTracer());
@ -5029,7 +5029,6 @@ Handle<Code> CompileCWasmEntry(Isolate* isolate, wasm::FunctionSig* sig) {
SourcePositionTable* WasmCompilationUnit::BuildGraphForWasmFunction(
double* decode_ms) {
base::ElapsedTimer decode_timer;
if (FLAG_trace_wasm_decode_time) {
decode_timer.Start();

View File

@ -883,9 +883,8 @@ bool DebugEvaluate::FunctionHasNoSideEffect(Handle<SharedFunctionInfo> info) {
return true;
} else {
// Check built-ins against whitelist.
int builtin_index = info->HasLazyDeserializationBuiltinId()
? info->lazy_deserialization_builtin_id()
: info->code()->builtin_index();
int builtin_index =
info->HasBuiltinId() ? info->builtin_id() : Builtins::kNoBuiltinId;
DCHECK_NE(Builtins::kDeserializeLazy, builtin_index);
if (Builtins::IsBuiltinId(builtin_index) &&
BuiltinHasNoSideEffect(static_cast<Builtins::Name>(builtin_index))) {

View File

@ -1408,7 +1408,7 @@ bool Debug::EnsureBreakInfo(Handle<SharedFunctionInfo> shared) {
!Compiler::Compile(shared, Compiler::CLEAR_EXCEPTION)) {
return false;
}
if (shared->code() ==
if (shared->GetCode() ==
isolate_->builtins()->builtin(Builtins::kDeserializeLazy)) {
Snapshot::EnsureBuiltinIsDeserialized(isolate_, shared);
}
@ -1866,19 +1866,20 @@ void Debug::RunPromiseHook(PromiseHookType hook_type, Handle<JSPromise> promise,
return;
}
last_frame_was_promise_builtin = false;
Handle<Code> code(info->code());
if (*code == *BUILTIN_CODE(isolate_, AsyncFunctionPromiseCreate)) {
type = debug::kDebugAsyncFunctionPromiseCreated;
last_frame_was_promise_builtin = true;
} else if (*code == *BUILTIN_CODE(isolate_, PromisePrototypeThen)) {
type = debug::kDebugPromiseThen;
last_frame_was_promise_builtin = true;
} else if (*code == *BUILTIN_CODE(isolate_, PromisePrototypeCatch)) {
type = debug::kDebugPromiseCatch;
last_frame_was_promise_builtin = true;
} else if (*code == *BUILTIN_CODE(isolate_, PromisePrototypeFinally)) {
type = debug::kDebugPromiseFinally;
last_frame_was_promise_builtin = true;
if (info->HasBuiltinId()) {
if (info->builtin_id() == Builtins::kAsyncFunctionPromiseCreate) {
type = debug::kDebugAsyncFunctionPromiseCreated;
last_frame_was_promise_builtin = true;
} else if (info->builtin_id() == Builtins::kPromisePrototypeThen) {
type = debug::kDebugPromiseThen;
last_frame_was_promise_builtin = true;
} else if (info->builtin_id() == Builtins::kPromisePrototypeCatch) {
type = debug::kDebugPromiseCatch;
last_frame_was_promise_builtin = true;
} else if (info->builtin_id() == Builtins::kPromisePrototypeFinally) {
type = debug::kDebugPromiseFinally;
last_frame_was_promise_builtin = true;
}
}
}
it.Advance();

View File

@ -824,11 +824,6 @@ void LiveEdit::ReplaceFunctionCode(
compile_info_wrapper.GetSharedFunctionInfo();
if (shared_info->is_compiled()) {
// Take whatever code we can get from the new shared function info. We
// expect activations of neither the old bytecode, since the lowest
// activation is going to be restarted.
Handle<Code> old_code(shared_info->code());
Handle<Code> new_code(new_shared_info->code());
// Clear old bytecode. This will trigger self-healing if we do not install
// new bytecode.
shared_info->ClearBytecodeArray();
@ -1071,7 +1066,7 @@ void LiveEdit::ReplaceRefToNestedFunction(
Handle<SharedFunctionInfo> subst_shared =
UnwrapSharedFunctionInfoFromJSValue(subst_function_wrapper);
for (RelocIterator it(parent_shared->code()); !it.done(); it.next()) {
for (RelocIterator it(parent_shared->GetCode()); !it.done(); it.next()) {
if (it.rinfo()->rmode() == RelocInfo::EMBEDDED_OBJECT) {
if (it.rinfo()->target_object() == *orig_shared) {
it.rinfo()->set_target_object(*subst_shared);

View File

@ -35,7 +35,7 @@ void StackGuard::reset_limits(const ExecutionAccess& lock) {
static void PrintDeserializedCodeInfo(Handle<JSFunction> function) {
if (function->code() == function->shared()->code() &&
if (function->code() == function->shared()->GetCode() &&
function->shared()->deserialized()) {
PrintF("[Running deserialized script");
Object* script = function->shared()->script();

View File

@ -1589,7 +1589,7 @@ Handle<JSFunction> Factory::NewFunction(Handle<Map> map,
function->initialize_properties();
function->initialize_elements();
function->set_shared(*info);
function->set_code(info->code());
function->set_code(info->GetCode());
function->set_context(*context_or_undefined);
function->set_feedback_cell(*many_closures_cell());
int header_size;
@ -1617,9 +1617,9 @@ Handle<JSFunction> Factory::NewFunction(const NewFunctionArgs& args) {
// Create the SharedFunctionInfo.
Handle<Context> context(isolate()->native_context());
Handle<Map> map = args.GetMap(isolate());
Handle<SharedFunctionInfo> info =
NewSharedFunctionInfo(args.name_, args.maybe_code_, map->is_constructor(),
kNormalFunction, args.maybe_builtin_id_);
Handle<SharedFunctionInfo> info = NewSharedFunctionInfo(
args.name_, args.maybe_code_, args.maybe_builtin_id_,
map->is_constructor(), kNormalFunction);
// Proper language mode in shared function info will be set later.
DCHECK(is_sloppy(info->language_mode()));
@ -1628,7 +1628,6 @@ Handle<JSFunction> Factory::NewFunction(const NewFunctionArgs& args) {
#ifdef DEBUG
if (isolate()->bootstrapper()->IsActive()) {
Handle<Code> code;
bool has_code = args.maybe_code_.ToHandle(&code);
DCHECK(
// During bootstrapping some of these maps could be not created yet.
(*map == context->get(Context::STRICT_FUNCTION_MAP_INDEX)) ||
@ -1639,8 +1638,8 @@ Handle<JSFunction> Factory::NewFunction(const NewFunctionArgs& args) {
Context::STRICT_FUNCTION_WITH_READONLY_PROTOTYPE_MAP_INDEX)) ||
// Check if it's a creation of an empty or Proxy function during
// bootstrapping.
(has_code && (code->builtin_index() == Builtins::kEmptyFunction ||
code->builtin_index() == Builtins::kProxyConstructor)));
(args.maybe_builtin_id_ == Builtins::kEmptyFunction ||
args.maybe_builtin_id_ == Builtins::kProxyConstructor));
} else {
DCHECK(
(*map == *isolate()->sloppy_function_map()) ||
@ -2548,10 +2547,9 @@ void Factory::ReinitializeJSGlobalProxy(Handle<JSGlobalProxy> object,
Handle<SharedFunctionInfo> Factory::NewSharedFunctionInfoForLiteral(
FunctionLiteral* literal, Handle<Script> script) {
Handle<Code> code = BUILTIN_CODE(isolate(), CompileLazy);
FunctionKind kind = literal->kind();
Handle<SharedFunctionInfo> shared =
NewSharedFunctionInfo(literal->name(), code, IsConstructable(kind), kind);
Handle<SharedFunctionInfo> shared = NewSharedFunctionInfoForBuiltin(
literal->name(), Builtins::kCompileLazy, IsConstructable(kind), kind);
SharedFunctionInfo::InitFromFunctionLiteral(shared, literal);
SharedFunctionInfo::SetScript(shared, script, false);
return shared;
@ -2577,9 +2575,24 @@ Handle<JSMessageObject> Factory::NewJSMessageObject(
return message_obj;
}
Handle<SharedFunctionInfo> Factory::NewSharedFunctionInfoForApiFunction(
MaybeHandle<String> maybe_name,
Handle<FunctionTemplateInfo> function_template_info, bool is_constructor,
FunctionKind kind) {
return NewSharedFunctionInfo(maybe_name, function_template_info,
Builtins::kNoBuiltinId, is_constructor, kind);
}
Handle<SharedFunctionInfo> Factory::NewSharedFunctionInfoForBuiltin(
MaybeHandle<String> maybe_name, int builtin_index, bool is_constructor,
FunctionKind kind) {
return NewSharedFunctionInfo(maybe_name, MaybeHandle<Code>(), builtin_index,
is_constructor, kind);
}
Handle<SharedFunctionInfo> Factory::NewSharedFunctionInfo(
MaybeHandle<String> maybe_name, MaybeHandle<Code> maybe_code,
bool is_constructor, FunctionKind kind, int maybe_builtin_index) {
MaybeHandle<String> maybe_name, MaybeHandle<HeapObject> maybe_function_data,
int maybe_builtin_index, bool is_constructor, FunctionKind kind) {
// Function names are assumed to be flat elsewhere. Must flatten before
// allocating SharedFunctionInfo to avoid GC seeing the uninitialized SFI.
Handle<String> shared_name;
@ -2597,16 +2610,20 @@ Handle<SharedFunctionInfo> Factory::NewSharedFunctionInfo(
share->set_name_or_scope_info(
has_shared_name ? *shared_name
: SharedFunctionInfo::kNoSharedNameSentinel);
Handle<Code> code;
if (!maybe_code.ToHandle(&code)) {
code = BUILTIN_CODE(isolate(), Illegal);
Handle<HeapObject> function_data;
if (maybe_function_data.ToHandle(&function_data)) {
// If we pass function_data then we shouldn't pass a builtin index, and
// the function_data should not be code with a builtin.
DCHECK(!Builtins::IsBuiltinId(maybe_builtin_index));
DCHECK_IMPLIES(function_data->IsCode(),
!Code::cast(*function_data)->is_builtin());
share->set_function_data(*function_data);
} else if (Builtins::IsBuiltinId(maybe_builtin_index)) {
DCHECK_NE(maybe_builtin_index, Builtins::kDeserializeLazy);
share->set_builtin_id(maybe_builtin_index);
} else {
share->set_builtin_id(Builtins::kIllegal);
}
Object* function_data = (Builtins::IsBuiltinId(maybe_builtin_index) &&
Builtins::IsLazy(maybe_builtin_index))
? Smi::FromInt(maybe_builtin_index)
: Object::cast(*undefined_value());
share->set_function_data(function_data, SKIP_WRITE_BARRIER);
share->set_code(*code);
share->set_outer_scope_info(*the_hole_value());
DCHECK(!Builtins::IsLazy(Builtins::kConstructedNonConstructable));
Handle<Code> construct_stub =
@ -3193,14 +3210,12 @@ NewFunctionArgs NewFunctionArgs::ForWasm(Handle<String> name, Handle<Code> code,
// static
NewFunctionArgs NewFunctionArgs::ForBuiltin(Handle<String> name,
Handle<Code> code, Handle<Map> map,
int builtin_id) {
Handle<Map> map, int builtin_id) {
DCHECK(Builtins::IsBuiltinId(builtin_id));
NewFunctionArgs args;
args.name_ = name;
args.maybe_map_ = map;
args.maybe_code_ = code;
args.maybe_builtin_id_ = builtin_id;
args.language_mode_ = LanguageMode::kStrict;
args.prototype_mutability_ = MUTABLE;
@ -3216,6 +3231,7 @@ NewFunctionArgs NewFunctionArgs::ForFunctionWithoutCode(
NewFunctionArgs args;
args.name_ = name;
args.maybe_map_ = map;
args.maybe_builtin_id_ = Builtins::kIllegal;
args.language_mode_ = language_mode;
args.prototype_mutability_ = MUTABLE;
@ -3226,14 +3242,13 @@ NewFunctionArgs NewFunctionArgs::ForFunctionWithoutCode(
// static
NewFunctionArgs NewFunctionArgs::ForBuiltinWithPrototype(
Handle<String> name, Handle<Code> code, Handle<Object> prototype,
InstanceType type, int instance_size, int inobject_properties,
int builtin_id, MutableMode prototype_mutability) {
Handle<String> name, Handle<Object> prototype, InstanceType type,
int instance_size, int inobject_properties, int builtin_id,
MutableMode prototype_mutability) {
DCHECK(Builtins::IsBuiltinId(builtin_id));
NewFunctionArgs args;
args.name_ = name;
args.maybe_code_ = code;
args.type_ = type;
args.instance_size_ = instance_size;
args.inobject_properties_ = inobject_properties;
@ -3251,13 +3266,11 @@ NewFunctionArgs NewFunctionArgs::ForBuiltinWithPrototype(
// static
NewFunctionArgs NewFunctionArgs::ForBuiltinWithoutPrototype(
Handle<String> name, Handle<Code> code, int builtin_id,
LanguageMode language_mode) {
Handle<String> name, int builtin_id, LanguageMode language_mode) {
DCHECK(Builtins::IsBuiltinId(builtin_id));
NewFunctionArgs args;
args.name_ = name;
args.maybe_code_ = code;
args.maybe_builtin_id_ = builtin_id;
args.language_mode_ = language_mode;
args.prototype_mutability_ = MUTABLE;

View File

@ -796,10 +796,14 @@ class V8_EXPORT_PRIVATE Factory final {
#undef ACCESSOR_INFO_ACCESSOR
// Allocates a new SharedFunctionInfo object.
Handle<SharedFunctionInfo> NewSharedFunctionInfo(
MaybeHandle<String> name, MaybeHandle<Code> code, bool is_constructor,
FunctionKind kind = kNormalFunction,
int maybe_builtin_index = Builtins::kNoBuiltinId);
Handle<SharedFunctionInfo> NewSharedFunctionInfoForApiFunction(
MaybeHandle<String> maybe_name,
Handle<FunctionTemplateInfo> function_template_info, bool is_constructor,
FunctionKind kind);
Handle<SharedFunctionInfo> NewSharedFunctionInfoForBuiltin(
MaybeHandle<String> name, int builtin_index, bool is_constructor,
FunctionKind kind = kNormalFunction);
Handle<SharedFunctionInfo> NewSharedFunctionInfoForLiteral(
FunctionLiteral* literal, Handle<Script> script);
@ -909,6 +913,11 @@ class V8_EXPORT_PRIVATE Factory final {
Handle<JSPromise> NewJSPromiseWithoutHook(
PretenureFlag pretenure = NOT_TENURED);
Handle<SharedFunctionInfo> NewSharedFunctionInfo(
MaybeHandle<String> name, MaybeHandle<HeapObject> maybe_function_data,
int maybe_builtin_index, bool is_constructor,
FunctionKind kind = kNormalFunction);
};
// Utility class to simplify argument handling around JSFunction creation.
@ -916,17 +925,16 @@ class NewFunctionArgs final {
public:
static NewFunctionArgs ForWasm(Handle<String> name, Handle<Code> code,
Handle<Map> map);
static NewFunctionArgs ForBuiltin(Handle<String> name, Handle<Code> code,
Handle<Map> map, int builtin_id);
static NewFunctionArgs ForBuiltin(Handle<String> name, Handle<Map> map,
int builtin_id);
static NewFunctionArgs ForFunctionWithoutCode(Handle<String> name,
Handle<Map> map,
LanguageMode language_mode);
static NewFunctionArgs ForBuiltinWithPrototype(
Handle<String> name, Handle<Code> code, Handle<Object> prototype,
InstanceType type, int instance_size, int inobject_properties,
int builtin_id, MutableMode prototype_mutability);
Handle<String> name, Handle<Object> prototype, InstanceType type,
int instance_size, int inobject_properties, int builtin_id,
MutableMode prototype_mutability);
static NewFunctionArgs ForBuiltinWithoutPrototype(Handle<String> name,
Handle<Code> code,
int builtin_id,
LanguageMode language_mode);

View File

@ -1924,6 +1924,7 @@ void JavaScriptFrame::Print(StringStream* accumulator,
Code* code = nullptr;
if (IsConstructor()) accumulator->Add("new ");
accumulator->PrintFunction(function, receiver, &code);
accumulator->Add(" [%p]", function);
// Get scope information for nicer output, if possible. If code is nullptr, or
// doesn't contain scope info, scope_info will return 0 for the number of

View File

@ -223,8 +223,6 @@ InterpreterCompilationJob::Status InterpreterCompilationJob::FinalizeJobImpl(
}
compilation_info()->SetBytecodeArray(bytecodes);
compilation_info()->SetCode(
BUILTIN_CODE(isolate, InterpreterEntryTrampoline));
return SUCCEEDED;
}

View File

@ -1558,19 +1558,9 @@ static int EnumerateCompiledFunctions(Heap* heap,
if (sfi->is_compiled() &&
(!sfi->script()->IsScript() ||
Script::cast(sfi->script())->HasValidSource())) {
// In some cases, an SFI might have (and have executing!) both bytecode
// and baseline code, so check for both and add them both if needed.
if (sfi->HasBytecodeArray()) {
AddFunctionAndCode(sfi, AbstractCode::cast(sfi->bytecode_array()),
sfis, code_objects, compiled_funcs_count);
++compiled_funcs_count;
}
if (!sfi->IsInterpreted()) {
AddFunctionAndCode(sfi, AbstractCode::cast(sfi->code()), sfis,
code_objects, compiled_funcs_count);
++compiled_funcs_count;
}
AddFunctionAndCode(sfi, AbstractCode::cast(sfi->abstract_code()), sfis,
code_objects, compiled_funcs_count);
++compiled_funcs_count;
}
} else if (obj->IsJSFunction()) {
// Given that we no longer iterate over all optimized JSFunctions, we need

View File

@ -832,10 +832,9 @@ void JSFunction::JSFunctionVerify() {
void SharedFunctionInfo::SharedFunctionInfoVerify() {
CHECK(IsSharedFunctionInfo());
VerifyObjectField(kCodeOffset);
VerifyObjectField(kFunctionDataOffset);
VerifyObjectField(kDebugInfoOffset);
VerifyObjectField(kFeedbackMetadataOffset);
VerifyObjectField(kFunctionDataOffset);
VerifyObjectField(kFunctionIdentifierOffset);
VerifyObjectField(kNameOrScopeInfoOffset);
VerifyObjectField(kOuterScopeInfoOffset);
@ -850,9 +849,8 @@ void SharedFunctionInfo::SharedFunctionInfoVerify() {
}
Isolate* isolate = GetIsolate();
CHECK(function_data()->IsUndefined(isolate) || IsApiFunction() ||
HasBytecodeArray() || HasAsmWasmData() ||
HasLazyDeserializationBuiltinId() || HasPreParsedScopeData());
CHECK(HasCodeObject() || IsApiFunction() || HasBytecodeArray() ||
HasAsmWasmData() || HasBuiltinId() || HasPreParsedScopeData());
CHECK(function_identifier()->IsUndefined(isolate) || HasBuiltinFunctionId() ||
HasInferredName());

View File

@ -1102,8 +1102,8 @@ void JSFunction::JSFunctionPrint(std::ostream& os) { // NOLINT
int builtin_index = code()->builtin_index();
if (builtin_index != -1 && !IsInterpreted()) {
if (builtin_index == Builtins::kDeserializeLazy) {
if (shared()->HasLazyDeserializationBuiltinId()) {
builtin_index = shared()->lazy_deserialization_builtin_id();
if (shared()->HasBuiltinId()) {
builtin_index = shared()->builtin_id();
os << "\n - builtin: " << GetIsolate()->builtins()->name(builtin_index)
<< "(lazy)";
}
@ -1169,13 +1169,8 @@ void SharedFunctionInfo::SharedFunctionInfoPrint(std::ostream& os) { // NOLINT
os << "\n - formal_parameter_count: " << internal_formal_parameter_count();
os << "\n - expected_nof_properties: " << expected_nof_properties();
os << "\n - language_mode: " << language_mode();
os << "\n - code: " << Brief(code());
if (HasBytecodeArray()) {
os << "\n - bytecode_array: " << bytecode_array();
}
if (HasAsmWasmData()) {
os << "\n - asm_wasm_data: " << Brief(asm_wasm_data());
}
os << "\n - data: " << Brief(function_data());
os << "\n - code (from data): " << Brief(GetCode());
PrintSourceCode(os);
// Script files are often large, hard to read.
// os << "\n - script =";
@ -1199,11 +1194,6 @@ void SharedFunctionInfo::SharedFunctionInfoPrint(std::ostream& os) { // NOLINT
os << "\n - length: " << length();
os << "\n - feedback_metadata: ";
feedback_metadata()->FeedbackMetadataPrint(os);
if (HasPreParsedScopeData()) {
os << "\n - preparsed scope data: " << preparsed_scope_data();
} else {
os << "\n - no preparsed scope data";
}
os << "\n";
}

View File

@ -1209,7 +1209,6 @@ Handle<SharedFunctionInfo> FunctionTemplateInfo::GetOrCreateSharedFunctionInfo(
} else {
name_string = isolate->factory()->empty_string();
}
Handle<Code> code = BUILTIN_CODE(isolate, HandleApiCall);
bool is_constructor;
FunctionKind function_kind;
if (info->remove_prototype()) {
@ -1219,14 +1218,14 @@ Handle<SharedFunctionInfo> FunctionTemplateInfo::GetOrCreateSharedFunctionInfo(
is_constructor = true;
function_kind = kNormalFunction;
}
Handle<SharedFunctionInfo> result = isolate->factory()->NewSharedFunctionInfo(
name_string, code, is_constructor, function_kind);
Handle<SharedFunctionInfo> result =
isolate->factory()->NewSharedFunctionInfoForApiFunction(
name_string, info, is_constructor, function_kind);
if (is_constructor) {
result->SetConstructStub(*BUILTIN_CODE(isolate, JSConstructStubApi));
}
result->set_length(info->length());
result->set_api_func_data(*info);
result->DontAdaptArguments();
DCHECK(result->IsApiFunction());
@ -3355,6 +3354,8 @@ void HeapObject::HeapObjectShortPrint(std::ostream& os) { // NOLINT
os << "<Code " << Code::Kind2String(code->kind());
if (code->is_stub()) {
os << " " << CodeStub::MajorName(CodeStub::GetMajorKey(code));
} else if (code->is_builtin()) {
os << " " << Builtins::name(code->builtin_index());
}
os << ">";
break;
@ -13871,7 +13872,7 @@ void SharedFunctionInfo::SetConstructStub(Code* code) {
DCHECK(builtin_id == Builtins::kJSBuiltinsConstructStub ||
!Builtins::IsLazy(builtin_id));
// Builtins should use JSBuiltinsConstructStub.
DCHECK_NE(this->code(), code);
DCHECK_NE(this->GetCode(), code);
}
#endif
set_construct_stub(code);

View File

@ -92,7 +92,7 @@ AbstractCode* SharedFunctionInfo::abstract_code() {
if (HasBytecodeArray()) {
return AbstractCode::cast(bytecode_array());
} else {
return AbstractCode::cast(code());
return AbstractCode::cast(GetCode());
}
}
@ -213,7 +213,8 @@ BIT_FIELD_ACCESSORS(SharedFunctionInfo, debugger_hints, debugging_id,
SharedFunctionInfo::DebuggingIdBits)
void SharedFunctionInfo::DontAdaptArguments() {
DCHECK(code()->kind() == Code::BUILTIN || code()->kind() == Code::STUB);
// TODO(leszeks): Revise this DCHECK now that the code field is gone.
DCHECK(!HasCodeObject());
set_internal_formal_parameter_count(kDontAdaptArgumentsSentinel);
}
@ -238,23 +239,44 @@ int SharedFunctionInfo::EndPosition() const {
return info->EndPosition();
}
Code* SharedFunctionInfo::code() const {
return Code::cast(READ_FIELD(this, kCodeOffset));
Code* SharedFunctionInfo::GetCode() const {
// ======
// NOTE: This chain of checks MUST be kept in sync with the equivalent CSA
// GetSharedFunctionInfoCode method in code-stub-assembler.cc, and the
// architecture-specific GetSharedFunctionInfoCode methods in builtins-*.cc.
// ======
Isolate* isolate = GetIsolate();
Object* data = function_data();
if (data->IsSmi()) {
// Holding a Smi means we are a builtin.
DCHECK(HasBuiltinId());
return isolate->builtins()->builtin(builtin_id());
} else if (data->IsBytecodeArray()) {
// Having a bytecode array means we are a compiled, interpreted function.
DCHECK(HasBytecodeArray());
return isolate->builtins()->builtin(Builtins::kInterpreterEntryTrampoline);
} else if (data->IsFixedArray()) {
// Having a fixed array means we are an asm.js/wasm function.
DCHECK(HasAsmWasmData());
return isolate->builtins()->builtin(Builtins::kInstantiateAsmJs);
} else if (data->IsPreParsedScopeData()) {
// Having pre-parsed scope data means we need to compile.
DCHECK(HasPreParsedScopeData());
return isolate->builtins()->builtin(Builtins::kCompileLazy);
} else if (data->IsFunctionTemplateInfo()) {
// Having a function template info means we are an API function.
DCHECK(IsApiFunction());
return isolate->builtins()->builtin(Builtins::kHandleApiCall);
} else if (data->IsCode()) {
// Having a code object means we should run it.
DCHECK(HasCodeObject());
return Code::cast(data);
}
UNREACHABLE();
}
void SharedFunctionInfo::set_code(Code* value, WriteBarrierMode mode) {
DCHECK(value->kind() != Code::OPTIMIZED_FUNCTION);
// If the SharedFunctionInfo has bytecode we should never mark it for lazy
// compile, since the bytecode is never flushed.
DCHECK(value != GetIsolate()->builtins()->builtin(Builtins::kCompileLazy) ||
!HasBytecodeArray());
WRITE_FIELD(this, kCodeOffset, value);
CONDITIONAL_WRITE_BARRIER(value->GetHeap(), this, kCodeOffset, value, mode);
}
bool SharedFunctionInfo::IsInterpreted() const {
return code()->is_interpreter_trampoline_builtin();
}
bool SharedFunctionInfo::IsInterpreted() const { return HasBytecodeArray(); }
ScopeInfo* SharedFunctionInfo::scope_info() const {
Object* maybe_scope_info = name_or_scope_info();
@ -289,8 +311,9 @@ ACCESSORS(SharedFunctionInfo, outer_scope_info, HeapObject,
kOuterScopeInfoOffset)
bool SharedFunctionInfo::is_compiled() const {
Builtins* builtins = GetIsolate()->builtins();
return code() != builtins->builtin(Builtins::kCompileLazy);
Object* data = function_data();
return data != Smi::FromEnum(Builtins::kCompileLazy) &&
!data->IsPreParsedScopeData();
}
int SharedFunctionInfo::GetLength() const {
@ -314,7 +337,7 @@ bool SharedFunctionInfo::HasDebugInfo() const {
return has_debug_info;
}
bool SharedFunctionInfo::IsApiFunction() {
bool SharedFunctionInfo::IsApiFunction() const {
return function_data()->IsFunctionTemplateInfo();
}
@ -323,11 +346,6 @@ FunctionTemplateInfo* SharedFunctionInfo::get_api_func_data() {
return FunctionTemplateInfo::cast(function_data());
}
void SharedFunctionInfo::set_api_func_data(FunctionTemplateInfo* data) {
DCHECK(function_data()->IsUndefined(GetIsolate()));
set_function_data(data);
}
bool SharedFunctionInfo::HasBytecodeArray() const {
return function_data()->IsBytecodeArray();
}
@ -338,13 +356,14 @@ BytecodeArray* SharedFunctionInfo::bytecode_array() const {
}
void SharedFunctionInfo::set_bytecode_array(BytecodeArray* bytecode) {
DCHECK(function_data()->IsUndefined(GetIsolate()));
DCHECK(function_data() == Smi::FromEnum(Builtins::kCompileLazy));
set_function_data(bytecode);
}
void SharedFunctionInfo::ClearBytecodeArray() {
DCHECK(function_data()->IsUndefined(GetIsolate()) || HasBytecodeArray());
set_function_data(GetHeap()->undefined_value());
DCHECK(function_data() == Smi::FromEnum(Builtins::kCompileLazy) ||
HasBytecodeArray());
set_builtin_id(Builtins::kCompileLazy);
}
bool SharedFunctionInfo::HasAsmWasmData() const {
@ -357,26 +376,33 @@ FixedArray* SharedFunctionInfo::asm_wasm_data() const {
}
void SharedFunctionInfo::set_asm_wasm_data(FixedArray* data) {
DCHECK(function_data()->IsUndefined(GetIsolate()) || HasAsmWasmData());
DCHECK(function_data() == Smi::FromEnum(Builtins::kCompileLazy) ||
HasAsmWasmData());
set_function_data(data);
}
void SharedFunctionInfo::ClearAsmWasmData() {
DCHECK(function_data()->IsUndefined(GetIsolate()) || HasAsmWasmData());
set_function_data(GetHeap()->undefined_value());
DCHECK(HasAsmWasmData());
set_builtin_id(Builtins::kCompileLazy);
}
bool SharedFunctionInfo::HasLazyDeserializationBuiltinId() const {
bool SharedFunctionInfo::HasBuiltinId() const {
return function_data()->IsSmi();
}
int SharedFunctionInfo::lazy_deserialization_builtin_id() const {
DCHECK(HasLazyDeserializationBuiltinId());
int SharedFunctionInfo::builtin_id() const {
DCHECK(HasBuiltinId());
int id = Smi::ToInt(function_data());
DCHECK(Builtins::IsBuiltinId(id));
return id;
}
void SharedFunctionInfo::set_builtin_id(int builtin_id) {
DCHECK(Builtins::IsBuiltinId(builtin_id));
DCHECK_NE(builtin_id, Builtins::kDeserializeLazy);
set_function_data(Smi::FromInt(builtin_id), SKIP_WRITE_BARRIER);
}
bool SharedFunctionInfo::HasPreParsedScopeData() const {
return function_data()->IsPreParsedScopeData();
}
@ -388,13 +414,18 @@ PreParsedScopeData* SharedFunctionInfo::preparsed_scope_data() const {
void SharedFunctionInfo::set_preparsed_scope_data(
PreParsedScopeData* preparsed_scope_data) {
DCHECK(function_data()->IsUndefined(GetIsolate()));
DCHECK(function_data() == Smi::FromEnum(Builtins::kCompileLazy));
set_function_data(preparsed_scope_data);
}
void SharedFunctionInfo::ClearPreParsedScopeData() {
DCHECK(function_data()->IsUndefined(GetIsolate()) || HasPreParsedScopeData());
set_function_data(GetHeap()->undefined_value());
DCHECK(function_data() == Smi::FromEnum(Builtins::kCompileLazy) ||
HasPreParsedScopeData());
set_builtin_id(Builtins::kCompileLazy);
}
bool SharedFunctionInfo::HasCodeObject() const {
return function_data()->IsCode();
}
bool SharedFunctionInfo::HasBuiltinFunctionId() {

View File

@ -45,8 +45,8 @@ class SharedFunctionInfo : public HeapObject {
inline String* Name() const;
inline void SetName(String* name);
// [code]: Function code.
DECL_ACCESSORS(code, Code)
// Get the code object which represents the execution of this function.
inline Code* GetCode() const;
// Get the abstract code associated with the function, which will either be
// a Code object or a BytecodeArray.
@ -147,11 +147,12 @@ class SharedFunctionInfo : public HeapObject {
// - a FunctionTemplateInfo to make benefit the API [IsApiFunction()].
// - a BytecodeArray for the interpreter [HasBytecodeArray()].
// - a FixedArray with Asm->Wasm conversion [HasAsmWasmData()].
// - a Smi containing the builtin id [HasLazyDeserializationBuiltinId()]
// - a Smi containing the builtin id [HasBuiltinId()]
// - a PreParsedScopeData for the parser [HasPreParsedScopeData()]
// - a Code object otherwise [HasCodeObject()]
DECL_ACCESSORS(function_data, Object)
inline bool IsApiFunction();
inline bool IsApiFunction() const;
inline FunctionTemplateInfo* get_api_func_data();
inline void set_api_func_data(FunctionTemplateInfo* data);
inline bool HasBytecodeArray() const;
@ -163,16 +164,20 @@ class SharedFunctionInfo : public HeapObject {
inline void set_asm_wasm_data(FixedArray* data);
inline void ClearAsmWasmData();
// A brief note to clear up possible confusion:
// lazy_deserialization_builtin_id corresponds to the auto-generated
// builtin_id corresponds to the auto-generated
// Builtins::Name id, while builtin_function_id corresponds to
// BuiltinFunctionId (a manually maintained list of 'interesting' functions
// mainly used during optimization).
inline bool HasLazyDeserializationBuiltinId() const;
inline int lazy_deserialization_builtin_id() const;
inline bool HasBuiltinId() const;
inline int builtin_id() const;
inline void set_builtin_id(int builtin_id);
inline bool HasPreParsedScopeData() const;
inline PreParsedScopeData* preparsed_scope_data() const;
inline void set_preparsed_scope_data(PreParsedScopeData* data);
inline void ClearPreParsedScopeData();
inline bool HasCodeObject() const;
inline Code* code_object() const;
inline void set_code_object();
// [function identifier]: This field holds an additional identifier for the
// function.
@ -435,11 +440,11 @@ class SharedFunctionInfo : public HeapObject {
// Layout description.
#define SHARED_FUNCTION_INFO_FIELDS(V) \
/* Pointer fields. */ \
V(kCodeOffset, kPointerSize) \
V(kStartOfPointerFieldsOffset, 0) \
V(kFunctionDataOffset, kPointerSize) \
V(kNameOrScopeInfoOffset, kPointerSize) \
V(kOuterScopeInfoOffset, kPointerSize) \
V(kConstructStubOffset, kPointerSize) \
V(kFunctionDataOffset, kPointerSize) \
V(kScriptOffset, kPointerSize) \
V(kDebugInfoOffset, kPointerSize) \
V(kFunctionIdentifierOffset, kPointerSize) \
@ -464,7 +469,8 @@ class SharedFunctionInfo : public HeapObject {
static const int kAlignedSize = POINTER_SIZE_ALIGN(kSize);
typedef FixedBodyDescriptor<kCodeOffset, kEndOfPointerFieldsOffset, kSize>
typedef FixedBodyDescriptor<kStartOfPointerFieldsOffset,
kEndOfPointerFieldsOffset, kSize>
BodyDescriptor;
// No weak fields.
typedef BodyDescriptor BodyDescriptorWeak;

View File

@ -1151,15 +1151,13 @@ void V8HeapExplorer::ExtractSharedFunctionInfoReferences(
const char* name = nullptr;
if (shared_name != heap_->empty_string()) {
name = names_->GetName(shared_name);
TagObject(shared->code(), names_->GetFormatted("(code for %s)", name));
TagObject(shared->GetCode(), names_->GetFormatted("(code for %s)", name));
} else {
TagObject(shared->code(), names_->GetFormatted("(%s code)",
Code::Kind2String(shared->code()->kind())));
TagObject(shared->GetCode(),
names_->GetFormatted(
"(%s code)", Code::Kind2String(shared->GetCode()->kind())));
}
SetInternalReference(obj, entry,
"code", shared->code(),
SharedFunctionInfo::kCodeOffset);
if (shared->name_or_scope_info()->IsScopeInfo()) {
TagObject(shared->name_or_scope_info(), "(function scope info)");
}

View File

@ -129,8 +129,8 @@ RUNTIME_FUNCTION(Runtime_InstantiateAsmJs) {
return *result.ToHandleChecked();
}
}
// Remove wasm data, mark as broken for asm->wasm,
// replace code with CompileLazy, and return a smi 0 to indicate failure.
// Remove wasm data, mark as broken for asm->wasm, replace function code with
// CompileLazy, and return a smi 0 to indicate failure.
if (function->shared()->HasAsmWasmData()) {
function->shared()->ClearAsmWasmData();
}
@ -138,11 +138,6 @@ RUNTIME_FUNCTION(Runtime_InstantiateAsmJs) {
DCHECK(function->code() ==
isolate->builtins()->builtin(Builtins::kInstantiateAsmJs));
function->set_code(isolate->builtins()->builtin(Builtins::kCompileLazy));
if (function->shared()->code() ==
isolate->builtins()->builtin(Builtins::kInstantiateAsmJs)) {
function->shared()->set_code(
isolate->builtins()->builtin(Builtins::kCompileLazy));
}
return Smi::kZero;
}
@ -290,7 +285,7 @@ RUNTIME_FUNCTION(Runtime_CompileForOnStackReplacement) {
}
if (!function->IsOptimized()) {
function->set_code(function->shared()->code());
function->set_code(function->shared()->GetCode());
}
return nullptr;
}

View File

@ -112,12 +112,9 @@ RUNTIME_FUNCTION(Runtime_SetCode) {
return isolate->heap()->exception();
}
// Set the code, scope info, formal parameter count, and the length
// Set the function data, scope info, formal parameter count, and the length
// of the target shared function info.
target_shared->set_code(source_shared->code());
if (source_shared->HasBytecodeArray()) {
target_shared->set_bytecode_array(source_shared->bytecode_array());
}
target_shared->set_function_data(source_shared->function_data());
target_shared->set_length(source_shared->GetLength());
target_shared->set_feedback_metadata(source_shared->feedback_metadata());
target_shared->set_internal_formal_parameter_count(
@ -141,8 +138,7 @@ RUNTIME_FUNCTION(Runtime_SetCode) {
SharedFunctionInfo::SetScript(target_shared, source_script);
// Set the code of the target function.
target->set_code(source_shared->code());
target->set_code(source_shared->GetCode());
Handle<Context> context(source->context());
target->set_context(*context);

View File

@ -460,7 +460,7 @@ RUNTIME_FUNCTION(Runtime_DeserializeLazy) {
Handle<SharedFunctionInfo> shared(function->shared(), isolate);
#ifdef DEBUG
int builtin_id = shared->lazy_deserialization_builtin_id();
int builtin_id = shared->builtin_id();
// At this point, the builtins table should definitely have DeserializeLazy
// set at the position of the target builtin.
CHECK_EQ(Builtins::kDeserializeLazy,
@ -471,10 +471,10 @@ RUNTIME_FUNCTION(Runtime_DeserializeLazy) {
CHECK_EQ(Builtins::TFJ, Builtins::KindOf(builtin_id));
#endif // DEBUG
Snapshot::EnsureBuiltinIsDeserialized(isolate, shared);
Code* code = Snapshot::EnsureBuiltinIsDeserialized(isolate, shared);
function->set_code(shared->code());
return shared->code();
function->set_code(code);
return code;
}
RUNTIME_FUNCTION(Runtime_IncrementUseCounter) {

View File

@ -235,7 +235,7 @@ RUNTIME_FUNCTION(Runtime_OptimizeFunctionOnNextCall) {
// function has.
if (!function->is_compiled()) {
DCHECK(function->shared()->IsInterpreted());
function->set_code(function->shared()->code());
function->set_code(*BUILTIN_CODE(isolate, InterpreterEntryTrampoline));
}
JSFunction::EnsureFeedbackVector(function);
@ -754,8 +754,8 @@ RUNTIME_FUNCTION(Runtime_IsAsmWasmCode) {
// Doesn't have wasm data.
return isolate->heap()->false_value();
}
if (function->shared()->code() !=
isolate->builtins()->builtin(Builtins::kInstantiateAsmJs)) {
if (function->shared()->HasBuiltinId() &&
function->shared()->builtin_id() == Builtins::kInstantiateAsmJs) {
// Hasn't been compiled yet.
return isolate->heap()->false_value();
}

View File

@ -253,6 +253,7 @@ HeapObject* Deserializer<AllocatorT>::PostProcessNewObject(HeapObject* obj,
interpreter::Interpreter::InterruptBudget());
bytecode_array->set_osr_loop_nesting_level(0);
}
// Check alignment.
DCHECK_EQ(0, Heap::GetFillToAlign(obj->address(),
HeapObject::RequiredAlignment(obj->map())));

View File

@ -53,10 +53,7 @@ void PartialSerializer::SerializeObject(HeapObject* obj, HowToCode how_to_code,
WhereToPoint where_to_point, int skip) {
DCHECK(!ObjectIsBytecodeHandler(obj)); // Only referenced in dispatch table.
BuiltinReferenceSerializationMode mode =
startup_serializer_->clear_function_code() ? kCanonicalizeCompileLazy
: kDefault;
if (SerializeBuiltinReference(obj, how_to_code, where_to_point, skip, mode)) {
if (SerializeBuiltinReference(obj, how_to_code, where_to_point, skip)) {
return;
}
if (SerializeHotObject(obj, how_to_code, where_to_point, skip)) return;
@ -104,6 +101,13 @@ void PartialSerializer::SerializeObject(HeapObject* obj, HowToCode how_to_code,
}
}
if (obj->IsJSFunction()) {
// Unconditionally reset the JSFunction to its SFI's code, since we can't
// serialize optimized code anyway.
JSFunction* closure = JSFunction::cast(obj);
closure->set_code(closure->shared()->GetCode());
}
CheckRehashability(obj);
// Object has not yet been serialized. Serialize it here.

View File

@ -186,7 +186,7 @@ bool Serializer<AllocatorT>::SerializeBackReference(HeapObject* obj,
template <class AllocatorT>
bool Serializer<AllocatorT>::SerializeBuiltinReference(
HeapObject* obj, HowToCode how_to_code, WhereToPoint where_to_point,
int skip, BuiltinReferenceSerializationMode mode) {
int skip) {
if (!obj->IsCode()) return false;
Code* code = Code::cast(obj);
@ -198,11 +198,6 @@ bool Serializer<AllocatorT>::SerializeBuiltinReference(
DCHECK_LT(builtin_index, Builtins::builtin_count);
DCHECK_LE(0, builtin_index);
if (mode == kCanonicalizeCompileLazy &&
code->is_interpreter_trampoline_builtin()) {
builtin_index = static_cast<int>(Builtins::kCompileLazy);
}
if (FLAG_trace_serializer) {
PrintF(" Encoding builtin reference: %s\n",
isolate()->builtins()->name(builtin_index));

View File

@ -189,17 +189,10 @@ class Serializer : public SerializerDeserializer {
bool SerializeBackReference(HeapObject* obj, HowToCode how_to_code,
WhereToPoint where_to_point, int skip);
// Determines whether the interpreter trampoline is replaced by CompileLazy.
enum BuiltinReferenceSerializationMode {
kDefault,
kCanonicalizeCompileLazy,
};
// Returns true if the object was successfully serialized as a builtin
// reference.
bool SerializeBuiltinReference(
HeapObject* obj, HowToCode how_to_code, WhereToPoint where_to_point,
int skip, BuiltinReferenceSerializationMode mode = kDefault);
bool SerializeBuiltinReference(HeapObject* obj, HowToCode how_to_code,
WhereToPoint where_to_point, int skip);
// Returns true if the given heap object is a bytecode handler code object.
bool ObjectIsBytecodeHandler(HeapObject* obj) const;

View File

@ -125,6 +125,10 @@ Code* Snapshot::DeserializeBuiltin(Isolate* isolate, int builtin_id) {
void Snapshot::EnsureAllBuiltinsAreDeserialized(Isolate* isolate) {
if (!FLAG_lazy_deserialization) return;
if (FLAG_trace_lazy_deserialization) {
PrintF("Forcing eager builtin deserialization\n");
}
Builtins* builtins = isolate->builtins();
for (int i = 0; i < Builtins::builtin_count; i++) {
if (!Builtins::IsLazy(i)) continue;
@ -141,11 +145,11 @@ void Snapshot::EnsureAllBuiltinsAreDeserialized(Isolate* isolate) {
}
// static
void Snapshot::EnsureBuiltinIsDeserialized(Isolate* isolate,
Handle<SharedFunctionInfo> shared) {
Code* Snapshot::EnsureBuiltinIsDeserialized(Isolate* isolate,
Handle<SharedFunctionInfo> shared) {
DCHECK(FLAG_lazy_deserialization);
int builtin_id = shared->lazy_deserialization_builtin_id();
int builtin_id = shared->builtin_id();
// We should never lazily deserialize DeserializeLazy.
DCHECK_NE(Builtins::kDeserializeLazy, builtin_id);
@ -160,7 +164,7 @@ void Snapshot::EnsureBuiltinIsDeserialized(Isolate* isolate,
DCHECK_EQ(builtin_id, code->builtin_index());
DCHECK_EQ(code, isolate->builtins()->builtin(builtin_id));
}
shared->set_code(code);
return code;
}
// static

View File

@ -154,8 +154,8 @@ class Snapshot : public AllStatic {
// initialized.
static Code* DeserializeBuiltin(Isolate* isolate, int builtin_id);
static void EnsureAllBuiltinsAreDeserialized(Isolate* isolate);
static void EnsureBuiltinIsDeserialized(Isolate* isolate,
Handle<SharedFunctionInfo> shared);
static Code* EnsureBuiltinIsDeserialized(Isolate* isolate,
Handle<SharedFunctionInfo> shared);
// Deserializes a single given handler code object. Intended to be called at
// runtime after the isolate has been fully initialized.

View File

@ -12,13 +12,8 @@
namespace v8 {
namespace internal {
StartupSerializer::StartupSerializer(
Isolate* isolate,
v8::SnapshotCreator::FunctionCodeHandling function_code_handling)
: Serializer(isolate),
clear_function_code_(function_code_handling ==
v8::SnapshotCreator::FunctionCodeHandling::kClear),
can_be_rehashed_(true) {
StartupSerializer::StartupSerializer(Isolate* isolate)
: Serializer(isolate), can_be_rehashed_(true) {
InitializeCodeAddressMap();
}
@ -33,13 +28,7 @@ void StartupSerializer::SerializeObject(HeapObject* obj, HowToCode how_to_code,
DCHECK(!ObjectIsBytecodeHandler(obj)); // Only referenced in dispatch table.
DCHECK(!obj->IsJSFunction());
if (clear_function_code() && obj->IsBytecodeArray()) {
obj = isolate()->heap()->undefined_value();
}
BuiltinReferenceSerializationMode mode =
clear_function_code() ? kCanonicalizeCompileLazy : kDefault;
if (SerializeBuiltinReference(obj, how_to_code, where_to_point, skip, mode)) {
if (SerializeBuiltinReference(obj, how_to_code, where_to_point, skip)) {
return;
}
if (SerializeHotObject(obj, how_to_code, where_to_point, skip)) return;

View File

@ -14,9 +14,7 @@ namespace internal {
class StartupSerializer : public Serializer<> {
public:
StartupSerializer(
Isolate* isolate,
v8::SnapshotCreator::FunctionCodeHandling function_code_handling);
explicit StartupSerializer(Isolate* isolate);
~StartupSerializer() override;
// Serialize the current state of the heap. The order is:
@ -30,7 +28,6 @@ class StartupSerializer : public Serializer<> {
int PartialSnapshotCacheIndex(HeapObject* o);
bool can_be_rehashed() const { return can_be_rehashed_; }
bool clear_function_code() const { return clear_function_code_; }
bool root_has_been_serialized(int root_index) const {
return root_has_been_serialized_.test(root_index);
}
@ -72,7 +69,6 @@ class StartupSerializer : public Serializer<> {
void CheckRehashability(HeapObject* obj);
const bool clear_function_code_;
std::bitset<Heap::kStrongRootListLength> root_has_been_serialized_;
PartialCacheIndexMap partial_cache_index_map_;
std::vector<AccessorInfo*> accessor_infos_;

View File

@ -189,6 +189,9 @@
# BUG(v8:5193): Flaky.
'test-cpu-profiler/TickEvents': [SKIP],
# BUG(v8:7587): Strange breakage on Mac.
'test-log-stack-tracer/PureJSStackTrace': [SKIP],
}], # 'asan == True'
##############################################################################

View File

@ -4976,7 +4976,6 @@ static void RemoveCodeAndGC(const v8::FunctionCallbackInfo<v8::Value>& args) {
Handle<JSFunction> fun = Handle<JSFunction>::cast(obj);
fun->shared()->ClearBytecodeArray(); // Bytecode is code too.
fun->set_code(*BUILTIN_CODE(isolate, CompileLazy));
fun->shared()->set_code(*BUILTIN_CODE(isolate, CompileLazy));
CcTest::CollectAllAvailableGarbage();
}

View File

@ -2279,7 +2279,7 @@ TEST(AllocateFunctionWithMapAndContext) {
CHECK(!fun->has_prototype_slot());
CHECK_EQ(*isolate->promise_capability_default_resolve_shared_fun(),
fun->shared());
CHECK_EQ(isolate->promise_capability_default_resolve_shared_fun()->code(),
CHECK_EQ(isolate->promise_capability_default_resolve_shared_fun()->GetCode(),
fun->code());
}

View File

@ -43,8 +43,9 @@ class FeedbackVectorHelper {
template <typename Spec>
Handle<FeedbackVector> NewFeedbackVector(Isolate* isolate, Spec* spec) {
Handle<FeedbackMetadata> metadata = FeedbackMetadata::New(isolate, spec);
Handle<SharedFunctionInfo> shared = isolate->factory()->NewSharedFunctionInfo(
isolate->factory()->empty_string(), MaybeHandle<Code>(), false);
Handle<SharedFunctionInfo> shared =
isolate->factory()->NewSharedFunctionInfoForBuiltin(
isolate->factory()->empty_string(), Builtins::kIllegal, false);
shared->set_feedback_metadata(*metadata);
return FeedbackVector::New(isolate, shared);
}

View File

@ -1520,8 +1520,9 @@ TEST(ReconfigureDataFieldAttribute_DataConstantToDataFieldAfterTargetMap) {
Handle<String> name = factory->empty_string();
Handle<Map> sloppy_map =
Map::CopyInitialMap(isolate->sloppy_function_map());
Handle<SharedFunctionInfo> info = factory->NewSharedFunctionInfo(
name, MaybeHandle<Code>(), sloppy_map->is_constructor());
Handle<SharedFunctionInfo> info =
factory->NewSharedFunctionInfoForBuiltin(
name, Builtins::kIllegal, sloppy_map->is_constructor());
function_type_ = FieldType::Class(sloppy_map, isolate);
CHECK(sloppy_map->is_stable());
@ -2659,8 +2660,8 @@ TEST(TransitionDataConstantToAnotherDataConstant) {
Factory* factory = isolate->factory();
Handle<String> name = factory->empty_string();
Handle<Map> sloppy_map = Map::CopyInitialMap(isolate->sloppy_function_map());
Handle<SharedFunctionInfo> info = factory->NewSharedFunctionInfo(
name, MaybeHandle<Code>(), sloppy_map->is_constructor());
Handle<SharedFunctionInfo> info = factory->NewSharedFunctionInfoForBuiltin(
name, Builtins::kIllegal, sloppy_map->is_constructor());
Handle<FieldType> function_type = FieldType::Class(sloppy_map, isolate);
CHECK(sloppy_map->is_stable());

View File

@ -365,29 +365,23 @@ TEST(HeapSnapshotCodeObjects) {
v8::String::Utf8Value anonymous_name(env->GetIsolate(), anonymous->GetName());
CHECK_EQ(0, strcmp("", *anonymous_name));
// Find references to code.
const v8::HeapGraphNode* compiled_code = GetProperty(
// Find references to shared function info.
const v8::HeapGraphNode* compiled_sfi = GetProperty(
env->GetIsolate(), compiled, v8::HeapGraphEdge::kInternal, "shared");
CHECK(compiled_code);
const v8::HeapGraphNode* lazy_code = GetProperty(
CHECK(compiled_sfi);
const v8::HeapGraphNode* lazy_sfi = GetProperty(
env->GetIsolate(), lazy, v8::HeapGraphEdge::kInternal, "shared");
CHECK(lazy_code);
CHECK(lazy_sfi);
// Check that there's no strong next_code_link. There might be a weak one
// but might be not, so we can't check that fact.
const v8::HeapGraphNode* code = GetProperty(
env->GetIsolate(), compiled_code, v8::HeapGraphEdge::kInternal, "code");
CHECK(code);
const v8::HeapGraphNode* next_code_link = GetProperty(
env->GetIsolate(), code, v8::HeapGraphEdge::kInternal, "code");
CHECK(!next_code_link);
// TODO(leszeks): Check that there's bytecode on the compiled function, but
// not the lazy function.
// Verify that non-compiled code doesn't contain references to "x"
// literal, while compiled code does. The scope info is stored in FixedArray
// objects attached to the SharedFunctionInfo.
// Verify that non-compiled function doesn't contain references to "x"
// literal, while compiled function does. The scope info is stored in
// FixedArray objects attached to the SharedFunctionInfo.
bool compiled_references_x = false, lazy_references_x = false;
for (int i = 0, count = compiled_code->GetChildrenCount(); i < count; ++i) {
const v8::HeapGraphEdge* prop = compiled_code->GetChild(i);
for (int i = 0, count = compiled_sfi->GetChildrenCount(); i < count; ++i) {
const v8::HeapGraphEdge* prop = compiled_sfi->GetChild(i);
const v8::HeapGraphNode* node = prop->GetToNode();
if (node->GetType() == v8::HeapGraphNode::kArray) {
if (HasString(env->GetIsolate(), node, "x")) {
@ -396,8 +390,8 @@ TEST(HeapSnapshotCodeObjects) {
}
}
}
for (int i = 0, count = lazy_code->GetChildrenCount(); i < count; ++i) {
const v8::HeapGraphEdge* prop = lazy_code->GetChild(i);
for (int i = 0, count = lazy_sfi->GetChildrenCount(); i < count; ++i) {
const v8::HeapGraphEdge* prop = lazy_sfi->GetChild(i);
const v8::HeapGraphNode* node = prop->GetToNode();
if (node->GetType() == v8::HeapGraphNode::kArray) {
if (HasString(env->GetIsolate(), node, "x")) {

View File

@ -135,8 +135,7 @@ static StartupBlobs Serialize(v8::Isolate* isolate) {
Isolate* internal_isolate = reinterpret_cast<Isolate*>(isolate);
internal_isolate->heap()->CollectAllAvailableGarbage(
i::GarbageCollectionReason::kTesting);
StartupSerializer ser(internal_isolate,
v8::SnapshotCreator::FunctionCodeHandling::kClear);
StartupSerializer ser(internal_isolate);
ser.SerializeStrongReferences();
i::BuiltinSerializer builtin_serializer(internal_isolate, &ser);
@ -376,8 +375,7 @@ static void PartiallySerializeContext(Vector<const byte>* startup_blob_out,
env.Reset();
SnapshotByteSink startup_sink;
StartupSerializer startup_serializer(
isolate, v8::SnapshotCreator::FunctionCodeHandling::kClear);
StartupSerializer startup_serializer(isolate);
startup_serializer.SerializeStrongReferences();
SnapshotByteSink partial_sink;
@ -501,8 +499,7 @@ static void PartiallySerializeCustomContext(
env.Reset();
SnapshotByteSink startup_sink;
StartupSerializer startup_serializer(
isolate, v8::SnapshotCreator::FunctionCodeHandling::kClear);
StartupSerializer startup_serializer(isolate);
startup_serializer.SerializeStrongReferences();
SnapshotByteSink partial_sink;

View File

@ -105,8 +105,8 @@ class JSCallReducerTest : public TypedGraphTest {
spec.AddCallICSlot();
Handle<FeedbackMetadata> metadata = FeedbackMetadata::New(isolate(), &spec);
Handle<SharedFunctionInfo> shared =
isolate()->factory()->NewSharedFunctionInfo(
isolate()->factory()->empty_string(), MaybeHandle<Code>(), false);
isolate()->factory()->NewSharedFunctionInfoForBuiltin(
isolate()->factory()->empty_string(), Builtins::kIllegal, false);
shared->set_feedback_metadata(*metadata);
Handle<FeedbackVector> vector = FeedbackVector::New(isolate(), shared);
VectorSlotPair feedback(vector, FeedbackSlot(0));

View File

@ -34,9 +34,10 @@ Handle<SharedFunctionInfo> CreateSharedFunctionInfo(
Handle<Script> script = isolate->factory()->NewScript(source);
Handle<WeakFixedArray> infos = isolate->factory()->NewWeakFixedArray(3);
script->set_shared_function_infos(*infos);
Handle<SharedFunctionInfo> shared = isolate->factory()->NewSharedFunctionInfo(
isolate->factory()->NewStringFromAsciiChecked("f"),
BUILTIN_CODE(isolate, CompileLazy), false);
Handle<SharedFunctionInfo> shared =
isolate->factory()->NewSharedFunctionInfoForBuiltin(
isolate->factory()->NewStringFromAsciiChecked("f"),
Builtins::kCompileLazy, false);
shared->set_raw_end_position(source->length());
shared->set_outer_scope_info(ScopeInfo::Empty(isolate));
shared->set_function_literal_id(1);

View File

@ -265,7 +265,6 @@ extras_accessors = [
'ExternalString, resource, Object, kResourceOffset',
'SeqOneByteString, chars, char, kHeaderSize',
'SeqTwoByteString, chars, char, kHeaderSize',
'SharedFunctionInfo, code, Code, kCodeOffset',
'SharedFunctionInfo, function_token_position, int, kFunctionTokenPositionOffset',
'SharedFunctionInfo, start_position_and_type, int, kStartPositionAndTypeOffset',
'SharedFunctionInfo, end_position, int, kEndPositionOffset',