[runtime] Rename Builtins::Name to Builtin 1/2

- Add new Builtin enum
- Move Builtins::Name::kXXX to Builtin::kXXX
- Update existing code

Follow-up CLs will unify the mix of int builtin ids and Builtins::Name
to use only the new Builtin enum, and will change it into an enum class.

Change-Id: Ib39aa45a25696acdf147f46392901b1e051deaa4
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2905592
Commit-Queue: Camillo Bruni <cbruni@chromium.org>
Reviewed-by: Michael Stanton <mvstanton@chromium.org>
Reviewed-by: Mythri Alle <mythria@chromium.org>
Reviewed-by: Dominik Inführ <dinfuehr@chromium.org>
Reviewed-by: Andreas Haas <ahaas@chromium.org>
Reviewed-by: Jakob Gruber <jgruber@chromium.org>
Reviewed-by: Igor Sheludko <ishell@chromium.org>
Cr-Commit-Position: refs/heads/master@{#74995}
Camillo Bruni 2021-06-07 17:24:12 +02:00 committed by V8 LUCI CQ
parent 1f5113816c
commit 8ab75a56a2
194 changed files with 2881 additions and 2968 deletions
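
For context, here is a minimal sketch of what the rename means at call sites. It is illustrative only: the real enum is generated from V8's builtin list macros, and the enumerators shown are just the ones that appear in this diff.

// Before this CL, builtin IDs were enumerators of the nested enum
// Builtins::Name, spelled Builtins::kFoo at call sites. This CL adds a
// top-level enum Builtin with the same enumerators; a follow-up CL
// turns it into an enum class.
namespace v8 {
namespace internal {

enum Builtin {
  kIllegal,
  kJSEntry,
  kJSConstructEntry,
  kJSRunMicrotasksEntry,
  // ... one enumerator per builtin, generated from the builtin list ...
};

}  // namespace internal
}  // namespace v8

// Call sites switch spelling accordingly, e.g.:
//   old: isolate->builtins()->builtin(i::Builtins::kIllegal)
//   new: isolate->builtins()->builtin(i::Builtin::kIllegal)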


@ -6116,7 +6116,7 @@ Local<Context> NewContext(
// TODO(jkummerow): This is for crbug.com/713699. Remove it if it doesn't
// fail.
// Sanity-check that the isolate is initialized and usable.
CHECK(isolate->builtins()->builtin(i::Builtins::kIllegal).IsCode());
CHECK(isolate->builtins()->builtin(i::Builtin::kIllegal).IsCode());
TRACE_EVENT_CALL_STATS_SCOPED(isolate, "v8", "V8.NewContext");
LOG_API(isolate, Context, New);
@ -8964,10 +8964,10 @@ JSEntryStubs Isolate::GetJSEntryStubs() {
JSEntryStubs entry_stubs;
i::Isolate* isolate = reinterpret_cast<i::Isolate*>(this);
std::array<std::pair<i::Builtins::Name, JSEntryStub*>, 3> stubs = {
{{i::Builtins::kJSEntry, &entry_stubs.js_entry_stub},
{i::Builtins::kJSConstructEntry, &entry_stubs.js_construct_entry_stub},
{i::Builtins::kJSRunMicrotasksEntry,
std::array<std::pair<i::Builtin, JSEntryStub*>, 3> stubs = {
{{i::Builtin::kJSEntry, &entry_stubs.js_entry_stub},
{i::Builtin::kJSConstructEntry, &entry_stubs.js_construct_entry_stub},
{i::Builtin::kJSRunMicrotasksEntry,
&entry_stubs.js_run_microtasks_entry_stub}}};
for (auto& pair : stubs) {
i::Code js_entry = isolate->heap()->builtin(pair.first);


@ -74,11 +74,11 @@ bool AreStdlibMembersValid(Isolate* isolate, Handle<JSReceiver> stdlib,
if (!value->IsJSFunction()) return false; \
SharedFunctionInfo shared = Handle<JSFunction>::cast(value)->shared(); \
if (!shared.HasBuiltinId() || \
shared.builtin_id() != Builtins::kMath##FName) { \
shared.builtin_id() != Builtin::kMath##FName) { \
return false; \
} \
DCHECK_EQ(shared.GetCode(), \
isolate->builtins()->builtin(Builtins::kMath##FName)); \
isolate->builtins()->builtin(Builtin::kMath##FName)); \
}
STDLIB_MATH_FUNCTION_LIST(STDLIB_MATH_FUNC)
#undef STDLIB_MATH_FUNC


@ -117,21 +117,21 @@ void BaselineAssembler::JumpIfNotSmi(Register value, Label* target,
__ JumpIfNotSmi(value, target);
}
void BaselineAssembler::CallBuiltin(Builtins::Name builtin) {
void BaselineAssembler::CallBuiltin(Builtin builtin) {
// __ CallBuiltin(static_cast<int>(builtin));
__ RecordCommentForOffHeapTrampoline(builtin);
ScratchRegisterScope temps(this);
Register temp = temps.AcquireScratch();
__ LoadEntryFromBuiltinIndex(builtin, temp);
__ LoadEntryFromBuiltin(builtin, temp);
__ Call(temp);
__ RecordComment("]");
}
void BaselineAssembler::TailCallBuiltin(Builtins::Name builtin) {
void BaselineAssembler::TailCallBuiltin(Builtin builtin) {
__ RecordCommentForOffHeapTrampoline(builtin);
ScratchRegisterScope temps(this);
Register temp = temps.AcquireScratch();
__ LoadEntryFromBuiltinIndex(builtin, temp);
__ LoadEntryFromBuiltin(builtin, temp);
__ Jump(temp);
__ RecordComment("]");
}


@ -19,7 +19,7 @@ void BaselineCompiler::Prologue() {
__ masm()->EnterFrame(StackFrame::BASELINE);
DCHECK_EQ(kJSFunctionRegister, kJavaScriptCallTargetRegister);
int max_frame_size = bytecode_->frame_size() + max_call_args_;
CallBuiltin<Builtins::kBaselineOutOfLinePrologue>(
CallBuiltin<Builtin::kBaselineOutOfLinePrologue>(
kContextRegister, kJSFunctionRegister, kJavaScriptCallArgCountRegister,
max_frame_size, kJavaScriptCallNewTargetRegister, bytecode_);


@ -115,19 +115,19 @@ void BaselineAssembler::JumpIfNotSmi(Register value, Label* target,
__ JumpIfNotSmi(value, target);
}
void BaselineAssembler::CallBuiltin(Builtins::Name builtin) {
void BaselineAssembler::CallBuiltin(Builtin builtin) {
if (masm()->options().short_builtin_calls) {
// Generate pc-relative call.
__ CallBuiltin(builtin);
} else {
ScratchRegisterScope temps(this);
Register temp = temps.AcquireScratch();
__ LoadEntryFromBuiltinIndex(builtin, temp);
__ LoadEntryFromBuiltin(builtin, temp);
__ Call(temp);
}
}
void BaselineAssembler::TailCallBuiltin(Builtins::Name builtin) {
void BaselineAssembler::TailCallBuiltin(Builtin builtin) {
if (masm()->options().short_builtin_calls) {
// Generate pc-relative call.
__ TailCallBuiltin(builtin);
@ -147,7 +147,7 @@ void BaselineAssembler::TailCallBuiltin(Builtins::Name builtin) {
UseScratchRegisterScope temps(masm());
temps.Exclude(temp);
__ LoadEntryFromBuiltinIndex(builtin, temp);
__ LoadEntryFromBuiltin(builtin, temp);
__ Jump(temp);
}
}


@ -18,7 +18,7 @@ void BaselineCompiler::Prologue() {
__ masm()->EnterFrame(StackFrame::BASELINE);
DCHECK_EQ(kJSFunctionRegister, kJavaScriptCallTargetRegister);
int max_frame_size = bytecode_->frame_size() + max_call_args_;
CallBuiltin<Builtins::kBaselineOutOfLinePrologue>(
CallBuiltin<Builtin::kBaselineOutOfLinePrologue>(
kContextRegister, kJSFunctionRegister, kJavaScriptCallArgCountRegister,
max_frame_size, kJavaScriptCallNewTargetRegister, bytecode_);


@ -126,8 +126,8 @@ class BaselineAssembler {
template <typename... T>
inline void Pop(T... registers);
inline void CallBuiltin(Builtins::Name builtin);
inline void TailCallBuiltin(Builtins::Name builtin);
inline void CallBuiltin(Builtin builtin);
inline void TailCallBuiltin(Builtin builtin);
inline void CallRuntime(Runtime::FunctionId function, int nargs);
inline void LoadTaggedPointerField(Register output, Register source,


@ -217,7 +217,7 @@ struct ArgumentSettingHelper<Descriptor, ArgIndex, false, Arg, Args...> {
}
};
template <Builtins::Name kBuiltin, typename... Args>
template <Builtin kBuiltin, typename... Args>
void MoveArgumentsForBuiltin(BaselineAssembler* masm, Args... args) {
using Descriptor = typename CallInterfaceDescriptorFor<kBuiltin>::type;
CheckArgs<Descriptor>(masm, args...);
@ -585,7 +585,7 @@ Label* BaselineCompiler::BuildForwardJumpLabel() {
return &threaded_label->label;
}
template <Builtins::Name kBuiltin, typename... Args>
template <Builtin kBuiltin, typename... Args>
void BaselineCompiler::CallBuiltin(Args... args) {
__ RecordComment("[ CallBuiltin");
detail::MoveArgumentsForBuiltin<kBuiltin>(&basm_, args...);
@ -593,7 +593,7 @@ void BaselineCompiler::CallBuiltin(Args... args) {
__ RecordComment("]");
}
template <Builtins::Name kBuiltin, typename... Args>
template <Builtin kBuiltin, typename... Args>
void BaselineCompiler::TailCallBuiltin(Args... args) {
detail::MoveArgumentsForBuiltin<kBuiltin>(&basm_, args...);
__ TailCallBuiltin(kBuiltin);
@ -609,7 +609,7 @@ void BaselineCompiler::CallRuntime(Runtime::FunctionId function, Args... args) {
// Returns into kInterpreterAccumulatorRegister
void BaselineCompiler::JumpIfToBoolean(bool do_jump_if_true, Label* label,
Label::Distance distance) {
CallBuiltin<Builtins::kToBooleanForBaselineJump>(
CallBuiltin<Builtin::kToBooleanForBaselineJump>(
kInterpreterAccumulatorRegister);
// ToBooleanForBaselineJump returns the ToBoolean value into return reg 1, and
// the original value into kInterpreterAccumulatorRegister, so we don't have
@ -654,18 +654,18 @@ void BaselineCompiler::VisitLdaConstant() {
}
void BaselineCompiler::VisitLdaGlobal() {
CallBuiltin<Builtins::kLoadGlobalICBaseline>(Constant<Name>(0), // name
IndexAsTagged(1)); // slot
CallBuiltin<Builtin::kLoadGlobalICBaseline>(Constant<Name>(0), // name
IndexAsTagged(1)); // slot
}
void BaselineCompiler::VisitLdaGlobalInsideTypeof() {
CallBuiltin<Builtins::kLoadGlobalICInsideTypeofBaseline>(
CallBuiltin<Builtin::kLoadGlobalICInsideTypeofBaseline>(
Constant<Name>(0), // name
IndexAsTagged(1)); // slot
}
void BaselineCompiler::VisitStaGlobal() {
CallBuiltin<Builtins::kStoreGlobalICBaseline>(
CallBuiltin<Builtin::kStoreGlobalICBaseline>(
Constant<Name>(0), // name
kInterpreterAccumulatorRegister, // value
IndexAsTagged(1)); // slot
@ -742,12 +742,12 @@ void BaselineCompiler::VisitLdaLookupSlot() {
}
void BaselineCompiler::VisitLdaLookupContextSlot() {
CallBuiltin<Builtins::kLookupContextBaseline>(
CallBuiltin<Builtin::kLookupContextBaseline>(
Constant<Name>(0), UintAsTagged(2), IndexAsTagged(1));
}
void BaselineCompiler::VisitLdaLookupGlobalSlot() {
CallBuiltin<Builtins::kLookupGlobalICBaseline>(
CallBuiltin<Builtin::kLookupGlobalICBaseline>(
Constant<Name>(0), UintAsTagged(2), IndexAsTagged(1));
}
@ -756,12 +756,12 @@ void BaselineCompiler::VisitLdaLookupSlotInsideTypeof() {
}
void BaselineCompiler::VisitLdaLookupContextSlotInsideTypeof() {
CallBuiltin<Builtins::kLookupContextInsideTypeofBaseline>(
CallBuiltin<Builtin::kLookupContextInsideTypeofBaseline>(
Constant<Name>(0), UintAsTagged(2), IndexAsTagged(1));
}
void BaselineCompiler::VisitLdaLookupGlobalSlotInsideTypeof() {
CallBuiltin<Builtins::kLookupGlobalICInsideTypeofBaseline>(
CallBuiltin<Builtin::kLookupGlobalICInsideTypeofBaseline>(
Constant<Name>(0), UintAsTagged(2), IndexAsTagged(1));
}
@ -805,9 +805,9 @@ void BaselineCompiler::VisitMov() {
}
void BaselineCompiler::VisitLdaNamedProperty() {
CallBuiltin<Builtins::kLoadICBaseline>(RegisterOperand(0), // object
Constant<Name>(1), // name
IndexAsTagged(2)); // slot
CallBuiltin<Builtin::kLoadICBaseline>(RegisterOperand(0), // object
Constant<Name>(1), // name
IndexAsTagged(2)); // slot
}
void BaselineCompiler::VisitLdaNamedPropertyFromSuper() {
@ -815,7 +815,7 @@ void BaselineCompiler::VisitLdaNamedPropertyFromSuper() {
LoadWithReceiverAndVectorDescriptor::LookupStartObjectRegister(),
kInterpreterAccumulatorRegister);
CallBuiltin<Builtins::kLoadSuperICBaseline>(
CallBuiltin<Builtin::kLoadSuperICBaseline>(
RegisterOperand(0), // object
LoadWithReceiverAndVectorDescriptor::
LookupStartObjectRegister(), // lookup start
@ -824,7 +824,7 @@ void BaselineCompiler::VisitLdaNamedPropertyFromSuper() {
}
void BaselineCompiler::VisitLdaKeyedProperty() {
CallBuiltin<Builtins::kKeyedLoadICBaseline>(
CallBuiltin<Builtin::kKeyedLoadICBaseline>(
RegisterOperand(0), // object
kInterpreterAccumulatorRegister, // key
IndexAsTagged(1)); // slot
@ -885,7 +885,7 @@ void BaselineCompiler::VisitStaModuleVariable() {
}
void BaselineCompiler::VisitStaNamedProperty() {
CallBuiltin<Builtins::kStoreICBaseline>(
CallBuiltin<Builtin::kStoreICBaseline>(
RegisterOperand(0), // object
Constant<Name>(1), // name
kInterpreterAccumulatorRegister, // value
@ -900,7 +900,7 @@ void BaselineCompiler::VisitStaNamedOwnProperty() {
}
void BaselineCompiler::VisitStaKeyedProperty() {
CallBuiltin<Builtins::kKeyedStoreICBaseline>(
CallBuiltin<Builtin::kKeyedStoreICBaseline>(
RegisterOperand(0), // object
RegisterOperand(1), // key
kInterpreterAccumulatorRegister, // value
@ -908,7 +908,7 @@ void BaselineCompiler::VisitStaKeyedProperty() {
}
void BaselineCompiler::VisitStaInArrayLiteral() {
CallBuiltin<Builtins::kStoreInArrayLiteralICBaseline>(
CallBuiltin<Builtin::kStoreInArrayLiteralICBaseline>(
RegisterOperand(0), // object
RegisterOperand(1), // name
kInterpreterAccumulatorRegister, // value
@ -934,143 +934,143 @@ void BaselineCompiler::VisitCollectTypeProfile() {
}
void BaselineCompiler::VisitAdd() {
CallBuiltin<Builtins::kAdd_Baseline>(
CallBuiltin<Builtin::kAdd_Baseline>(
RegisterOperand(0), kInterpreterAccumulatorRegister, Index(1));
}
void BaselineCompiler::VisitSub() {
CallBuiltin<Builtins::kSubtract_Baseline>(
CallBuiltin<Builtin::kSubtract_Baseline>(
RegisterOperand(0), kInterpreterAccumulatorRegister, Index(1));
}
void BaselineCompiler::VisitMul() {
CallBuiltin<Builtins::kMultiply_Baseline>(
CallBuiltin<Builtin::kMultiply_Baseline>(
RegisterOperand(0), kInterpreterAccumulatorRegister, Index(1));
}
void BaselineCompiler::VisitDiv() {
CallBuiltin<Builtins::kDivide_Baseline>(
CallBuiltin<Builtin::kDivide_Baseline>(
RegisterOperand(0), kInterpreterAccumulatorRegister, Index(1));
}
void BaselineCompiler::VisitMod() {
CallBuiltin<Builtins::kModulus_Baseline>(
CallBuiltin<Builtin::kModulus_Baseline>(
RegisterOperand(0), kInterpreterAccumulatorRegister, Index(1));
}
void BaselineCompiler::VisitExp() {
CallBuiltin<Builtins::kExponentiate_Baseline>(
CallBuiltin<Builtin::kExponentiate_Baseline>(
RegisterOperand(0), kInterpreterAccumulatorRegister, Index(1));
}
void BaselineCompiler::VisitBitwiseOr() {
CallBuiltin<Builtins::kBitwiseOr_Baseline>(
CallBuiltin<Builtin::kBitwiseOr_Baseline>(
RegisterOperand(0), kInterpreterAccumulatorRegister, Index(1));
}
void BaselineCompiler::VisitBitwiseXor() {
CallBuiltin<Builtins::kBitwiseXor_Baseline>(
CallBuiltin<Builtin::kBitwiseXor_Baseline>(
RegisterOperand(0), kInterpreterAccumulatorRegister, Index(1));
}
void BaselineCompiler::VisitBitwiseAnd() {
CallBuiltin<Builtins::kBitwiseAnd_Baseline>(
CallBuiltin<Builtin::kBitwiseAnd_Baseline>(
RegisterOperand(0), kInterpreterAccumulatorRegister, Index(1));
}
void BaselineCompiler::VisitShiftLeft() {
CallBuiltin<Builtins::kShiftLeft_Baseline>(
CallBuiltin<Builtin::kShiftLeft_Baseline>(
RegisterOperand(0), kInterpreterAccumulatorRegister, Index(1));
}
void BaselineCompiler::VisitShiftRight() {
CallBuiltin<Builtins::kShiftRight_Baseline>(
CallBuiltin<Builtin::kShiftRight_Baseline>(
RegisterOperand(0), kInterpreterAccumulatorRegister, Index(1));
}
void BaselineCompiler::VisitShiftRightLogical() {
CallBuiltin<Builtins::kShiftRightLogical_Baseline>(
CallBuiltin<Builtin::kShiftRightLogical_Baseline>(
RegisterOperand(0), kInterpreterAccumulatorRegister, Index(1));
}
void BaselineCompiler::VisitAddSmi() {
CallBuiltin<Builtins::kAdd_Baseline>(kInterpreterAccumulatorRegister,
IntAsSmi(0), Index(1));
CallBuiltin<Builtin::kAdd_Baseline>(kInterpreterAccumulatorRegister,
IntAsSmi(0), Index(1));
}
void BaselineCompiler::VisitSubSmi() {
CallBuiltin<Builtins::kSubtract_Baseline>(kInterpreterAccumulatorRegister,
IntAsSmi(0), Index(1));
}
void BaselineCompiler::VisitMulSmi() {
CallBuiltin<Builtins::kMultiply_Baseline>(kInterpreterAccumulatorRegister,
IntAsSmi(0), Index(1));
}
void BaselineCompiler::VisitDivSmi() {
CallBuiltin<Builtins::kDivide_Baseline>(kInterpreterAccumulatorRegister,
IntAsSmi(0), Index(1));
}
void BaselineCompiler::VisitModSmi() {
CallBuiltin<Builtins::kModulus_Baseline>(kInterpreterAccumulatorRegister,
CallBuiltin<Builtin::kSubtract_Baseline>(kInterpreterAccumulatorRegister,
IntAsSmi(0), Index(1));
}
void BaselineCompiler::VisitMulSmi() {
CallBuiltin<Builtin::kMultiply_Baseline>(kInterpreterAccumulatorRegister,
IntAsSmi(0), Index(1));
}
void BaselineCompiler::VisitDivSmi() {
CallBuiltin<Builtin::kDivide_Baseline>(kInterpreterAccumulatorRegister,
IntAsSmi(0), Index(1));
}
void BaselineCompiler::VisitModSmi() {
CallBuiltin<Builtin::kModulus_Baseline>(kInterpreterAccumulatorRegister,
IntAsSmi(0), Index(1));
}
void BaselineCompiler::VisitExpSmi() {
CallBuiltin<Builtins::kExponentiate_Baseline>(kInterpreterAccumulatorRegister,
IntAsSmi(0), Index(1));
CallBuiltin<Builtin::kExponentiate_Baseline>(kInterpreterAccumulatorRegister,
IntAsSmi(0), Index(1));
}
void BaselineCompiler::VisitBitwiseOrSmi() {
CallBuiltin<Builtins::kBitwiseOr_Baseline>(kInterpreterAccumulatorRegister,
IntAsSmi(0), Index(1));
CallBuiltin<Builtin::kBitwiseOr_Baseline>(kInterpreterAccumulatorRegister,
IntAsSmi(0), Index(1));
}
void BaselineCompiler::VisitBitwiseXorSmi() {
CallBuiltin<Builtins::kBitwiseXor_Baseline>(kInterpreterAccumulatorRegister,
IntAsSmi(0), Index(1));
}
void BaselineCompiler::VisitBitwiseAndSmi() {
CallBuiltin<Builtins::kBitwiseAnd_Baseline>(kInterpreterAccumulatorRegister,
IntAsSmi(0), Index(1));
}
void BaselineCompiler::VisitShiftLeftSmi() {
CallBuiltin<Builtins::kShiftLeft_Baseline>(kInterpreterAccumulatorRegister,
CallBuiltin<Builtin::kBitwiseXor_Baseline>(kInterpreterAccumulatorRegister,
IntAsSmi(0), Index(1));
}
void BaselineCompiler::VisitBitwiseAndSmi() {
CallBuiltin<Builtin::kBitwiseAnd_Baseline>(kInterpreterAccumulatorRegister,
IntAsSmi(0), Index(1));
}
void BaselineCompiler::VisitShiftLeftSmi() {
CallBuiltin<Builtin::kShiftLeft_Baseline>(kInterpreterAccumulatorRegister,
IntAsSmi(0), Index(1));
}
void BaselineCompiler::VisitShiftRightSmi() {
CallBuiltin<Builtins::kShiftRight_Baseline>(kInterpreterAccumulatorRegister,
IntAsSmi(0), Index(1));
CallBuiltin<Builtin::kShiftRight_Baseline>(kInterpreterAccumulatorRegister,
IntAsSmi(0), Index(1));
}
void BaselineCompiler::VisitShiftRightLogicalSmi() {
CallBuiltin<Builtins::kShiftRightLogical_Baseline>(
CallBuiltin<Builtin::kShiftRightLogical_Baseline>(
kInterpreterAccumulatorRegister, IntAsSmi(0), Index(1));
}
void BaselineCompiler::VisitInc() {
CallBuiltin<Builtins::kIncrement_Baseline>(kInterpreterAccumulatorRegister,
Index(0));
CallBuiltin<Builtin::kIncrement_Baseline>(kInterpreterAccumulatorRegister,
Index(0));
}
void BaselineCompiler::VisitDec() {
CallBuiltin<Builtins::kDecrement_Baseline>(kInterpreterAccumulatorRegister,
Index(0));
CallBuiltin<Builtin::kDecrement_Baseline>(kInterpreterAccumulatorRegister,
Index(0));
}
void BaselineCompiler::VisitNegate() {
CallBuiltin<Builtins::kNegate_Baseline>(kInterpreterAccumulatorRegister,
Index(0));
CallBuiltin<Builtin::kNegate_Baseline>(kInterpreterAccumulatorRegister,
Index(0));
}
void BaselineCompiler::VisitBitwiseNot() {
CallBuiltin<Builtins::kBitwiseNot_Baseline>(kInterpreterAccumulatorRegister,
Index(0));
CallBuiltin<Builtin::kBitwiseNot_Baseline>(kInterpreterAccumulatorRegister,
Index(0));
}
void BaselineCompiler::VisitToBooleanLogicalNot() {
@ -1090,23 +1090,23 @@ void BaselineCompiler::VisitLogicalNot() {
}
void BaselineCompiler::VisitTypeOf() {
CallBuiltin<Builtins::kTypeof>(kInterpreterAccumulatorRegister);
CallBuiltin<Builtin::kTypeof>(kInterpreterAccumulatorRegister);
}
void BaselineCompiler::VisitDeletePropertyStrict() {
BaselineAssembler::ScratchRegisterScope scratch_scope(&basm_);
Register scratch = scratch_scope.AcquireScratch();
__ Move(scratch, kInterpreterAccumulatorRegister);
CallBuiltin<Builtins::kDeleteProperty>(RegisterOperand(0), scratch,
Smi::FromEnum(LanguageMode::kStrict));
CallBuiltin<Builtin::kDeleteProperty>(RegisterOperand(0), scratch,
Smi::FromEnum(LanguageMode::kStrict));
}
void BaselineCompiler::VisitDeletePropertySloppy() {
BaselineAssembler::ScratchRegisterScope scratch_scope(&basm_);
Register scratch = scratch_scope.AcquireScratch();
__ Move(scratch, kInterpreterAccumulatorRegister);
CallBuiltin<Builtins::kDeleteProperty>(RegisterOperand(0), scratch,
Smi::FromEnum(LanguageMode::kSloppy));
CallBuiltin<Builtin::kDeleteProperty>(RegisterOperand(0), scratch,
Smi::FromEnum(LanguageMode::kSloppy));
}
void BaselineCompiler::VisitGetSuperConstructor() {
@ -1117,31 +1117,30 @@ void BaselineCompiler::VisitGetSuperConstructor() {
}
namespace {
constexpr Builtins::Name ConvertReceiverModeToCompactBuiltin(
constexpr Builtin ConvertReceiverModeToCompactBuiltin(
ConvertReceiverMode mode) {
switch (mode) {
case ConvertReceiverMode::kAny:
return Builtins::kCall_ReceiverIsAny_Baseline_Compact;
return Builtin::kCall_ReceiverIsAny_Baseline_Compact;
break;
case ConvertReceiverMode::kNullOrUndefined:
return Builtins::kCall_ReceiverIsNullOrUndefined_Baseline_Compact;
return Builtin::kCall_ReceiverIsNullOrUndefined_Baseline_Compact;
break;
case ConvertReceiverMode::kNotNullOrUndefined:
return Builtins::kCall_ReceiverIsNotNullOrUndefined_Baseline_Compact;
return Builtin::kCall_ReceiverIsNotNullOrUndefined_Baseline_Compact;
break;
}
}
constexpr Builtins::Name ConvertReceiverModeToBuiltin(
ConvertReceiverMode mode) {
constexpr Builtin ConvertReceiverModeToBuiltin(ConvertReceiverMode mode) {
switch (mode) {
case ConvertReceiverMode::kAny:
return Builtins::kCall_ReceiverIsAny_Baseline;
return Builtin::kCall_ReceiverIsAny_Baseline;
break;
case ConvertReceiverMode::kNullOrUndefined:
return Builtins::kCall_ReceiverIsNullOrUndefined_Baseline;
return Builtin::kCall_ReceiverIsNullOrUndefined_Baseline;
break;
case ConvertReceiverMode::kNotNullOrUndefined:
return Builtins::kCall_ReceiverIsNotNullOrUndefined_Baseline;
return Builtin::kCall_ReceiverIsNotNullOrUndefined_Baseline;
break;
}
}
@ -1226,7 +1225,7 @@ void BaselineCompiler::VisitCallWithSpread() {
uint32_t arg_count = args.register_count() - 1; // Remove receiver.
CallBuiltin<Builtins::kCallWithSpread_Baseline>(
CallBuiltin<Builtin::kCallWithSpread_Baseline>(
RegisterOperand(0), // kFunction
arg_count, // kActualArgumentsCount
spread_register, // kSpread
@ -1254,7 +1253,7 @@ void BaselineCompiler::VisitCallJSRuntime() {
__ LoadContext(kContextRegister);
__ LoadNativeContextSlot(kJavaScriptCallTargetRegister,
iterator().GetNativeContextIndexOperand(0));
CallBuiltin<Builtins::kCall_ReceiverIsNullOrUndefined>(
CallBuiltin<Builtin::kCall_ReceiverIsNullOrUndefined>(
kJavaScriptCallTargetRegister, // kFunction
arg_count, // kActualArgumentsCount
RootIndex::kUndefinedValue, // kReceiver
@ -1329,25 +1328,25 @@ void BaselineCompiler::VisitIntrinsicIsSmi(interpreter::RegisterList args) {
void BaselineCompiler::VisitIntrinsicCopyDataProperties(
interpreter::RegisterList args) {
CallBuiltin<Builtins::kCopyDataProperties>(args);
CallBuiltin<Builtin::kCopyDataProperties>(args);
}
void BaselineCompiler::VisitIntrinsicCreateIterResultObject(
interpreter::RegisterList args) {
CallBuiltin<Builtins::kCreateIterResultObject>(args);
CallBuiltin<Builtin::kCreateIterResultObject>(args);
}
void BaselineCompiler::VisitIntrinsicHasProperty(
interpreter::RegisterList args) {
CallBuiltin<Builtins::kHasProperty>(args);
CallBuiltin<Builtin::kHasProperty>(args);
}
void BaselineCompiler::VisitIntrinsicToLength(interpreter::RegisterList args) {
CallBuiltin<Builtins::kToLength>(args);
CallBuiltin<Builtin::kToLength>(args);
}
void BaselineCompiler::VisitIntrinsicToObject(interpreter::RegisterList args) {
CallBuiltin<Builtins::kToObject>(args);
CallBuiltin<Builtin::kToObject>(args);
}
void BaselineCompiler::VisitIntrinsicCall(interpreter::RegisterList args) {
@ -1359,7 +1358,7 @@ void BaselineCompiler::VisitIntrinsicCall(interpreter::RegisterList args) {
args = args.PopLeft();
uint32_t arg_count = args.register_count();
CallBuiltin<Builtins::kCall_ReceiverIsAny>(
CallBuiltin<Builtin::kCall_ReceiverIsAny>(
kJavaScriptCallTargetRegister, // kFunction
arg_count - 1, // kActualArgumentsCount
args);
@ -1367,12 +1366,12 @@ void BaselineCompiler::VisitIntrinsicCall(interpreter::RegisterList args) {
void BaselineCompiler::VisitIntrinsicCreateAsyncFromSyncIterator(
interpreter::RegisterList args) {
CallBuiltin<Builtins::kCreateAsyncFromSyncIteratorBaseline>(args[0]);
CallBuiltin<Builtin::kCreateAsyncFromSyncIteratorBaseline>(args[0]);
}
void BaselineCompiler::VisitIntrinsicCreateJSGeneratorObject(
interpreter::RegisterList args) {
CallBuiltin<Builtins::kCreateGeneratorObject>(args);
CallBuiltin<Builtin::kCreateGeneratorObject>(args);
}
void BaselineCompiler::VisitIntrinsicGeneratorGetResumeMode(
@ -1394,63 +1393,63 @@ void BaselineCompiler::VisitIntrinsicGeneratorClose(
void BaselineCompiler::VisitIntrinsicGetImportMetaObject(
interpreter::RegisterList args) {
CallBuiltin<Builtins::kGetImportMetaObjectBaseline>();
CallBuiltin<Builtin::kGetImportMetaObjectBaseline>();
}
void BaselineCompiler::VisitIntrinsicAsyncFunctionAwaitCaught(
interpreter::RegisterList args) {
CallBuiltin<Builtins::kAsyncFunctionAwaitCaught>(args);
CallBuiltin<Builtin::kAsyncFunctionAwaitCaught>(args);
}
void BaselineCompiler::VisitIntrinsicAsyncFunctionAwaitUncaught(
interpreter::RegisterList args) {
CallBuiltin<Builtins::kAsyncFunctionAwaitUncaught>(args);
CallBuiltin<Builtin::kAsyncFunctionAwaitUncaught>(args);
}
void BaselineCompiler::VisitIntrinsicAsyncFunctionEnter(
interpreter::RegisterList args) {
CallBuiltin<Builtins::kAsyncFunctionEnter>(args);
CallBuiltin<Builtin::kAsyncFunctionEnter>(args);
}
void BaselineCompiler::VisitIntrinsicAsyncFunctionReject(
interpreter::RegisterList args) {
CallBuiltin<Builtins::kAsyncFunctionReject>(args);
CallBuiltin<Builtin::kAsyncFunctionReject>(args);
}
void BaselineCompiler::VisitIntrinsicAsyncFunctionResolve(
interpreter::RegisterList args) {
CallBuiltin<Builtins::kAsyncFunctionResolve>(args);
CallBuiltin<Builtin::kAsyncFunctionResolve>(args);
}
void BaselineCompiler::VisitIntrinsicAsyncGeneratorAwaitCaught(
interpreter::RegisterList args) {
CallBuiltin<Builtins::kAsyncGeneratorAwaitCaught>(args);
CallBuiltin<Builtin::kAsyncGeneratorAwaitCaught>(args);
}
void BaselineCompiler::VisitIntrinsicAsyncGeneratorAwaitUncaught(
interpreter::RegisterList args) {
CallBuiltin<Builtins::kAsyncGeneratorAwaitUncaught>(args);
CallBuiltin<Builtin::kAsyncGeneratorAwaitUncaught>(args);
}
void BaselineCompiler::VisitIntrinsicAsyncGeneratorReject(
interpreter::RegisterList args) {
CallBuiltin<Builtins::kAsyncGeneratorReject>(args);
CallBuiltin<Builtin::kAsyncGeneratorReject>(args);
}
void BaselineCompiler::VisitIntrinsicAsyncGeneratorResolve(
interpreter::RegisterList args) {
CallBuiltin<Builtins::kAsyncGeneratorResolve>(args);
CallBuiltin<Builtin::kAsyncGeneratorResolve>(args);
}
void BaselineCompiler::VisitIntrinsicAsyncGeneratorYield(
interpreter::RegisterList args) {
CallBuiltin<Builtins::kAsyncGeneratorYield>(args);
CallBuiltin<Builtin::kAsyncGeneratorYield>(args);
}
void BaselineCompiler::VisitConstruct() {
interpreter::RegisterList args = iterator().GetRegisterListOperand(1);
uint32_t arg_count = args.register_count();
CallBuiltin<Builtins::kConstruct_Baseline>(
CallBuiltin<Builtin::kConstruct_Baseline>(
RegisterOperand(0), // kFunction
kInterpreterAccumulatorRegister, // kNewTarget
arg_count, // kActualArgumentsCount
@ -1469,12 +1468,12 @@ void BaselineCompiler::VisitConstructWithSpread() {
uint32_t arg_count = args.register_count();
using Descriptor =
CallInterfaceDescriptorFor<Builtins::kConstructWithSpread_Baseline>::type;
CallInterfaceDescriptorFor<Builtin::kConstructWithSpread_Baseline>::type;
Register new_target =
Descriptor::GetRegisterParameter(Descriptor::kNewTarget);
__ Move(new_target, kInterpreterAccumulatorRegister);
CallBuiltin<Builtins::kConstructWithSpread_Baseline>(
CallBuiltin<Builtin::kConstructWithSpread_Baseline>(
RegisterOperand(0), // kFunction
new_target, // kNewTarget
arg_count, // kActualArgumentsCount
@ -1485,32 +1484,32 @@ void BaselineCompiler::VisitConstructWithSpread() {
}
void BaselineCompiler::VisitTestEqual() {
CallBuiltin<Builtins::kEqual_Baseline>(
CallBuiltin<Builtin::kEqual_Baseline>(
RegisterOperand(0), kInterpreterAccumulatorRegister, Index(1));
}
void BaselineCompiler::VisitTestEqualStrict() {
CallBuiltin<Builtins::kStrictEqual_Baseline>(
CallBuiltin<Builtin::kStrictEqual_Baseline>(
RegisterOperand(0), kInterpreterAccumulatorRegister, Index(1));
}
void BaselineCompiler::VisitTestLessThan() {
CallBuiltin<Builtins::kLessThan_Baseline>(
CallBuiltin<Builtin::kLessThan_Baseline>(
RegisterOperand(0), kInterpreterAccumulatorRegister, Index(1));
}
void BaselineCompiler::VisitTestGreaterThan() {
CallBuiltin<Builtins::kGreaterThan_Baseline>(
CallBuiltin<Builtin::kGreaterThan_Baseline>(
RegisterOperand(0), kInterpreterAccumulatorRegister, Index(1));
}
void BaselineCompiler::VisitTestLessThanOrEqual() {
CallBuiltin<Builtins::kLessThanOrEqual_Baseline>(
CallBuiltin<Builtin::kLessThanOrEqual_Baseline>(
RegisterOperand(0), kInterpreterAccumulatorRegister, Index(1));
}
void BaselineCompiler::VisitTestGreaterThanOrEqual() {
CallBuiltin<Builtins::kGreaterThanOrEqual_Baseline>(
CallBuiltin<Builtin::kGreaterThanOrEqual_Baseline>(
RegisterOperand(0), kInterpreterAccumulatorRegister, Index(1));
}
@ -1526,17 +1525,17 @@ void BaselineCompiler::VisitTestReferenceEqual() {
void BaselineCompiler::VisitTestInstanceOf() {
using Descriptor =
CallInterfaceDescriptorFor<Builtins::kInstanceOf_Baseline>::type;
CallInterfaceDescriptorFor<Builtin::kInstanceOf_Baseline>::type;
Register callable = Descriptor::GetRegisterParameter(Descriptor::kRight);
__ Move(callable, kInterpreterAccumulatorRegister);
CallBuiltin<Builtins::kInstanceOf_Baseline>(RegisterOperand(0), // object
callable, // callable
Index(1)); // slot
CallBuiltin<Builtin::kInstanceOf_Baseline>(RegisterOperand(0), // object
callable, // callable
Index(1)); // slot
}
void BaselineCompiler::VisitTestIn() {
CallBuiltin<Builtins::kKeyedHasICBaseline>(
CallBuiltin<Builtin::kKeyedHasICBaseline>(
kInterpreterAccumulatorRegister, // object
RegisterOperand(0), // name
IndexAsTagged(1)); // slot
@ -1750,32 +1749,32 @@ void BaselineCompiler::VisitTestTypeOf() {
void BaselineCompiler::VisitToName() {
SaveAccumulatorScope save_accumulator(&basm_);
CallBuiltin<Builtins::kToName>(kInterpreterAccumulatorRegister);
CallBuiltin<Builtin::kToName>(kInterpreterAccumulatorRegister);
StoreRegister(0, kInterpreterAccumulatorRegister);
}
void BaselineCompiler::VisitToNumber() {
CallBuiltin<Builtins::kToNumber_Baseline>(kInterpreterAccumulatorRegister,
Index(0));
CallBuiltin<Builtin::kToNumber_Baseline>(kInterpreterAccumulatorRegister,
Index(0));
}
void BaselineCompiler::VisitToNumeric() {
CallBuiltin<Builtins::kToNumeric_Baseline>(kInterpreterAccumulatorRegister,
Index(0));
CallBuiltin<Builtin::kToNumeric_Baseline>(kInterpreterAccumulatorRegister,
Index(0));
}
void BaselineCompiler::VisitToObject() {
SaveAccumulatorScope save_accumulator(&basm_);
CallBuiltin<Builtins::kToObject>(kInterpreterAccumulatorRegister);
CallBuiltin<Builtin::kToObject>(kInterpreterAccumulatorRegister);
StoreRegister(0, kInterpreterAccumulatorRegister);
}
void BaselineCompiler::VisitToString() {
CallBuiltin<Builtins::kToString>(kInterpreterAccumulatorRegister);
CallBuiltin<Builtin::kToString>(kInterpreterAccumulatorRegister);
}
void BaselineCompiler::VisitCreateRegExpLiteral() {
CallBuiltin<Builtins::kCreateRegExpLiteral>(
CallBuiltin<Builtin::kCreateRegExpLiteral>(
FeedbackVector(), // feedback vector
IndexAsTagged(1), // slot
Constant<HeapObject>(0), // pattern
@ -1788,7 +1787,7 @@ void BaselineCompiler::VisitCreateArrayLiteral() {
interpreter::CreateArrayLiteralFlags::FlagsBits::decode(flags));
if (flags &
interpreter::CreateArrayLiteralFlags::FastCloneSupportedBit::kMask) {
CallBuiltin<Builtins::kCreateShallowArrayLiteral>(
CallBuiltin<Builtin::kCreateShallowArrayLiteral>(
FeedbackVector(), // feedback vector
IndexAsTagged(1), // slot
Constant<HeapObject>(0), // constant elements
@ -1803,13 +1802,13 @@ void BaselineCompiler::VisitCreateArrayLiteral() {
}
void BaselineCompiler::VisitCreateArrayFromIterable() {
CallBuiltin<Builtins::kIterableToListWithSymbolLookup>(
CallBuiltin<Builtin::kIterableToListWithSymbolLookup>(
kInterpreterAccumulatorRegister); // iterable
}
void BaselineCompiler::VisitCreateEmptyArrayLiteral() {
CallBuiltin<Builtins::kCreateEmptyArrayLiteral>(FeedbackVector(),
IndexAsTagged(0));
CallBuiltin<Builtin::kCreateEmptyArrayLiteral>(FeedbackVector(),
IndexAsTagged(0));
}
void BaselineCompiler::VisitCreateObjectLiteral() {
@ -1818,7 +1817,7 @@ void BaselineCompiler::VisitCreateObjectLiteral() {
interpreter::CreateObjectLiteralFlags::FlagsBits::decode(flags));
if (flags &
interpreter::CreateObjectLiteralFlags::FastCloneSupportedBit::kMask) {
CallBuiltin<Builtins::kCreateShallowObjectLiteral>(
CallBuiltin<Builtin::kCreateShallowObjectLiteral>(
FeedbackVector(), // feedback vector
IndexAsTagged(1), // slot
Constant<ObjectBoilerplateDescription>(0), // boilerplate
@ -1833,14 +1832,14 @@ void BaselineCompiler::VisitCreateObjectLiteral() {
}
void BaselineCompiler::VisitCreateEmptyObjectLiteral() {
CallBuiltin<Builtins::kCreateEmptyLiteralObject>();
CallBuiltin<Builtin::kCreateEmptyLiteralObject>();
}
void BaselineCompiler::VisitCloneObject() {
uint32_t flags = Flag(1);
int32_t raw_flags =
interpreter::CreateObjectLiteralFlags::FlagsBits::decode(flags);
CallBuiltin<Builtins::kCloneObjectICBaseline>(
CallBuiltin<Builtin::kCloneObjectICBaseline>(
RegisterOperand(0), // source
Smi::FromInt(raw_flags), // flags
IndexAsTagged(2)); // slot
@ -1848,7 +1847,7 @@ void BaselineCompiler::VisitCloneObject() {
void BaselineCompiler::VisitGetTemplateObject() {
BaselineAssembler::ScratchRegisterScope scratch_scope(&basm_);
CallBuiltin<Builtins::kGetTemplateObject>(
CallBuiltin<Builtin::kGetTemplateObject>(
shared_function_info_, // shared function info
Constant<HeapObject>(0), // description
Index(1), // slot
@ -1864,7 +1863,7 @@ void BaselineCompiler::VisitCreateClosure() {
uint32_t flags = Flag(2);
if (interpreter::CreateClosureFlags::FastNewClosureBit::decode(flags)) {
CallBuiltin<Builtins::kFastNewClosureBaseline>(
CallBuiltin<Builtin::kFastNewClosureBaseline>(
Constant<SharedFunctionInfo>(0), feedback_cell);
} else {
Runtime::FunctionId function_id =
@ -1891,7 +1890,7 @@ void BaselineCompiler::VisitCreateFunctionContext() {
if (slot_count < static_cast<uint32_t>(
ConstructorBuiltins::MaximumFunctionContextSlots())) {
DCHECK_EQ(info->scope_type(), ScopeType::FUNCTION_SCOPE);
CallBuiltin<Builtins::kFastNewFunctionContextFunction>(info, slot_count);
CallBuiltin<Builtin::kFastNewFunctionContextFunction>(info, slot_count);
} else {
CallRuntime(Runtime::kNewFunctionContext, Constant<ScopeInfo>(0));
}
@ -1903,7 +1902,7 @@ void BaselineCompiler::VisitCreateEvalContext() {
if (slot_count < static_cast<uint32_t>(
ConstructorBuiltins::MaximumFunctionContextSlots())) {
DCHECK_EQ(info->scope_type(), ScopeType::EVAL_SCOPE);
CallBuiltin<Builtins::kFastNewFunctionContextEval>(info, slot_count);
CallBuiltin<Builtin::kFastNewFunctionContextEval>(info, slot_count);
} else {
CallRuntime(Runtime::kNewFunctionContext, Constant<ScopeInfo>(0));
}
@ -1919,16 +1918,16 @@ void BaselineCompiler::VisitCreateMappedArguments() {
if (shared_function_info_->has_duplicate_parameters()) {
CallRuntime(Runtime::kNewSloppyArguments, __ FunctionOperand());
} else {
CallBuiltin<Builtins::kFastNewSloppyArguments>(__ FunctionOperand());
CallBuiltin<Builtin::kFastNewSloppyArguments>(__ FunctionOperand());
}
}
void BaselineCompiler::VisitCreateUnmappedArguments() {
CallBuiltin<Builtins::kFastNewStrictArguments>(__ FunctionOperand());
CallBuiltin<Builtin::kFastNewStrictArguments>(__ FunctionOperand());
}
void BaselineCompiler::VisitCreateRestParameter() {
CallBuiltin<Builtins::kFastNewRestArguments>(__ FunctionOperand());
CallBuiltin<Builtin::kFastNewRestArguments>(__ FunctionOperand());
}
void BaselineCompiler::VisitJumpLoop() {
@ -1942,7 +1941,7 @@ void BaselineCompiler::VisitJumpLoop() {
int loop_depth = iterator().GetImmediateOperand(1);
__ CompareByte(osr_level, loop_depth);
__ JumpIf(Condition::kUnsignedLessThanEqual, &osr_not_armed);
CallBuiltin<Builtins::kBaselineOnStackReplacement>();
CallBuiltin<Builtin::kBaselineOnStackReplacement>();
__ RecordComment("]");
__ Bind(&osr_not_armed);
@ -2078,13 +2077,13 @@ void BaselineCompiler::VisitSwitchOnSmiNoFeedback() {
}
void BaselineCompiler::VisitForInEnumerate() {
CallBuiltin<Builtins::kForInEnumerate>(RegisterOperand(0));
CallBuiltin<Builtin::kForInEnumerate>(RegisterOperand(0));
}
void BaselineCompiler::VisitForInPrepare() {
StoreRegister(0, kInterpreterAccumulatorRegister);
CallBuiltin<Builtins::kForInPrepare>(kInterpreterAccumulatorRegister,
IndexAsTagged(1), FeedbackVector());
CallBuiltin<Builtin::kForInPrepare>(kInterpreterAccumulatorRegister,
IndexAsTagged(1), FeedbackVector());
interpreter::Register first = iterator().GetRegisterOperand(0);
interpreter::Register second(first.index() + 1);
interpreter::Register third(first.index() + 2);
@ -2106,12 +2105,12 @@ void BaselineCompiler::VisitForInContinue() {
void BaselineCompiler::VisitForInNext() {
interpreter::Register cache_type, cache_array;
std::tie(cache_type, cache_array) = iterator().GetRegisterPairOperand(2);
CallBuiltin<Builtins::kForInNext>(Index(3), // vector slot
RegisterOperand(0), // object
cache_array, // cache array
cache_type, // cache type
RegisterOperand(1), // index
FeedbackVector()); // feedback vector
CallBuiltin<Builtin::kForInNext>(Index(3), // vector slot
RegisterOperand(0), // object
cache_array, // cache array
cache_type, // cache type
RegisterOperand(1), // index
FeedbackVector()); // feedback vector
}
void BaselineCompiler::VisitForInStep() {
@ -2151,7 +2150,7 @@ void BaselineCompiler::VisitReturn() {
int parameter_count_without_receiver =
parameter_count - 1; // Exclude the receiver to simplify the
// computation. We'll account for it at the end.
TailCallBuiltin<Builtins::kBaselineLeaveFrame>(
TailCallBuiltin<Builtin::kBaselineLeaveFrame>(
parameter_count_without_receiver, -profiling_weight);
__ RecordComment("]");
}
@ -2255,7 +2254,7 @@ void BaselineCompiler::VisitSuspendGenerator() {
int bytecode_offset =
BytecodeArray::kHeaderSize + iterator().current_offset();
CallBuiltin<Builtins::kSuspendGeneratorBaseline>(
CallBuiltin<Builtin::kSuspendGeneratorBaseline>(
generator_object,
static_cast<int>(Uint(3)), // suspend_id
bytecode_offset,
@ -2269,15 +2268,15 @@ void BaselineCompiler::VisitResumeGenerator() {
BaselineAssembler::ScratchRegisterScope scratch_scope(&basm_);
Register generator_object = scratch_scope.AcquireScratch();
LoadRegister(generator_object, 0);
CallBuiltin<Builtins::kResumeGeneratorBaseline>(
CallBuiltin<Builtin::kResumeGeneratorBaseline>(
generator_object,
static_cast<int>(RegisterCount(2))); // register_count
}
void BaselineCompiler::VisitGetIterator() {
CallBuiltin<Builtins::kGetIteratorBaseline>(RegisterOperand(0), // receiver
IndexAsTagged(1), // load_slot
IndexAsTagged(2)); // call_slot
CallBuiltin<Builtin::kGetIteratorBaseline>(RegisterOperand(0), // receiver
IndexAsTagged(1), // load_slot
IndexAsTagged(2)); // call_slot
}
void BaselineCompiler::VisitDebugger() {
@ -2287,9 +2286,8 @@ void BaselineCompiler::VisitDebugger() {
void BaselineCompiler::VisitIncBlockCounter() {
SaveAccumulatorScope accumulator_scope(&basm_);
CallBuiltin<Builtins::kIncBlockCounter>(
__ FunctionOperand(),
IndexAsSmi(0)); // coverage array slot
CallBuiltin<Builtin::kIncBlockCounter>(__ FunctionOperand(),
IndexAsSmi(0)); // coverage array slot
}
void BaselineCompiler::VisitAbort() {


@ -129,12 +129,12 @@ class BaselineCompiler {
Label::Distance distance = Label::kFar);
// Call helpers.
template <Builtins::Name kBuiltin, typename... Args>
template <Builtin kBuiltin, typename... Args>
void CallBuiltin(Args... args);
template <typename... Args>
void CallRuntime(Runtime::FunctionId function, Args... args);
template <Builtins::Name kBuiltin, typename... Args>
template <Builtin kBuiltin, typename... Args>
void TailCallBuiltin(Args... args);
template <ConvertReceiverMode kMode, typename... Args>


@ -119,15 +119,15 @@ void BaselineAssembler::JumpIfNotSmi(Register value, Label* target,
__ JumpIfNotSmi(value, target, distance);
}
void BaselineAssembler::CallBuiltin(Builtins::Name builtin) {
void BaselineAssembler::CallBuiltin(Builtin builtin) {
__ RecordCommentForOffHeapTrampoline(builtin);
__ Call(__ EntryFromBuiltinIndexAsOperand(builtin));
__ Call(__ EntryFromBuiltinAsOperand(builtin));
__ RecordComment("]");
}
void BaselineAssembler::TailCallBuiltin(Builtins::Name builtin) {
void BaselineAssembler::TailCallBuiltin(Builtin builtin) {
__ RecordCommentForOffHeapTrampoline(builtin);
__ jmp(__ EntryFromBuiltinIndexAsOperand(builtin));
__ jmp(__ EntryFromBuiltinAsOperand(builtin));
__ RecordComment("]");
}


@ -18,7 +18,7 @@ namespace baseline {
void BaselineCompiler::Prologue() {
DCHECK_EQ(kJSFunctionRegister, kJavaScriptCallTargetRegister);
int max_frame_size = bytecode_->frame_size() + max_call_args_;
CallBuiltin<Builtins::kBaselineOutOfLinePrologue>(
CallBuiltin<Builtin::kBaselineOutOfLinePrologue>(
kContextRegister, kJSFunctionRegister, kJavaScriptCallArgCountRegister,
max_frame_size, kJavaScriptCallNewTargetRegister, bytecode_);


@ -112,19 +112,19 @@ void BaselineAssembler::JumpIfNotSmi(Register value, Label* target,
__ JumpIfSmi(value, target);
}
void BaselineAssembler::CallBuiltin(Builtins::Name builtin) {
void BaselineAssembler::CallBuiltin(Builtin builtin) {
if (masm()->options().short_builtin_calls) {
__ CallBuiltin(builtin);
} else {
__ RecordCommentForOffHeapTrampoline(builtin);
Register temp = t6;
__ LoadEntryFromBuiltinIndex(builtin, temp);
__ LoadEntryFromBuiltin(builtin, temp);
__ Call(temp);
__ RecordComment("]");
}
}
void BaselineAssembler::TailCallBuiltin(Builtins::Name builtin) {
void BaselineAssembler::TailCallBuiltin(Builtin builtin) {
if (masm()->options().short_builtin_calls) {
// Generate pc-relative jump.
__ TailCallBuiltin(builtin);
@ -133,7 +133,7 @@ void BaselineAssembler::TailCallBuiltin(Builtins::Name builtin) {
// t6 be used for function call in RISCV64
// For example 'jalr t6' or 'jal t6'
Register temp = t6;
__ LoadEntryFromBuiltinIndex(builtin, temp);
__ LoadEntryFromBuiltin(builtin, temp);
__ Jump(temp);
__ RecordComment("]");
}


@ -18,7 +18,7 @@ void BaselineCompiler::Prologue() {
__ masm()->EnterFrame(StackFrame::BASELINE);
DCHECK_EQ(kJSFunctionRegister, kJavaScriptCallTargetRegister);
int max_frame_size = bytecode_->frame_size() + max_call_args_;
CallBuiltin<Builtins::kBaselineOutOfLinePrologue>(
CallBuiltin<Builtin::kBaselineOutOfLinePrologue>(
kContextRegister, kJSFunctionRegister, kJavaScriptCallArgCountRegister,
max_frame_size, kJavaScriptCallNewTargetRegister, bytecode_);
PrologueFillFrame();


@ -121,24 +121,24 @@ void BaselineAssembler::JumpIfNotSmi(Register value, Label* target,
__ JumpIfNotSmi(value, target, distance);
}
void BaselineAssembler::CallBuiltin(Builtins::Name builtin) {
void BaselineAssembler::CallBuiltin(Builtin builtin) {
if (masm()->options().short_builtin_calls) {
// Generate pc-relative call.
__ CallBuiltin(builtin);
} else {
__ RecordCommentForOffHeapTrampoline(builtin);
__ Call(__ EntryFromBuiltinIndexAsOperand(builtin));
__ Call(__ EntryFromBuiltinAsOperand(builtin));
__ RecordComment("]");
}
}
void BaselineAssembler::TailCallBuiltin(Builtins::Name builtin) {
void BaselineAssembler::TailCallBuiltin(Builtin builtin) {
if (masm()->options().short_builtin_calls) {
// Generate pc-relative jump.
__ TailCallBuiltin(builtin);
} else {
__ RecordCommentForOffHeapTrampoline(builtin);
__ Jump(__ EntryFromBuiltinIndexAsOperand(builtin));
__ Jump(__ EntryFromBuiltinAsOperand(builtin));
__ RecordComment("]");
}
}


@ -18,7 +18,7 @@ namespace baseline {
void BaselineCompiler::Prologue() {
DCHECK_EQ(kJSFunctionRegister, kJavaScriptCallTargetRegister);
int max_frame_size = bytecode_->frame_size() + max_call_args_;
CallBuiltin<Builtins::kBaselineOutOfLinePrologue>(
CallBuiltin<Builtin::kBaselineOutOfLinePrologue>(
kContextRegister, kJSFunctionRegister, kJavaScriptCallArgCountRegister,
max_frame_size, kJavaScriptCallNewTargetRegister, bytecode_);


@ -493,7 +493,7 @@ static_assert(kPushedStackSpace == EntryFrameConstants::kDirectCallerSPOffset +
// using JSEntryFunction = GeneratedCode<Address(
// Address root_register_value, MicrotaskQueue* microtask_queue)>;
void Generate_JSEntryVariant(MacroAssembler* masm, StackFrame::Type type,
Builtins::Name entry_trampoline) {
Builtin entry_trampoline) {
// The register state is either:
// r0: root_register_value
// r1: code entry
@ -656,18 +656,17 @@ void Generate_JSEntryVariant(MacroAssembler* masm, StackFrame::Type type,
} // namespace
void Builtins::Generate_JSEntry(MacroAssembler* masm) {
Generate_JSEntryVariant(masm, StackFrame::ENTRY,
Builtins::kJSEntryTrampoline);
Generate_JSEntryVariant(masm, StackFrame::ENTRY, Builtin::kJSEntryTrampoline);
}
void Builtins::Generate_JSConstructEntry(MacroAssembler* masm) {
Generate_JSEntryVariant(masm, StackFrame::CONSTRUCT_ENTRY,
Builtins::kJSConstructEntryTrampoline);
Builtin::kJSConstructEntryTrampoline);
}
void Builtins::Generate_JSRunMicrotasksEntry(MacroAssembler* masm) {
Generate_JSEntryVariant(masm, StackFrame::ENTRY,
Builtins::kRunMicrotasksTrampoline);
Builtin::kRunMicrotasksTrampoline);
}
static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
@ -1040,8 +1039,8 @@ void Builtins::Generate_BaselineOutOfLinePrologue(MacroAssembler* masm) {
// Need a few extra registers
temps.Include(r8, r9);
auto descriptor = Builtins::CallInterfaceDescriptorFor(
Builtins::kBaselineOutOfLinePrologue);
auto descriptor =
Builtins::CallInterfaceDescriptorFor(Builtin::kBaselineOutOfLinePrologue);
Register closure = descriptor.GetRegisterParameter(
BaselineOutOfLinePrologueDescriptor::kClosure);
// Load the feedback vector from the closure.


@ -609,7 +609,7 @@ namespace {
// Output:
// x0: result.
void Generate_JSEntryVariant(MacroAssembler* masm, StackFrame::Type type,
Builtins::Name entry_trampoline) {
Builtin entry_trampoline) {
Label invoke, handler_entry, exit;
{
@ -812,18 +812,17 @@ void Generate_JSEntryVariant(MacroAssembler* masm, StackFrame::Type type,
} // namespace
void Builtins::Generate_JSEntry(MacroAssembler* masm) {
Generate_JSEntryVariant(masm, StackFrame::ENTRY,
Builtins::kJSEntryTrampoline);
Generate_JSEntryVariant(masm, StackFrame::ENTRY, Builtin::kJSEntryTrampoline);
}
void Builtins::Generate_JSConstructEntry(MacroAssembler* masm) {
Generate_JSEntryVariant(masm, StackFrame::CONSTRUCT_ENTRY,
Builtins::kJSConstructEntryTrampoline);
Builtin::kJSConstructEntryTrampoline);
}
void Builtins::Generate_JSRunMicrotasksEntry(MacroAssembler* masm) {
Generate_JSEntryVariant(masm, StackFrame::ENTRY,
Builtins::kRunMicrotasksTrampoline);
Builtin::kRunMicrotasksTrampoline);
}
// Input:
@ -1221,8 +1220,8 @@ void Builtins::Generate_BaselineOutOfLinePrologue(MacroAssembler* masm) {
// Need a few extra registers
temps.Include(x14, x15);
auto descriptor = Builtins::CallInterfaceDescriptorFor(
Builtins::kBaselineOutOfLinePrologue);
auto descriptor =
Builtins::CallInterfaceDescriptorFor(Builtin::kBaselineOutOfLinePrologue);
Register closure = descriptor.GetRegisterParameter(
BaselineOutOfLinePrologueDescriptor::kClosure);
// Load the feedback vector from the closure.


@ -305,7 +305,7 @@ TF_BUILTIN(ArrayPrototypePop, CodeStubAssembler) {
// from the current frame here in order to reduce register pressure on the
// fast path.
TNode<JSFunction> target = LoadTargetFromFrame();
TailCallBuiltin(Builtins::kArrayPop, context, target, UndefinedConstant(),
TailCallBuiltin(Builtin::kArrayPop, context, target, UndefinedConstant(),
argc);
}
}
@ -430,7 +430,7 @@ TF_BUILTIN(ArrayPrototypePush, CodeStubAssembler) {
// from the current frame here in order to reduce register pressure on the
// fast path.
TNode<JSFunction> target = LoadTargetFromFrame();
TailCallBuiltin(Builtins::kArrayPush, context, target, UndefinedConstant(),
TailCallBuiltin(Builtin::kArrayPush, context, target, UndefinedConstant(),
argc);
}
}
@ -678,12 +678,11 @@ void ArrayIncludesIndexofAssembler::Generate(SearchVariant variant,
BIND(&if_smiorobjects);
{
Callable callable =
(variant == kIncludes)
? Builtins::CallableFor(isolate(),
Builtins::kArrayIncludesSmiOrObject)
: Builtins::CallableFor(isolate(),
Builtins::kArrayIndexOfSmiOrObject);
Callable callable = (variant == kIncludes)
? Builtins::CallableFor(
isolate(), Builtin::kArrayIncludesSmiOrObject)
: Builtins::CallableFor(
isolate(), Builtin::kArrayIndexOfSmiOrObject);
TNode<Object> result = CallStub(callable, context, elements, search_element,
array_length, SmiTag(index_var.value()));
args.PopAndReturn(result);
@ -694,9 +693,9 @@ void ArrayIncludesIndexofAssembler::Generate(SearchVariant variant,
Callable callable =
(variant == kIncludes)
? Builtins::CallableFor(isolate(),
Builtins::kArrayIncludesPackedDoubles)
Builtin::kArrayIncludesPackedDoubles)
: Builtins::CallableFor(isolate(),
Builtins::kArrayIndexOfPackedDoubles);
Builtin::kArrayIndexOfPackedDoubles);
TNode<Object> result = CallStub(callable, context, elements, search_element,
array_length, SmiTag(index_var.value()));
args.PopAndReturn(result);
@ -707,9 +706,9 @@ void ArrayIncludesIndexofAssembler::Generate(SearchVariant variant,
Callable callable =
(variant == kIncludes)
? Builtins::CallableFor(isolate(),
Builtins::kArrayIncludesHoleyDoubles)
Builtin::kArrayIncludesHoleyDoubles)
: Builtins::CallableFor(isolate(),
Builtins::kArrayIndexOfHoleyDoubles);
Builtin::kArrayIndexOfHoleyDoubles);
TNode<Object> result = CallStub(callable, context, elements, search_element,
array_length, SmiTag(index_var.value()));
args.PopAndReturn(result);
@ -1258,7 +1257,7 @@ TF_BUILTIN(ArrayIteratorPrototypeNext, CodeStubAssembler) {
// Check that the {index} is within the bounds of the {array}s "length".
TNode<Number> length = CAST(
CallBuiltin(Builtins::kToLength, context,
CallBuiltin(Builtin::kToLength, context,
GetProperty(context, array, factory()->length_string())));
GotoIfNumberGreaterThanOrEqual(index, length, &set_done);
StoreJSArrayIteratorNextIndex(iterator, NumberInc(index));
@ -1446,7 +1445,7 @@ class ArrayFlattenAssembler : public CodeStubAssembler {
// elementLen, targetIndex,
// depth - 1).
var_target_index = CAST(
CallBuiltin(Builtins::kFlattenIntoArray, context, target, element,
CallBuiltin(Builtin::kFlattenIntoArray, context, target, element,
element_length, target_index, NumberDec(depth)));
Goto(&next);
}
@ -1463,7 +1462,7 @@ class ArrayFlattenAssembler : public CodeStubAssembler {
// elementLen, targetIndex,
// depth - 1).
var_target_index = CAST(
CallBuiltin(Builtins::kFlattenIntoArray, context, target, element,
CallBuiltin(Builtin::kFlattenIntoArray, context, target, element,
element_length, target_index, NumberDec(depth)));
Goto(&next);
}
@ -1569,7 +1568,7 @@ TF_BUILTIN(ArrayPrototypeFlat, CodeStubAssembler) {
const TNode<JSReceiver> a = Construct(context, constructor, SmiConstant(0));
// 6. Perform ? FlattenIntoArray(A, O, sourceLen, 0, depthNum).
CallBuiltin(Builtins::kFlattenIntoArray, context, a, o, source_length,
CallBuiltin(Builtin::kFlattenIntoArray, context, a, o, source_length,
SmiConstant(0), var_depth_num.value());
// 7. Return A.
@ -1606,7 +1605,7 @@ TF_BUILTIN(ArrayPrototypeFlatMap, CodeStubAssembler) {
const TNode<JSReceiver> a = Construct(context, constructor, SmiConstant(0));
// 6. Perform ? FlattenIntoArray(A, O, sourceLen, 0, 1, mapperFunction, T).
CallBuiltin(Builtins::kFlatMapIntoArray, context, a, o, source_length,
CallBuiltin(Builtin::kFlatMapIntoArray, context, a, o, source_length,
SmiConstant(0), SmiConstant(1), mapper_function, t);
// 7. Return A.
@ -1631,8 +1630,8 @@ TF_BUILTIN(ArrayConstructor, ArrayBuiltinsAssembler) {
// Run the native code for the Array function called as a normal function.
TNode<Oddball> no_gc_site = UndefinedConstant();
TailCallBuiltin(Builtins::kArrayConstructorImpl, context, function,
new_target, argc, no_gc_site);
TailCallBuiltin(Builtin::kArrayConstructorImpl, context, function, new_target,
argc, no_gc_site);
}
void ArrayBuiltinsAssembler::TailCallArrayConstructorStub(


@ -188,7 +188,7 @@ TF_BUILTIN(AsyncFunctionReject, AsyncFunctionBuiltinsAssembler) {
// Reject the {promise} for the given {reason}, disabling the
// additional debug event for the rejection since a debug event
// already happend for the exception that got us here.
CallBuiltin(Builtins::kRejectPromise, context, promise, reason,
CallBuiltin(Builtin::kRejectPromise, context, promise, reason,
FalseConstant());
Label if_debugging(this, Label::kDeferred);
@ -210,7 +210,7 @@ TF_BUILTIN(AsyncFunctionResolve, AsyncFunctionBuiltinsAssembler) {
TNode<JSPromise> promise = LoadObjectField<JSPromise>(
async_function_object, JSAsyncFunctionObject::kPromiseOffset);
CallBuiltin(Builtins::kResolvePromise, context, promise, value);
CallBuiltin(Builtin::kResolvePromise, context, promise, value);
Label if_debugging(this, Label::kDeferred);
GotoIf(HasAsyncEventDelegate(), &if_debugging);


@ -104,10 +104,10 @@ TNode<Object> AsyncBuiltinsAssembler::AwaitOld(
&var_throwaway);
// Perform ! Call(promiseCapability.[[Resolve]], undefined, « promise »).
CallBuiltin(Builtins::kResolvePromise, context, promise, value);
CallBuiltin(Builtin::kResolvePromise, context, promise, value);
return CallBuiltin(Builtins::kPerformPromiseThen, context, promise,
on_resolve, on_reject, var_throwaway.value());
return CallBuiltin(Builtin::kPerformPromiseThen, context, promise, on_resolve,
on_reject, var_throwaway.value());
}
TNode<Object> AsyncBuiltinsAssembler::AwaitOptimized(
@ -165,7 +165,7 @@ TNode<Object> AsyncBuiltinsAssembler::AwaitOptimized(
outer_promise, on_reject, is_predicted_as_caught,
&var_throwaway);
return CallBuiltin(Builtins::kPerformPromiseThen, native_context, promise,
return CallBuiltin(Builtin::kPerformPromiseThen, native_context, promise,
on_resolve, on_reject, var_throwaway.value());
}
@ -194,7 +194,7 @@ void AsyncBuiltinsAssembler::InitAwaitPromise(
// This call to NewJSPromise is to keep behaviour parity with what happens
// in Runtime::kAwaitPromisesInit above if native hooks are set. It will
// create a throwaway promise that will trigger an init event and will get
// passed into Builtins::kPerformPromiseThen below.
// passed into Builtin::kPerformPromiseThen below.
Branch(IsContextPromiseHookEnabled(promiseHookFlags), &if_promise_hook,
&do_nothing);
BIND(&if_promise_hook);
@ -327,7 +327,7 @@ TF_BUILTIN(AsyncIteratorValueUnwrap, AsyncBuiltinsAssembler) {
CSA_ASSERT(this, IsBoolean(CAST(done)));
const TNode<Object> unwrapped_value =
CallBuiltin(Builtins::kCreateIterResultObject, context, value, done);
CallBuiltin(Builtin::kCreateIterResultObject, context, value, done);
Return(unwrapped_value);
}


@ -171,7 +171,7 @@ void AsyncGeneratorBuiltinsAssembler::AsyncGeneratorEnqueue(
SmiConstant(JSAsyncGeneratorObject::kGeneratorExecuting)),
&done);
CallBuiltin(Builtins::kAsyncGeneratorResumeNext, context, generator);
CallBuiltin(Builtin::kAsyncGeneratorResumeNext, context, generator);
Goto(&done);
BIND(&done);
@ -180,7 +180,7 @@ void AsyncGeneratorBuiltinsAssembler::AsyncGeneratorEnqueue(
BIND(&if_receiverisincompatible);
{
CallBuiltin(Builtins::kRejectPromise, context, promise,
CallBuiltin(Builtin::kRejectPromise, context, promise,
MakeTypeError(MessageTemplate::kIncompatibleMethodReceiver,
context, StringConstant(method_name), receiver),
TrueConstant());
@ -225,7 +225,7 @@ void AsyncGeneratorBuiltinsAssembler::AsyncGeneratorAwaitResumeClosure(
CallStub(CodeFactory::ResumeGenerator(isolate()), context, value, generator);
TailCallBuiltin(Builtins::kAsyncGeneratorResumeNext, context, generator);
TailCallBuiltin(Builtin::kAsyncGeneratorResumeNext, context, generator);
}
template <typename Descriptor>
@ -438,13 +438,12 @@ TF_BUILTIN(AsyncGeneratorResumeNext, AsyncGeneratorBuiltinsAssembler) {
// In all cases, the last step is to call AsyncGeneratorResumeNext.
TNode<Object> is_caught = CallRuntime(
Runtime::kAsyncGeneratorHasCatchHandlerForPC, context, generator);
TailCallBuiltin(Builtins::kAsyncGeneratorReturn, context, generator,
TailCallBuiltin(Builtin::kAsyncGeneratorReturn, context, generator,
next_value, is_caught);
BIND(&if_throw);
GotoIfNot(IsGeneratorStateClosed(var_state.value()), &resume_generator);
CallBuiltin(Builtins::kAsyncGeneratorReject, context, generator,
next_value);
CallBuiltin(Builtin::kAsyncGeneratorReject, context, generator, next_value);
var_next = LoadFirstAsyncGeneratorRequestFromQueue(generator);
Goto(&start);
}
@ -452,7 +451,7 @@ TF_BUILTIN(AsyncGeneratorResumeNext, AsyncGeneratorBuiltinsAssembler) {
BIND(&if_normal);
{
GotoIfNot(IsGeneratorStateClosed(var_state.value()), &resume_generator);
CallBuiltin(Builtins::kAsyncGeneratorResolve, context, generator,
CallBuiltin(Builtin::kAsyncGeneratorResolve, context, generator,
UndefinedConstant(), TrueConstant());
var_state = LoadGeneratorState(generator);
var_next = LoadFirstAsyncGeneratorRequestFromQueue(generator);
@ -525,14 +524,14 @@ TF_BUILTIN(AsyncGeneratorResolve, AsyncGeneratorBuiltinsAssembler) {
{
// Skip the "then" on {iter_result} and directly fulfill the {promise}
// with the {iter_result}.
CallBuiltin(Builtins::kFulfillPromise, context, promise, iter_result);
CallBuiltin(Builtin::kFulfillPromise, context, promise, iter_result);
Goto(&return_promise);
}
BIND(&if_slow);
{
// Perform Call(promiseCapability.[[Resolve]], undefined, «iteratorResult»).
CallBuiltin(Builtins::kResolvePromise, context, promise, iter_result);
CallBuiltin(Builtin::kResolvePromise, context, promise, iter_result);
Goto(&return_promise);
}
@ -553,7 +552,7 @@ TF_BUILTIN(AsyncGeneratorReject, AsyncGeneratorBuiltinsAssembler) {
TakeFirstAsyncGeneratorRequestFromQueue(generator);
TNode<JSPromise> promise = LoadPromiseFromAsyncGeneratorRequest(next);
Return(CallBuiltin(Builtins::kRejectPromise, context, promise, value,
Return(CallBuiltin(Builtin::kRejectPromise, context, promise, value,
TrueConstant()));
}
@ -585,10 +584,10 @@ TF_BUILTIN(AsyncGeneratorYieldResolveClosure, AsyncGeneratorBuiltinsAssembler) {
// Per proposal-async-iteration/#sec-asyncgeneratoryield step 9
// Return ! AsyncGeneratorResolve(_F_.[[Generator]], _value_, *false*).
CallBuiltin(Builtins::kAsyncGeneratorResolve, context, generator, value,
CallBuiltin(Builtin::kAsyncGeneratorResolve, context, generator, value,
FalseConstant());
TailCallBuiltin(Builtins::kAsyncGeneratorResumeNext, context, generator);
TailCallBuiltin(Builtin::kAsyncGeneratorResumeNext, context, generator);
}
TF_BUILTIN(AsyncGeneratorReturn, AsyncGeneratorBuiltinsAssembler) {
@ -666,10 +665,10 @@ TF_BUILTIN(AsyncGeneratorReturnClosedResolveClosure,
// https://tc39.github.io/proposal-async-iteration/
// #async-generator-resume-next-return-processor-fulfilled step 2:
// Return ! AsyncGeneratorResolve(_F_.[[Generator]], _value_, *true*).
CallBuiltin(Builtins::kAsyncGeneratorResolve, context, generator, value,
CallBuiltin(Builtin::kAsyncGeneratorResolve, context, generator, value,
TrueConstant());
TailCallBuiltin(Builtins::kAsyncGeneratorResumeNext, context, generator);
TailCallBuiltin(Builtin::kAsyncGeneratorResumeNext, context, generator);
}
TF_BUILTIN(AsyncGeneratorReturnClosedRejectClosure,
@ -684,9 +683,9 @@ TF_BUILTIN(AsyncGeneratorReturnClosedRejectClosure,
// https://tc39.github.io/proposal-async-iteration/
// #async-generator-resume-next-return-processor-rejected step 2:
// Return ! AsyncGeneratorReject(_F_.[[Generator]], _reason_).
CallBuiltin(Builtins::kAsyncGeneratorReject, context, generator, value);
CallBuiltin(Builtin::kAsyncGeneratorReject, context, generator, value);
TailCallBuiltin(Builtins::kAsyncGeneratorResumeNext, context, generator);
TailCallBuiltin(Builtin::kAsyncGeneratorResumeNext, context, generator);
}
} // namespace internal


@ -168,7 +168,7 @@ void AsyncFromSyncBuiltinsAssembler::Generate_AsyncFromSyncIteratorMethod(
TNode<Object> value_wrapper;
{
ScopedExceptionHandler handler(this, &reject_promise, &var_exception);
value_wrapper = CallBuiltin(Builtins::kPromiseResolve, native_context,
value_wrapper = CallBuiltin(Builtin::kPromiseResolve, native_context,
promise_fun, value);
}
@ -180,14 +180,14 @@ void AsyncFromSyncBuiltinsAssembler::Generate_AsyncFromSyncIteratorMethod(
// Perform ! PerformPromiseThen(valueWrapper,
// onFulfilled, undefined, promiseCapability).
args->PopAndReturn(CallBuiltin(Builtins::kPerformPromiseThen, context,
args->PopAndReturn(CallBuiltin(Builtin::kPerformPromiseThen, context,
value_wrapper, on_fulfilled,
UndefinedConstant(), promise));
BIND(&reject_promise);
{
const TNode<Object> exception = var_exception.value();
CallBuiltin(Builtins::kRejectPromise, context, promise, exception,
CallBuiltin(Builtin::kRejectPromise, context, promise, exception,
TrueConstant());
args->PopAndReturn(promise);
}
@ -259,7 +259,7 @@ AsyncFromSyncBuiltinsAssembler::LoadIteratorResult(
BIND(&to_boolean);
{
const TNode<Object> result =
CallBuiltin(Builtins::kToBoolean, context, var_done.value());
CallBuiltin(Builtin::kToBoolean, context, var_done.value());
var_done = result;
Goto(&done);
}
@ -309,12 +309,12 @@ TF_BUILTIN(AsyncFromSyncIteratorPrototypeReturn,
// If return is undefined, then
// Let iterResult be ! CreateIterResultObject(value, true)
const TNode<Object> iter_result = CallBuiltin(
Builtins::kCreateIterResultObject, context, value, TrueConstant());
Builtin::kCreateIterResultObject, context, value, TrueConstant());
// Perform ! Call(promiseCapability.[[Resolve]], undefined, « iterResult »).
// IfAbruptRejectPromise(nextDone, promiseCapability).
// Return promiseCapability.[[Promise]].
CallBuiltin(Builtins::kResolvePromise, context, promise, iter_result);
CallBuiltin(Builtin::kResolvePromise, context, promise, iter_result);
args.PopAndReturn(promise);
};


@ -69,7 +69,7 @@ void Builtins::Generate_CallFunctionForwardVarargs(MacroAssembler* masm) {
TF_BUILTIN(Call_ReceiverIsNullOrUndefined_Baseline_Compact,
CallOrConstructBuiltinsAssembler) {
auto receiver = UndefinedConstant();
CallReceiver<Descriptor>(Builtins::kCall_ReceiverIsNullOrUndefined, receiver);
CallReceiver<Descriptor>(Builtin::kCall_ReceiverIsNullOrUndefined, receiver);
}
TF_BUILTIN(Call_ReceiverIsNullOrUndefined_Baseline,
@ -77,32 +77,32 @@ TF_BUILTIN(Call_ReceiverIsNullOrUndefined_Baseline,
auto argc = UncheckedParameter<Int32T>(Descriptor::kActualArgumentsCount);
auto slot = UncheckedParameter<UintPtrT>(Descriptor::kSlot);
auto receiver = UndefinedConstant();
CallReceiver<Descriptor>(Builtins::kCall_ReceiverIsNullOrUndefined, argc,
slot, receiver);
CallReceiver<Descriptor>(Builtin::kCall_ReceiverIsNullOrUndefined, argc, slot,
receiver);
}
TF_BUILTIN(Call_ReceiverIsNotNullOrUndefined_Baseline_Compact,
CallOrConstructBuiltinsAssembler) {
CallReceiver<Descriptor>(Builtins::kCall_ReceiverIsNotNullOrUndefined);
CallReceiver<Descriptor>(Builtin::kCall_ReceiverIsNotNullOrUndefined);
}
TF_BUILTIN(Call_ReceiverIsNotNullOrUndefined_Baseline,
CallOrConstructBuiltinsAssembler) {
auto argc = UncheckedParameter<Int32T>(Descriptor::kActualArgumentsCount);
auto slot = UncheckedParameter<UintPtrT>(Descriptor::kSlot);
CallReceiver<Descriptor>(Builtins::kCall_ReceiverIsNotNullOrUndefined, argc,
CallReceiver<Descriptor>(Builtin::kCall_ReceiverIsNotNullOrUndefined, argc,
slot);
}
TF_BUILTIN(Call_ReceiverIsAny_Baseline_Compact,
CallOrConstructBuiltinsAssembler) {
CallReceiver<Descriptor>(Builtins::kCall_ReceiverIsAny);
CallReceiver<Descriptor>(Builtin::kCall_ReceiverIsAny);
}
TF_BUILTIN(Call_ReceiverIsAny_Baseline, CallOrConstructBuiltinsAssembler) {
auto argc = UncheckedParameter<Int32T>(Descriptor::kActualArgumentsCount);
auto slot = UncheckedParameter<UintPtrT>(Descriptor::kSlot);
CallReceiver<Descriptor>(Builtins::kCall_ReceiverIsAny, argc, slot);
CallReceiver<Descriptor>(Builtin::kCall_ReceiverIsAny, argc, slot);
}
TF_BUILTIN(Call_ReceiverIsNullOrUndefined_WithFeedback,
@ -115,7 +115,7 @@ TF_BUILTIN(Call_ReceiverIsNullOrUndefined_WithFeedback,
auto receiver = Parameter<Object>(Descriptor::kReceiver);
CollectCallFeedback(
target, [=] { return receiver; }, context, feedback_vector, slot);
TailCallBuiltin(Builtins::kCall_ReceiverIsNullOrUndefined, context, target,
TailCallBuiltin(Builtin::kCall_ReceiverIsNullOrUndefined, context, target,
argc);
}
@ -129,7 +129,7 @@ TF_BUILTIN(Call_ReceiverIsNotNullOrUndefined_WithFeedback,
auto receiver = Parameter<Object>(Descriptor::kReceiver);
CollectCallFeedback(
target, [=] { return receiver; }, context, feedback_vector, slot);
TailCallBuiltin(Builtins::kCall_ReceiverIsNotNullOrUndefined, context, target,
TailCallBuiltin(Builtin::kCall_ReceiverIsNotNullOrUndefined, context, target,
argc);
}
@ -142,7 +142,7 @@ TF_BUILTIN(Call_ReceiverIsAny_WithFeedback, CallOrConstructBuiltinsAssembler) {
auto receiver = Parameter<Object>(Descriptor::kReceiver);
CollectCallFeedback(
target, [=] { return receiver; }, context, feedback_vector, slot);
TailCallBuiltin(Builtins::kCall_ReceiverIsAny, context, target, argc);
TailCallBuiltin(Builtin::kCall_ReceiverIsAny, context, target, argc);
}
void CallOrConstructBuiltinsAssembler::CallOrConstructWithArrayLike(
@ -407,7 +407,7 @@ void CallOrConstructBuiltinsAssembler::CallOrConstructWithSpread(
GetProperty(context, spread, IteratorSymbolConstant());
GotoIfNot(TaggedIsCallable(iterator_fn), &if_iterator_fn_not_callable);
TNode<JSArray> list =
CAST(CallBuiltin(Builtins::kIterableToListMayPreserveHoles, context,
CAST(CallBuiltin(Builtin::kIterableToListMayPreserveHoles, context,
spread, iterator_fn));
var_js_array = list;
@ -464,7 +464,7 @@ void CallOrConstructBuiltinsAssembler::CallOrConstructWithSpread(
template <class Descriptor>
void CallOrConstructBuiltinsAssembler::CallReceiver(
Builtins::Name id, base::Optional<TNode<Object>> receiver) {
Builtin id, base::Optional<TNode<Object>> receiver) {
static_assert(std::is_same<Descriptor,
CallTrampoline_Baseline_CompactDescriptor>::value,
"Incompatible Descriptor");
@ -481,7 +481,7 @@ void CallOrConstructBuiltinsAssembler::CallReceiver(
template <class Descriptor>
void CallOrConstructBuiltinsAssembler::CallReceiver(
Builtins::Name id, TNode<Int32T> argc, TNode<UintPtrT> slot,
Builtin id, TNode<Int32T> argc, TNode<UintPtrT> slot,
base::Optional<TNode<Object>> maybe_receiver) {
auto target = Parameter<Object>(Descriptor::kFunction);
auto context = LoadContextFromBaseline();


@ -31,10 +31,9 @@ class CallOrConstructBuiltinsAssembler : public CodeStubAssembler {
TNode<Context> context);
template <class Descriptor>
void CallReceiver(Builtins::Name id,
base::Optional<TNode<Object>> = base::nullopt);
void CallReceiver(Builtin id, base::Optional<TNode<Object>> = base::nullopt);
template <class Descriptor>
void CallReceiver(Builtins::Name id, TNode<Int32T> argc, TNode<UintPtrT> slot,
void CallReceiver(Builtin id, TNode<Int32T> argc, TNode<UintPtrT> slot,
base::Optional<TNode<Object>> = base::nullopt);
enum class CallFunctionTemplateMode : uint8_t {


@ -1368,7 +1368,7 @@ void CollectionsBuiltinsAssembler::SameValueZeroString(
GotoIf(TaggedIsSmi(candidate_key), if_not_same);
GotoIfNot(IsString(CAST(candidate_key)), if_not_same);
Branch(TaggedEqual(CallBuiltin(Builtins::kStringEqual, NoContextConstant(),
Branch(TaggedEqual(CallBuiltin(Builtin::kStringEqual, NoContextConstant(),
key_string, candidate_key),
TrueConstant()),
if_same, if_not_same);
@ -1490,7 +1490,7 @@ CollectionsBuiltinsAssembler::Transition(
var_table = CAST(next_table);
var_index = SmiUntag(
CAST(CallBuiltin(Builtins::kOrderedHashTableHealIndex,
CAST(CallBuiltin(Builtin::kOrderedHashTableHealIndex,
NoContextConstant(), table, SmiTag(index))));
Goto(&loop);
}
@ -1568,8 +1568,8 @@ TF_BUILTIN(MapPrototypeGet, CollectionsBuiltinsAssembler) {
const TNode<Object> table =
LoadObjectField<Object>(CAST(receiver), JSMap::kTableOffset);
TNode<Smi> index = CAST(
CallBuiltin(Builtins::kFindOrderedHashMapEntry, context, table, key));
TNode<Smi> index =
CAST(CallBuiltin(Builtin::kFindOrderedHashMapEntry, context, table, key));
Label if_found(this), if_not_found(this);
Branch(SmiGreaterThanOrEqual(index, SmiConstant(0)), &if_found,
@ -1594,8 +1594,8 @@ TF_BUILTIN(MapPrototypeHas, CollectionsBuiltinsAssembler) {
const TNode<Object> table =
LoadObjectField(CAST(receiver), JSMap::kTableOffset);
TNode<Smi> index = CAST(
CallBuiltin(Builtins::kFindOrderedHashMapEntry, context, table, key));
TNode<Smi> index =
CAST(CallBuiltin(Builtin::kFindOrderedHashMapEntry, context, table, key));
Label if_found(this), if_not_found(this);
Branch(SmiGreaterThanOrEqual(index, SmiConstant(0)), &if_found,
@ -2744,7 +2744,7 @@ TF_BUILTIN(WeakMapGet, WeakCollectionsBuiltinsAssembler) {
const TNode<EphemeronHashTable> table = LoadTable(CAST(receiver));
const TNode<Smi> index =
CAST(CallBuiltin(Builtins::kWeakMapLookupHashIndex, context, table, key));
CAST(CallBuiltin(Builtin::kWeakMapLookupHashIndex, context, table, key));
GotoIf(TaggedEqual(index, SmiConstant(-1)), &return_undefined);
@ -2766,7 +2766,7 @@ TF_BUILTIN(WeakMapPrototypeHas, WeakCollectionsBuiltinsAssembler) {
const TNode<EphemeronHashTable> table = LoadTable(CAST(receiver));
const TNode<Object> index =
CallBuiltin(Builtins::kWeakMapLookupHashIndex, context, table, key);
CallBuiltin(Builtin::kWeakMapLookupHashIndex, context, table, key);
GotoIf(TaggedEqual(index, SmiConstant(-1)), &return_false);
@ -2866,7 +2866,7 @@ TF_BUILTIN(WeakMapPrototypeDelete, CodeStubAssembler) {
ThrowIfNotInstanceType(context, receiver, JS_WEAK_MAP_TYPE,
"WeakMap.prototype.delete");
Return(CallBuiltin(Builtins::kWeakCollectionDelete, context, receiver, key));
Return(CallBuiltin(Builtin::kWeakCollectionDelete, context, receiver, key));
}
TF_BUILTIN(WeakMapPrototypeSet, WeakCollectionsBuiltinsAssembler) {
@ -2882,7 +2882,7 @@ TF_BUILTIN(WeakMapPrototypeSet, WeakCollectionsBuiltinsAssembler) {
GotoIfNotJSReceiver(key, &throw_invalid_key);
Return(
CallBuiltin(Builtins::kWeakCollectionSet, context, receiver, key, value));
CallBuiltin(Builtin::kWeakCollectionSet, context, receiver, key, value));
BIND(&throw_invalid_key);
ThrowTypeError(context, MessageTemplate::kInvalidWeakMapKey, key);
@ -2899,7 +2899,7 @@ TF_BUILTIN(WeakSetPrototypeAdd, WeakCollectionsBuiltinsAssembler) {
Label throw_invalid_value(this);
GotoIfNotJSReceiver(value, &throw_invalid_value);
Return(CallBuiltin(Builtins::kWeakCollectionSet, context, receiver, value,
Return(CallBuiltin(Builtin::kWeakCollectionSet, context, receiver, value,
TrueConstant()));
BIND(&throw_invalid_value);
@ -2914,8 +2914,7 @@ TF_BUILTIN(WeakSetPrototypeDelete, CodeStubAssembler) {
ThrowIfNotInstanceType(context, receiver, JS_WEAK_SET_TYPE,
"WeakSet.prototype.delete");
Return(
CallBuiltin(Builtins::kWeakCollectionDelete, context, receiver, value));
Return(CallBuiltin(Builtin::kWeakCollectionDelete, context, receiver, value));
}
TF_BUILTIN(WeakSetPrototypeHas, WeakCollectionsBuiltinsAssembler) {
@ -2930,7 +2929,7 @@ TF_BUILTIN(WeakSetPrototypeHas, WeakCollectionsBuiltinsAssembler) {
const TNode<EphemeronHashTable> table = LoadTable(CAST(receiver));
const TNode<Object> index =
CallBuiltin(Builtins::kWeakMapLookupHashIndex, context, table, key);
CallBuiltin(Builtin::kWeakMapLookupHashIndex, context, table, key);
GotoIf(TaggedEqual(index, SmiConstant(-1)), &return_false);


@ -125,7 +125,7 @@ BUILTIN(ConsoleTimeStamp) {
namespace {
void InstallContextFunction(Isolate* isolate, Handle<JSObject> target,
const char* name, Builtins::Name builtin_id,
const char* name, Builtin builtin_id,
int context_id, Handle<Object> context_name) {
Factory* const factory = isolate->factory();
@ -163,7 +163,7 @@ BUILTIN(ConsoleContext) {
Factory* const factory = isolate->factory();
Handle<String> name = factory->InternalizeUtf8String("Context");
Handle<SharedFunctionInfo> info =
factory->NewSharedFunctionInfoForBuiltin(name, Builtins::kIllegal);
factory->NewSharedFunctionInfoForBuiltin(name, Builtin::kIllegal);
info->set_language_mode(LanguageMode::kSloppy);
Handle<JSFunction> cons =
@ -178,17 +178,17 @@ BUILTIN(ConsoleContext) {
int id = isolate->last_console_context_id() + 1;
isolate->set_last_console_context_id(id);
#define CONSOLE_BUILTIN_SETUP(call, name) \
InstallContextFunction(isolate, context, #name, Builtins::kConsole##call, \
id, args.at(1));
#define CONSOLE_BUILTIN_SETUP(call, name) \
InstallContextFunction(isolate, context, #name, Builtin::kConsole##call, id, \
args.at(1));
CONSOLE_METHOD_LIST(CONSOLE_BUILTIN_SETUP)
#undef CONSOLE_BUILTIN_SETUP
InstallContextFunction(isolate, context, "time", Builtins::kConsoleTime, id,
InstallContextFunction(isolate, context, "time", Builtin::kConsoleTime, id,
args.at(1));
InstallContextFunction(isolate, context, "timeEnd", Builtins::kConsoleTimeEnd,
InstallContextFunction(isolate, context, "timeEnd", Builtin::kConsoleTimeEnd,
id, args.at(1));
InstallContextFunction(isolate, context, "timeStamp",
Builtins::kConsoleTimeStamp, id, args.at(1));
Builtin::kConsoleTimeStamp, id, args.at(1));
return *context;
}


@ -76,11 +76,10 @@ void CallOrConstructBuiltinsAssembler::BuildConstruct(
&if_construct_array, &allocation_site);
BIND(&if_construct_generic);
TailCallBuiltin(Builtins::kConstruct, eager_context, target, new_target,
argc);
TailCallBuiltin(Builtin::kConstruct, eager_context, target, new_target, argc);
BIND(&if_construct_array);
TailCallBuiltin(Builtins::kArrayConstructorImpl, eager_context, target,
TailCallBuiltin(Builtin::kArrayConstructorImpl, eager_context, target,
new_target, argc, allocation_site.value());
}
@ -256,7 +255,7 @@ TF_BUILTIN(FastNewClosure, ConstructorBuiltinsAssembler) {
shared_function_info);
StoreObjectFieldNoWriteBarrier(result, JSFunction::kContextOffset, context);
Handle<Code> lazy_builtin_handle =
isolate()->builtins()->builtin_handle(Builtins::kCompileLazy);
isolate()->builtins()->builtin_handle(Builtin::kCompileLazy);
TNode<Code> lazy_builtin = HeapConstant(lazy_builtin_handle);
StoreObjectFieldNoWriteBarrier(result, JSFunction::kCodeOffset, lazy_builtin);
Return(result);


@ -208,15 +208,15 @@ TF_BUILTIN(DatePrototypeToPrimitive, CodeStubAssembler) {
GotoIf(TaggedIsSmi(hint), &hint_is_invalid);
GotoIfNot(IsString(CAST(hint)), &hint_is_invalid);
GotoIf(TaggedEqual(
CallBuiltin(Builtins::kStringEqual, context, hint, number_string),
CallBuiltin(Builtin::kStringEqual, context, hint, number_string),
TrueConstant()),
&hint_is_number);
GotoIf(TaggedEqual(
CallBuiltin(Builtins::kStringEqual, context, hint, default_string),
CallBuiltin(Builtin::kStringEqual, context, hint, default_string),
TrueConstant()),
&hint_is_string);
GotoIf(TaggedEqual(
CallBuiltin(Builtins::kStringEqual, context, hint, string_string),
CallBuiltin(Builtin::kStringEqual, context, hint, string_string),
TrueConstant()),
&hint_is_string);
Goto(&hint_is_invalid);


@ -87,7 +87,7 @@ void GeneratorBuiltinsAssembler::InnerResume(
StoreObjectFieldNoWriteBarrier(
receiver, JSGeneratorObject::kContinuationOffset, closed);
// Return the wrapped result.
args->PopAndReturn(CallBuiltin(Builtins::kCreateIterResultObject, context,
args->PopAndReturn(CallBuiltin(Builtin::kCreateIterResultObject, context,
result, TrueConstant()));
}
@ -97,11 +97,11 @@ void GeneratorBuiltinsAssembler::InnerResume(
TNode<Object> result;
switch (resume_mode) {
case JSGeneratorObject::kNext:
result = CallBuiltin(Builtins::kCreateIterResultObject, context,
result = CallBuiltin(Builtin::kCreateIterResultObject, context,
UndefinedConstant(), TrueConstant());
break;
case JSGeneratorObject::kReturn:
result = CallBuiltin(Builtins::kCreateIterResultObject, context, value,
result = CallBuiltin(Builtin::kCreateIterResultObject, context, value,
TrueConstant());
break;
case JSGeneratorObject::kThrow:


@ -46,7 +46,7 @@ TF_BUILTIN(GlobalIsFinite, CodeStubAssembler) {
{
// Need to convert {num_heap_object} to a Number first.
var_num =
CallBuiltin(Builtins::kNonNumberToNumber, context, num_heap_object);
CallBuiltin(Builtin::kNonNumberToNumber, context, num_heap_object);
Goto(&loop);
}
}
@ -94,7 +94,7 @@ TF_BUILTIN(GlobalIsNaN, CodeStubAssembler) {
{
// Need to convert {num_heap_object} to a Number first.
var_num =
CallBuiltin(Builtins::kNonNumberToNumber, context, num_heap_object);
CallBuiltin(Builtin::kNonNumberToNumber, context, num_heap_object);
Goto(&loop);
}
}


@ -611,9 +611,9 @@ TF_BUILTIN(DeleteProperty, DeletePropertyBaseAssembler) {
BIND(&if_proxy);
{
TNode<Name> name = CAST(CallBuiltin(Builtins::kToName, context, key));
TNode<Name> name = CAST(CallBuiltin(Builtin::kToName, context, key));
GotoIf(IsPrivateSymbol(name), &slow);
TailCallBuiltin(Builtins::kProxyDeleteProperty, context, receiver, name,
TailCallBuiltin(Builtin::kProxyDeleteProperty, context, receiver, name,
language_mode);
}
@ -678,7 +678,7 @@ class SetOrCopyDataPropertiesAssembler : public CodeStubAssembler {
ForEachEnumerableOwnProperty(
context, source_map, CAST(source), kEnumerationOrder,
[=](TNode<Name> key, TNode<Object> value) {
CallBuiltin(Builtins::kSetPropertyInLiteral, context, target, key,
CallBuiltin(Builtin::kSetPropertyInLiteral, context, target, key,
value);
},
if_runtime);
@ -1054,12 +1054,12 @@ TF_BUILTIN(GetProperty, CodeStubAssembler) {
BIND(&if_proxy);
{
// Convert the {key} to a Name first.
TNode<Object> name = CallBuiltin(Builtins::kToName, context, key);
TNode<Object> name = CallBuiltin(Builtin::kToName, context, key);
// The {object} is a JSProxy instance, look up the {name} on it, passing
// {object} both as receiver and holder. If {name} is absent we can safely
// return undefined from here.
TailCallBuiltin(Builtins::kProxyGetProperty, context, object, name, object,
TailCallBuiltin(Builtin::kProxyGetProperty, context, object, name, object,
SmiConstant(OnNonExistent::kReturnUndefined));
}
}
@ -1119,7 +1119,7 @@ TF_BUILTIN(GetPropertyWithReceiver, CodeStubAssembler) {
BIND(&if_proxy);
{
// Convert the {key} to a Name first.
TNode<Name> name = CAST(CallBuiltin(Builtins::kToName, context, key));
TNode<Name> name = CAST(CallBuiltin(Builtin::kToName, context, key));
// Proxy cannot handle private symbol so bailout.
GotoIf(IsPrivateSymbol(name), &if_slow);
@ -1127,8 +1127,8 @@ TF_BUILTIN(GetPropertyWithReceiver, CodeStubAssembler) {
// The {object} is a JSProxy instance, look up the {name} on it, passing
// {object} both as receiver and holder. If {name} is absent we can safely
// return undefined from here.
TailCallBuiltin(Builtins::kProxyGetProperty, context, object, name,
receiver, on_non_existent);
TailCallBuiltin(Builtin::kProxyGetProperty, context, object, name, receiver,
on_non_existent);
}
}


@ -142,7 +142,7 @@ TF_BUILTIN(StringPrototypeToLowerCaseIntl, IntlBuiltinsAssembler) {
TNode<String> string =
ToThisString(context, maybe_string, "String.prototype.toLowerCase");
Return(CallBuiltin(Builtins::kStringToLowerCaseIntl, context, string));
Return(CallBuiltin(Builtin::kStringToLowerCaseIntl, context, string));
}
void IntlBuiltinsAssembler::ListFormatCommon(TNode<Context> context,
@ -165,7 +165,7 @@ void IntlBuiltinsAssembler::ListFormatCommon(TNode<Context> context,
{
// 4. Let stringList be ? StringListFromIterable(list).
TNode<Object> string_list =
CallBuiltin(Builtins::kStringListFromIterable, context, list);
CallBuiltin(Builtin::kStringListFromIterable, context, list);
// 6. Return ? FormatList(lf, stringList).
args.PopAndReturn(


@ -223,7 +223,7 @@ namespace {
Handle<JSFunction> CreateBoundFunction(Isolate* isolate,
Handle<JSObject> object,
Builtins::Name builtin_id, int len) {
Builtin builtin_id, int len) {
Handle<NativeContext> native_context(isolate->context().native_context(),
isolate);
Handle<Context> context = isolate->factory()->NewBuiltinContext(
@ -469,7 +469,7 @@ BUILTIN(NumberFormatPrototypeFormatNumber) {
}
Handle<JSFunction> new_bound_format_function = CreateBoundFunction(
isolate, number_format, Builtins::kNumberFormatInternalFormatNumber, 1);
isolate, number_format, Builtin::kNumberFormatInternalFormatNumber, 1);
// 4. c. Set nf.[[BoundFormat]] to F.
number_format->set_bound_format(*new_bound_format_function);
@ -541,7 +541,7 @@ BUILTIN(DateTimeFormatPrototypeFormat) {
}
Handle<JSFunction> new_bound_format_function = CreateBoundFunction(
isolate, format, Builtins::kDateTimeFormatInternalFormat, 1);
isolate, format, Builtin::kDateTimeFormatInternalFormat, 1);
// 4.c. Set dtf.[[BoundFormat]] to F.
format->set_bound_format(*new_bound_format_function);
@ -964,7 +964,7 @@ BUILTIN(CollatorPrototypeCompare) {
}
Handle<JSFunction> new_bound_compare_function = CreateBoundFunction(
isolate, collator, Builtins::kCollatorInternalCompare, 2);
isolate, collator, Builtin::kCollatorInternalCompare, 2);
// 4.c. Set collator.[[BoundCompare]] to F.
collator->set_bound_compare(*new_bound_compare_function);
@ -1113,7 +1113,7 @@ BUILTIN(V8BreakIteratorPrototypeAdoptText) {
}
Handle<JSFunction> new_bound_adopt_text_function = CreateBoundFunction(
isolate, break_iterator, Builtins::kV8BreakIteratorInternalAdoptText, 1);
isolate, break_iterator, Builtin::kV8BreakIteratorInternalAdoptText, 1);
break_iterator->set_bound_adopt_text(*new_bound_adopt_text_function);
return *new_bound_adopt_text_function;
}
@ -1149,7 +1149,7 @@ BUILTIN(V8BreakIteratorPrototypeFirst) {
}
Handle<JSFunction> new_bound_first_function = CreateBoundFunction(
isolate, break_iterator, Builtins::kV8BreakIteratorInternalFirst, 0);
isolate, break_iterator, Builtin::kV8BreakIteratorInternalFirst, 0);
break_iterator->set_bound_first(*new_bound_first_function);
return *new_bound_first_function;
}
@ -1179,7 +1179,7 @@ BUILTIN(V8BreakIteratorPrototypeNext) {
}
Handle<JSFunction> new_bound_next_function = CreateBoundFunction(
isolate, break_iterator, Builtins::kV8BreakIteratorInternalNext, 0);
isolate, break_iterator, Builtin::kV8BreakIteratorInternalNext, 0);
break_iterator->set_bound_next(*new_bound_next_function);
return *new_bound_next_function;
}
@ -1208,7 +1208,7 @@ BUILTIN(V8BreakIteratorPrototypeCurrent) {
}
Handle<JSFunction> new_bound_current_function = CreateBoundFunction(
isolate, break_iterator, Builtins::kV8BreakIteratorInternalCurrent, 0);
isolate, break_iterator, Builtin::kV8BreakIteratorInternalCurrent, 0);
break_iterator->set_bound_current(*new_bound_current_function);
return *new_bound_current_function;
}
@ -1237,7 +1237,7 @@ BUILTIN(V8BreakIteratorPrototypeBreakType) {
}
Handle<JSFunction> new_bound_break_type_function = CreateBoundFunction(
isolate, break_iterator, Builtins::kV8BreakIteratorInternalBreakType, 0);
isolate, break_iterator, Builtin::kV8BreakIteratorInternalBreakType, 0);
break_iterator->set_bound_break_type(*new_bound_break_type_function);
return *new_bound_break_type_function;
}


@ -305,10 +305,10 @@ TF_BUILTIN(IterableToListMayPreserveHoles, IteratorBuiltinsAssembler) {
GotoIfNot(IsFastJSArrayWithNoCustomIteration(context, iterable), &slow_path);
// The fast path will copy holes to the new array.
TailCallBuiltin(Builtins::kCloneFastJSArray, context, iterable);
TailCallBuiltin(Builtin::kCloneFastJSArray, context, iterable);
BIND(&slow_path);
TailCallBuiltin(Builtins::kIterableToList, context, iterable, iterator_fn);
TailCallBuiltin(Builtin::kIterableToList, context, iterable, iterator_fn);
}
void IteratorBuiltinsAssembler::FastIterableToList(
@ -323,7 +323,7 @@ void IteratorBuiltinsAssembler::FastIterableToList(
// Fast path for fast JSArray.
*var_result = CAST(
CallBuiltin(Builtins::kCloneFastJSArrayFillingHoles, context, iterable));
CallBuiltin(Builtin::kCloneFastJSArrayFillingHoles, context, iterable));
Goto(&done);
BIND(&check_string);
@ -339,7 +339,7 @@ void IteratorBuiltinsAssembler::FastIterableToList(
GotoIf(
IntPtrGreaterThan(length, IntPtrConstant(JSArray::kMaxFastArrayLength)),
slow);
*var_result = CAST(CallBuiltin(Builtins::kStringToList, context, iterable));
*var_result = CAST(CallBuiltin(Builtin::kStringToList, context, iterable));
Goto(&done);
}
@ -351,7 +351,7 @@ void IteratorBuiltinsAssembler::FastIterableToList(
BIND(&map_fast_call);
*var_result =
CAST(CallBuiltin(Builtins::kMapIteratorToList, context, iterable));
CAST(CallBuiltin(Builtin::kMapIteratorToList, context, iterable));
Goto(&done);
}
@ -363,7 +363,7 @@ void IteratorBuiltinsAssembler::FastIterableToList(
BIND(&set_fast_call);
*var_result =
CAST(CallBuiltin(Builtins::kSetOrSetIteratorToList, context, iterable));
CAST(CallBuiltin(Builtin::kSetOrSetIteratorToList, context, iterable));
Goto(&done);
}
@ -403,7 +403,7 @@ TF_BUILTIN(IterableToListWithSymbolLookup, IteratorBuiltinsAssembler) {
BIND(&slow_path);
{
TNode<Object> iterator_fn = GetIteratorMethod(context, iterable);
TailCallBuiltin(Builtins::kIterableToList, context, iterable, iterator_fn);
TailCallBuiltin(Builtin::kIterableToList, context, iterable, iterator_fn);
}
}
@ -418,7 +418,7 @@ TF_BUILTIN(GetIteratorWithFeedbackLazyDeoptContinuation,
// Note, that the builtin also expects the call_slot as a Smi.
TNode<Object> result =
CallBuiltin(Builtins::kCallIteratorWithFeedback, context, receiver,
CallBuiltin(Builtin::kCallIteratorWithFeedback, context, receiver,
iterator_method, call_slot_smi, feedback);
Return(result);
}
@ -431,7 +431,7 @@ TF_BUILTIN(IterableToFixedArrayWithSymbolLookupSlow,
auto iterable = Parameter<Object>(Descriptor::kIterable);
TNode<Object> iterator_fn = GetIteratorMethod(context, iterable);
TailCallBuiltin(Builtins::kIterableToFixedArray, context, iterable,
TailCallBuiltin(Builtin::kIterableToFixedArray, context, iterable,
iterator_fn);
}


@ -207,7 +207,7 @@ void MicrotaskQueueBuiltinsAssembler::RunSingleMicrotask(
{
ScopedExceptionHandler handler(this, &if_exception, &var_exception);
CallBuiltin(Builtins::kPromiseResolveThenableJob, native_context,
CallBuiltin(Builtin::kPromiseResolveThenableJob, native_context,
promise_to_resolve, thenable, then);
}
@ -251,7 +251,7 @@ void MicrotaskQueueBuiltinsAssembler::RunSingleMicrotask(
{
ScopedExceptionHandler handler(this, &if_exception, &var_exception);
CallBuiltin(Builtins::kPromiseFulfillReactionJob, microtask_context,
CallBuiltin(Builtin::kPromiseFulfillReactionJob, microtask_context,
argument, job_handler, promise_or_capability);
}
@ -304,7 +304,7 @@ void MicrotaskQueueBuiltinsAssembler::RunSingleMicrotask(
{
ScopedExceptionHandler handler(this, &if_exception, &var_exception);
CallBuiltin(Builtins::kPromiseRejectReactionJob, microtask_context,
CallBuiltin(Builtin::kPromiseRejectReactionJob, microtask_context,
argument, job_handler, promise_or_capability);
}


@ -443,7 +443,7 @@ TF_BUILTIN(ObjectAssign, ObjectBuiltinsAssembler) {
// 4. For each element nextSource of sources, in ascending index order,
args.ForEach(
[=](TNode<Object> next_source) {
CallBuiltin(Builtins::kSetDataProperties, context, to, next_source);
CallBuiltin(Builtin::kSetDataProperties, context, to, next_source);
},
IntPtrConstant(1));
Goto(&done);
@ -569,7 +569,7 @@ TF_BUILTIN(ObjectHasOwn, ObjectBuiltinsAssembler) {
ThrowTypeError(context, MessageTemplate::kUndefinedOrNullToObject);
BIND(&not_undefined_nor_null);
Return(CallBuiltin(Builtins::kObjectPrototypeHasOwnProperty, context, target,
Return(CallBuiltin(Builtin::kObjectPrototypeHasOwnProperty, context, target,
new_target, Int32Constant(2), object, key));
}
@ -1303,7 +1303,7 @@ TF_BUILTIN(ObjectGetOwnPropertyDescriptor, ObjectBuiltinsAssembler) {
TNode<JSReceiver> object = ToObject_Inline(context, object_input);
// 2. Let key be ? ToPropertyKey(P).
key = CallBuiltin(Builtins::kToName, context, key);
key = CallBuiltin(Builtin::kToName, context, key);
// 3. Let desc be ? obj.[[GetOwnProperty]](key).
Label if_keyisindex(this), if_iskeyunique(this),


@ -53,7 +53,7 @@ TNode<RawPtrT> RegExpBuiltinsAssembler::LoadCodeObjectEntry(TNode<Code> code) {
TNode<Int32T> builtin_index =
LoadObjectField<Int32T>(code, Code::kBuiltinIndexOffset);
{
GotoIfNot(Word32Equal(builtin_index, Int32Constant(Builtins::kNoBuiltinId)),
GotoIfNot(Word32Equal(builtin_index, Int32Constant(Builtin::kNoBuiltinId)),
&if_code_is_off_heap);
var_result = ReinterpretCast<RawPtrT>(
IntPtrAdd(BitcastTaggedToWord(code),
@ -226,7 +226,7 @@ TNode<JSRegExpResult> RegExpBuiltinsAssembler::ConstructNewResultFromMatchInfo(
// to avoid an unnecessary write barrier storing the first result.
TNode<String> first =
CAST(CallBuiltin(Builtins::kSubString, context, string, start, end));
CAST(CallBuiltin(Builtin::kSubString, context, string, start, end));
// Load flags and check if the result object needs to have indices.
const TNode<Smi> flags =
@ -269,7 +269,7 @@ TNode<JSRegExpResult> RegExpBuiltinsAssembler::ConstructNewResultFromMatchInfo(
CAST(UnsafeLoadFixedArrayElement(match_info, from_cursor_plus1));
TNode<String> capture =
CAST(CallBuiltin(Builtins::kSubString, context, string, start, end));
CAST(CallBuiltin(Builtin::kSubString, context, string, start, end));
UnsafeStoreFixedArrayElement(result_elements, to_cursor, capture);
Goto(&next_iter);
@ -765,7 +765,7 @@ TNode<HeapObject> RegExpBuiltinsAssembler::RegExpExecInternal(
{
// TODO(jgruber): A call with 4 args stresses register allocation, this
// should probably just be inlined.
var_result = CAST(CallBuiltin(Builtins::kRegExpExecAtom, context, regexp,
var_result = CAST(CallBuiltin(Builtin::kRegExpExecAtom, context, regexp,
string, last_index, match_info));
Goto(&out);
}
@ -961,7 +961,7 @@ TF_BUILTIN(RegExpExecAtom, RegExpBuiltinsAssembler) {
IntPtrConstant(0)));
const TNode<Smi> match_from =
CAST(CallBuiltin(Builtins::kStringIndexOf, context, subject_string,
CAST(CallBuiltin(Builtin::kStringIndexOf, context, subject_string,
needle_string, last_index));
Label if_failure(this), if_success(this);
@ -1588,7 +1588,7 @@ TNode<JSArray> RegExpBuiltinsAssembler::RegExpPrototypeSplitBody(
native_context, Context::REGEXP_LAST_MATCH_INFO_INDEX);
const TNode<Object> match_indices =
CallBuiltin(Builtins::kRegExpExecInternal, context, regexp, string,
CallBuiltin(Builtin::kRegExpExecInternal, context, regexp, string,
SmiZero(), last_match_info);
Label return_singleton_array(this);
@ -1682,7 +1682,7 @@ TNode<JSArray> RegExpBuiltinsAssembler::RegExpPrototypeSplitBody(
{
const TNode<Smi> from = last_matched_until;
const TNode<Smi> to = match_from;
array.Push(CallBuiltin(Builtins::kSubString, context, string, from, to));
array.Push(CallBuiltin(Builtin::kSubString, context, string, from, to));
GotoIf(WordEqual(array.length(), int_limit), &out);
}
@ -1718,7 +1718,7 @@ TNode<JSArray> RegExpBuiltinsAssembler::RegExpPrototypeSplitBody(
BIND(&select_capture);
{
var_value =
CallBuiltin(Builtins::kSubString, context, string, from, to);
CallBuiltin(Builtin::kSubString, context, string, from, to);
Goto(&store_value);
}
@ -1753,7 +1753,7 @@ TNode<JSArray> RegExpBuiltinsAssembler::RegExpPrototypeSplitBody(
{
const TNode<Smi> from = var_last_matched_until.value();
const TNode<Smi> to = string_length;
array.Push(CallBuiltin(Builtins::kSubString, context, string, from, to));
array.Push(CallBuiltin(Builtin::kSubString, context, string, from, to));
Goto(&out);
}


@ -955,9 +955,8 @@ const TNode<Smi> StringBuiltinsAssembler::IndexOfDollarChar(
const TNode<Context> context, const TNode<String> string) {
const TNode<String> dollar_string = HeapConstant(
isolate()->factory()->LookupSingleCharacterStringFromCode('$'));
const TNode<Smi> dollar_ix =
CAST(CallBuiltin(Builtins::kStringIndexOf, context, string, dollar_string,
SmiConstant(0)));
const TNode<Smi> dollar_ix = CAST(CallBuiltin(
Builtin::kStringIndexOf, context, string, dollar_string, SmiConstant(0)));
return dollar_ix;
}
@ -987,7 +986,7 @@ TNode<String> StringBuiltinsAssembler::GetSubstitution(
CSA_ASSERT(this, TaggedIsPositiveSmi(dollar_index));
const TNode<Object> matched =
CallBuiltin(Builtins::kStringSubstring, context, subject_string,
CallBuiltin(Builtin::kStringSubstring, context, subject_string,
SmiUntag(match_start_index), SmiUntag(match_end_index));
const TNode<String> replacement_string = CAST(
CallRuntime(Runtime::kGetSubstitution, context, matched, subject_string,
@ -1022,7 +1021,7 @@ TF_BUILTIN(StringPrototypeReplace, StringBuiltinsAssembler) {
RootIndex::kreplace_symbol,
Context::REGEXP_REPLACE_FUNCTION_INDEX},
[=]() {
Return(CallBuiltin(Builtins::kRegExpReplace, context, search, receiver,
Return(CallBuiltin(Builtin::kRegExpReplace, context, search, receiver,
replace));
},
[=](TNode<Object> fn) {
@ -1071,7 +1070,7 @@ TF_BUILTIN(StringPrototypeReplace, StringBuiltinsAssembler) {
// (2-byte).
const TNode<Smi> match_start_index =
CAST(CallBuiltin(Builtins::kStringIndexOf, context, subject_string,
CAST(CallBuiltin(Builtin::kStringIndexOf, context, subject_string,
search_string, smi_zero));
// Early exit if no match found.
@ -1109,7 +1108,7 @@ TF_BUILTIN(StringPrototypeReplace, StringBuiltinsAssembler) {
GotoIf(SmiEqual(match_start_index, smi_zero), &next);
const TNode<String> prefix =
CAST(CallBuiltin(Builtins::kStringSubstring, context, subject_string,
CAST(CallBuiltin(Builtin::kStringSubstring, context, subject_string,
IntPtrConstant(0), SmiUntag(match_start_index)));
var_result = prefix;
@ -1131,7 +1130,7 @@ TF_BUILTIN(StringPrototypeReplace, StringBuiltinsAssembler) {
match_start_index, subject_string);
const TNode<String> replacement_string =
ToString_Inline(context, replacement);
var_result = CAST(CallBuiltin(Builtins::kStringAdd_CheckNone, context,
var_result = CAST(CallBuiltin(Builtin::kStringAdd_CheckNone, context,
var_result.value(), replacement_string));
Goto(&out);
}
@ -1142,7 +1141,7 @@ TF_BUILTIN(StringPrototypeReplace, StringBuiltinsAssembler) {
const TNode<Object> replacement =
GetSubstitution(context, subject_string, match_start_index,
match_end_index, replace_string);
var_result = CAST(CallBuiltin(Builtins::kStringAdd_CheckNone, context,
var_result = CAST(CallBuiltin(Builtin::kStringAdd_CheckNone, context,
var_result.value(), replacement));
Goto(&out);
}
@ -1150,10 +1149,10 @@ TF_BUILTIN(StringPrototypeReplace, StringBuiltinsAssembler) {
BIND(&out);
{
const TNode<Object> suffix =
CallBuiltin(Builtins::kStringSubstring, context, subject_string,
CallBuiltin(Builtin::kStringSubstring, context, subject_string,
SmiUntag(match_end_index), subject_length);
const TNode<Object> result = CallBuiltin(
Builtins::kStringAdd_CheckNone, context, var_result.value(), suffix);
Builtin::kStringAdd_CheckNone, context, var_result.value(), suffix);
Return(result);
}
}
@ -1208,7 +1207,7 @@ TF_BUILTIN(StringPrototypeMatchAll, StringBuiltinsAssembler) {
TNode<String> flags_string = ToString_Inline(context, flags);
TNode<String> global_char_string = StringConstant("g");
TNode<Smi> global_ix =
CAST(CallBuiltin(Builtins::kStringIndexOf, context, flags_string,
CAST(CallBuiltin(Builtin::kStringIndexOf, context, flags_string,
global_char_string, SmiConstant(0)));
Branch(SmiEqual(global_ix, SmiConstant(-1)), &throw_exception, &next);
}
@ -1364,8 +1363,8 @@ TF_BUILTIN(StringPrototypeSplit, StringBuiltinsAssembler) {
RootIndex::ksplit_symbol,
Context::REGEXP_SPLIT_FUNCTION_INDEX},
[&]() {
args.PopAndReturn(CallBuiltin(Builtins::kRegExpSplit, context,
separator, receiver, limit));
args.PopAndReturn(CallBuiltin(Builtin::kRegExpSplit, context, separator,
receiver, limit));
},
[&](TNode<Object> fn) {
args.PopAndReturn(Call(context, fn, separator, receiver, limit));


@ -123,7 +123,7 @@ BUILTIN(IsTraceCategoryEnabled) {
return isolate->heap()->ToBoolean(enabled);
}
// Builtins::kTrace(phase, category, name, id, data) : bool
// Builtin::kTrace(phase, category, name, id, data) : bool
BUILTIN(Trace) {
HandleScope handle_scope(isolate);


@ -102,7 +102,7 @@ TF_BUILTIN(TypedArrayConstructor, TypedArrayBuiltinsAssembler) {
Label throwtypeerror(this, Label::kDeferred);
GotoIf(IsUndefined(new_target), &throwtypeerror);
TNode<Object> result = CallBuiltin(Builtins::kCreateTypedArray, context,
TNode<Object> result = CallBuiltin(Builtin::kCreateTypedArray, context,
target, new_target, arg1, arg2, arg3);
args.PopAndReturn(result);


@ -51,7 +51,7 @@ class CodeAssemblerState;
void Builtins::Generate_##Name(compiler::CodeAssemblerState* state) { \
Name##Assembler assembler(state); \
state->SetInitialDebugInformation(#Name, __FILE__, __LINE__); \
if (Builtins::KindOf(Builtins::k##Name) == Builtins::TFJ) { \
if (Builtins::KindOf(Builtin::k##Name) == Builtins::TFJ) { \
assembler.PerformStackCheck(assembler.GetJSContextParameter()); \
} \
assembler.Generate##Name##Impl(); \


@ -88,30 +88,30 @@ const BuiltinMetadata builtin_metadata[] = {BUILTIN_LIST(
} // namespace
BytecodeOffset Builtins::GetContinuationBytecodeOffset(Name name) {
DCHECK(Builtins::KindOf(name) == TFJ || Builtins::KindOf(name) == TFC ||
Builtins::KindOf(name) == TFS);
return BytecodeOffset(BytecodeOffset::kFirstBuiltinContinuationId + name);
BytecodeOffset Builtins::GetContinuationBytecodeOffset(Builtin builtin) {
DCHECK(Builtins::KindOf(builtin) == TFJ || Builtins::KindOf(builtin) == TFC ||
Builtins::KindOf(builtin) == TFS);
return BytecodeOffset(BytecodeOffset::kFirstBuiltinContinuationId + builtin);
}
Builtins::Name Builtins::GetBuiltinFromBytecodeOffset(BytecodeOffset id) {
Builtin Builtins::GetBuiltinFromBytecodeOffset(BytecodeOffset id) {
int builtin_index = id.ToInt() - BytecodeOffset::kFirstBuiltinContinuationId;
DCHECK(Builtins::KindOf(builtin_index) == TFJ ||
Builtins::KindOf(builtin_index) == TFC ||
Builtins::KindOf(builtin_index) == TFS);
return static_cast<Name>(builtin_index);
return static_cast<Builtin>(builtin_index);
}
void Builtins::TearDown() { initialized_ = false; }
const char* Builtins::Lookup(Address pc) {
// Off-heap pc's can be looked up through binary search.
Builtins::Name builtin = InstructionStream::TryLookupCode(isolate_, pc);
Builtin builtin = InstructionStream::TryLookupCode(isolate_, pc);
if (Builtins::IsBuiltinId(builtin)) return name(builtin);
// May be called during initialization (disassembler).
if (initialized_) {
for (int i = 0; i < builtin_count; i++) {
for (int i = 0; i < kBuiltinCount; i++) {
if (isolate_->heap()->builtin(i).contains(isolate_, pc)) return name(i);
}
}
@ -177,19 +177,19 @@ Handle<Code> Builtins::builtin_handle(int index) {
}
// static
int Builtins::GetStackParameterCount(Name name) {
DCHECK(Builtins::KindOf(name) == TFJ);
return builtin_metadata[name].data.parameter_count;
int Builtins::GetStackParameterCount(Builtin builtin) {
DCHECK(Builtins::KindOf(builtin) == TFJ);
return builtin_metadata[static_cast<int>(builtin)].data.parameter_count;
}
// static
CallInterfaceDescriptor Builtins::CallInterfaceDescriptorFor(Name name) {
CallInterfaceDescriptor Builtins::CallInterfaceDescriptorFor(Builtin builtin) {
CallDescriptors::Key key;
switch (name) {
switch (builtin) {
// This macro is deliberately crafted so as to emit very little code,
// in order to keep binary size of this function under control.
#define CASE_OTHER(Name, ...) \
case k##Name: { \
case Builtin::k##Name: { \
key = Builtin_##Name##_InterfaceDescriptor::key(); \
break; \
}
@ -197,7 +197,7 @@ CallInterfaceDescriptor Builtins::CallInterfaceDescriptorFor(Name name) {
CASE_OTHER, IGNORE_BUILTIN, CASE_OTHER)
#undef CASE_OTHER
default:
Builtins::Kind kind = Builtins::KindOf(name);
Builtins::Kind kind = Builtins::KindOf(builtin);
DCHECK_NE(BCH, kind);
if (kind == TFJ || kind == CPP) {
return JSTrampolineDescriptor{};
@ -208,16 +208,16 @@ CallInterfaceDescriptor Builtins::CallInterfaceDescriptorFor(Name name) {
}
// static
Callable Builtins::CallableFor(Isolate* isolate, Name name) {
Handle<Code> code = isolate->builtins()->builtin_handle(name);
return Callable{code, CallInterfaceDescriptorFor(name)};
Callable Builtins::CallableFor(Isolate* isolate, Builtin builtin) {
Handle<Code> code = isolate->builtins()->builtin_handle(builtin);
return Callable{code, CallInterfaceDescriptorFor(builtin)};
}
// static
bool Builtins::HasJSLinkage(int builtin_index) {
Name name = static_cast<Name>(builtin_index);
DCHECK_NE(BCH, Builtins::KindOf(name));
return CallInterfaceDescriptorFor(name) == JSTrampolineDescriptor{};
Builtin index = static_cast<Builtin>(builtin_index);
DCHECK_NE(BCH, Builtins::KindOf(index));
return CallInterfaceDescriptorFor(index) == JSTrampolineDescriptor{};
}
// static
@ -229,7 +229,7 @@ const char* Builtins::name(int index) {
void Builtins::PrintBuiltinCode() {
DCHECK(FLAG_print_builtin_code);
#ifdef ENABLE_DISASSEMBLER
for (int i = 0; i < builtin_count; i++) {
for (int i = 0; i < kBuiltinCount; i++) {
const char* builtin_name = name(i);
Handle<Code> code = builtin_handle(i);
if (PassesFilter(CStrVector(builtin_name),
@ -245,7 +245,7 @@ void Builtins::PrintBuiltinCode() {
void Builtins::PrintBuiltinSize() {
DCHECK(FLAG_print_builtin_size);
for (int i = 0; i < builtin_count; i++) {
for (int i = 0; i < kBuiltinCount; i++) {
const char* builtin_name = name(i);
const char* kind = KindNameOf(i);
Code code = builtin(i);
@ -270,7 +270,7 @@ bool Builtins::IsBuiltinHandle(Handle<HeapObject> maybe_code,
Heap* heap = isolate_->heap();
Address handle_location = maybe_code.address();
Address start = heap->builtin_address(0);
Address end = heap->builtin_address(Builtins::builtin_count);
Address end = heap->builtin_address(Builtins::kBuiltinCount);
if (handle_location >= end) return false;
if (handle_location < start) return false;
*index = static_cast<int>(handle_location - start) >> kSystemPointerSizeLog2;
@ -289,7 +289,7 @@ bool Builtins::IsIsolateIndependentBuiltin(const Code code) {
void Builtins::InitializeBuiltinEntryTable(Isolate* isolate) {
EmbeddedData d = EmbeddedData::FromBlob(isolate);
Address* builtin_entry_table = isolate->builtin_entry_table();
for (int i = 0; i < builtin_count; i++) {
for (int i = 0; i < kBuiltinCount; i++) {
// TODO(jgruber,chromium:1020986): Remove the CHECK once the linked issue is
// resolved.
CHECK(Builtins::IsBuiltinId(isolate->heap()->builtin(i).builtin_index()));
@ -314,8 +314,8 @@ void Builtins::EmitCodeCreateEvents(Isolate* isolate) {
Builtins::name(i)));
}
STATIC_ASSERT(kLastBytecodeHandlerPlusOne == builtin_count);
for (; i < builtin_count; i++) {
STATIC_ASSERT(kLastBytecodeHandlerPlusOne == kBuiltinCount);
for (; i < kBuiltinCount; i++) {
Handle<AbstractCode> code(AbstractCode::cast(Object(builtins[i])), isolate);
interpreter::Bytecode bytecode =
builtin_metadata[i].data.bytecode_and_scale.bytecode;
@ -408,6 +408,10 @@ Handle<ByteArray> Builtins::GenerateOffHeapTrampolineRelocInfo(
return reloc_info;
}
Builtins::Kind Builtins::KindOf(Builtin builtin) {
return KindOf(static_cast<int>(builtin));
}
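(Illustrative sketch, not part of the diff: the one-line KindOf overload above is the bridging pattern this CL relies on while int builtin ids and the new Builtin enum still coexist. A minimal version of that pattern, assuming the plain enum Builtin : int32_t from builtins.h; the helper names ToIntId and FromIntId are invented for illustration.)
// Sketch only: bridging legacy int builtin ids and the new Builtin enum.
constexpr int ToIntId(Builtin builtin) { return static_cast<int>(builtin); }
inline Builtin FromIntId(int id) {
  DCHECK(Builtins::IsBuiltinId(id));  // rejects kNoBuiltinId (-1) and out-of-range ids
  return static_cast<Builtin>(id);
}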
// static
Builtins::Kind Builtins::KindOf(int index) {
DCHECK(IsBuiltinId(index));
@ -473,25 +477,25 @@ bool Builtins::CodeObjectIsExecutable(int builtin_index) {
// currently cause problems if they're not executable. This list should be
// pared down as much as possible.
switch (builtin_index) {
case Builtins::kInterpreterEntryTrampoline:
case Builtins::kCompileLazy:
case Builtins::kCompileLazyDeoptimizedCode:
case Builtins::kCallFunction_ReceiverIsNullOrUndefined:
case Builtins::kCallFunction_ReceiverIsNotNullOrUndefined:
case Builtins::kCallFunction_ReceiverIsAny:
case Builtins::kCallBoundFunction:
case Builtins::kCall_ReceiverIsNullOrUndefined:
case Builtins::kCall_ReceiverIsNotNullOrUndefined:
case Builtins::kCall_ReceiverIsAny:
case Builtins::kHandleApiCall:
case Builtins::kInstantiateAsmJs:
case Builtin::kInterpreterEntryTrampoline:
case Builtin::kCompileLazy:
case Builtin::kCompileLazyDeoptimizedCode:
case Builtin::kCallFunction_ReceiverIsNullOrUndefined:
case Builtin::kCallFunction_ReceiverIsNotNullOrUndefined:
case Builtin::kCallFunction_ReceiverIsAny:
case Builtin::kCallBoundFunction:
case Builtin::kCall_ReceiverIsNullOrUndefined:
case Builtin::kCall_ReceiverIsNotNullOrUndefined:
case Builtin::kCall_ReceiverIsAny:
case Builtin::kHandleApiCall:
case Builtin::kInstantiateAsmJs:
#if V8_ENABLE_WEBASSEMBLY
case Builtins::kGenericJSToWasmWrapper:
case Builtin::kGenericJSToWasmWrapper:
#endif // V8_ENABLE_WEBASSEMBLY
// TODO(delphick): Remove this when calls to it have the trampoline inlined
// or are converted to use kCallBuiltinPointer.
case Builtins::kCEntry_Return1_DontSaveFPRegs_ArgvOnStack_NoBuiltinExit:
case Builtin::kCEntry_Return1_DontSaveFPRegs_ArgvOnStack_NoBuiltinExit:
return true;
default:
#if V8_TARGET_ARCH_MIPS || V8_TARGET_ARCH_MIPS64
@ -505,12 +509,12 @@ bool Builtins::CodeObjectIsExecutable(int builtin_index) {
}
}
Builtins::Name ExampleBuiltinForTorqueFunctionPointerType(
Builtin ExampleBuiltinForTorqueFunctionPointerType(
size_t function_pointer_type_id) {
switch (function_pointer_type_id) {
#define FUNCTION_POINTER_ID_CASE(id, name) \
case id: \
return Builtins::k##name;
return Builtin::k##name;
TORQUE_FUNCTION_POINTER_TYPE_TO_BUILTIN_MAP(FUNCTION_POINTER_ID_CASE)
#undef FUNCTION_POINTER_ID_CASE
default:


@ -34,7 +34,20 @@ static constexpr T FirstFromVarArgs(T x, ...) noexcept {
// Convenience macro to avoid generating named accessors for all builtins.
#define BUILTIN_CODE(isolate, name) \
(isolate)->builtins()->builtin_handle(Builtins::k##name)
(isolate)->builtins()->builtin_handle(Builtin::k##name)
enum Builtin : int32_t {
kNoBuiltinId = -1,
#define DEF_ENUM(Name, ...) k##Name,
BUILTIN_LIST(DEF_ENUM, DEF_ENUM, DEF_ENUM, DEF_ENUM, DEF_ENUM, DEF_ENUM,
DEF_ENUM)
#undef DEF_ENUM
#define EXTRACT_NAME(Name, ...) k##Name,
// Define kFirstBytecodeHandler,
kFirstBytecodeHandler =
FirstFromVarArgs(BUILTIN_LIST_BYTECODE_HANDLERS(EXTRACT_NAME) 0)
#undef EXTRACT_NAME
};
class Builtins {
public:
@ -48,82 +61,71 @@ class Builtins {
// Disassembler support.
const char* Lookup(Address pc);
enum Name : int32_t {
kNoBuiltinId = -1,
#define DEF_ENUM(Name, ...) k##Name,
BUILTIN_LIST(DEF_ENUM, DEF_ENUM, DEF_ENUM, DEF_ENUM, DEF_ENUM, DEF_ENUM,
DEF_ENUM)
#undef DEF_ENUM
builtin_count,
#define EXTRACT_NAME(Name, ...) k##Name,
// Define kFirstBytecodeHandler,
kFirstBytecodeHandler =
FirstFromVarArgs(BUILTIN_LIST_BYTECODE_HANDLERS(EXTRACT_NAME) 0)
#undef EXTRACT_NAME
};
#define ADD_ONE(Name, ...) +1
static constexpr int kBuiltinCount = 0 BUILTIN_LIST(
ADD_ONE, ADD_ONE, ADD_ONE, ADD_ONE, ADD_ONE, ADD_ONE, ADD_ONE);
#undef ADD_ONE
static constexpr int kFirstWideBytecodeHandler =
kFirstBytecodeHandler + kNumberOfBytecodeHandlers;
Builtin::kFirstBytecodeHandler + kNumberOfBytecodeHandlers;
static constexpr int kFirstExtraWideBytecodeHandler =
kFirstWideBytecodeHandler + kNumberOfWideBytecodeHandlers;
static constexpr int kLastBytecodeHandlerPlusOne =
kFirstExtraWideBytecodeHandler + kNumberOfWideBytecodeHandlers;
STATIC_ASSERT(kLastBytecodeHandlerPlusOne == builtin_count);
STATIC_ASSERT(kLastBytecodeHandlerPlusOne == kBuiltinCount);
static constexpr bool IsBuiltinId(int maybe_id) {
STATIC_ASSERT(kNoBuiltinId == -1);
return static_cast<uint32_t>(maybe_id) <
static_cast<uint32_t>(builtin_count);
static_cast<uint32_t>(kBuiltinCount);
}
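(Note, not part of the diff: the single unsigned comparison above is deliberate. Casting maybe_id to uint32_t wraps kNoBuiltinId, which is -1, around to 0xFFFFFFFF, so the one "< kBuiltinCount" check rejects both -1 and any id at or beyond kBuiltinCount; for example IsBuiltinId(-1) and IsBuiltinId(kBuiltinCount) are false, while IsBuiltinId(0) is true.)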
// The different builtin kinds are documented in builtins-definitions.h.
enum Kind { CPP, TFJ, TFC, TFS, TFH, BCH, ASM };
static BytecodeOffset GetContinuationBytecodeOffset(Name name);
static Name GetBuiltinFromBytecodeOffset(BytecodeOffset);
static BytecodeOffset GetContinuationBytecodeOffset(Builtin builtin);
static Builtin GetBuiltinFromBytecodeOffset(BytecodeOffset);
static Name GetRecordWriteStub(RememberedSetAction remembered_set_action,
SaveFPRegsMode fp_mode) {
static constexpr Builtin GetRecordWriteStub(
RememberedSetAction remembered_set_action, SaveFPRegsMode fp_mode) {
switch (remembered_set_action) {
case RememberedSetAction::kEmit:
switch (fp_mode) {
case SaveFPRegsMode::kIgnore:
return Builtins::kRecordWriteEmitRememberedSetIgnoreFP;
return Builtin::kRecordWriteEmitRememberedSetIgnoreFP;
case SaveFPRegsMode::kSave:
return Builtins::kRecordWriteEmitRememberedSetSaveFP;
return Builtin::kRecordWriteEmitRememberedSetSaveFP;
}
case RememberedSetAction::kOmit:
switch (fp_mode) {
case SaveFPRegsMode::kIgnore:
return Builtins::kRecordWriteOmitRememberedSetIgnoreFP;
return Builtin::kRecordWriteOmitRememberedSetIgnoreFP;
case SaveFPRegsMode::kSave:
return Builtins::kRecordWriteOmitRememberedSetSaveFP;
return Builtin::kRecordWriteOmitRememberedSetSaveFP;
}
}
UNREACHABLE();
}
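(Usage sketch, not part of the diff: since GetRecordWriteStub is now constexpr, a caller can resolve the stub id at compile time; the expected value below is read straight off the kEmit/kSave case in the switch above, and kStub is an invented name.)
// Sketch only:
constexpr Builtin kStub = Builtins::GetRecordWriteStub(
    RememberedSetAction::kEmit, SaveFPRegsMode::kSave);
static_assert(kStub == Builtin::kRecordWriteEmitRememberedSetSaveFP,
              "matches the kEmit/kSave case above");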
static constexpr Name GetEphemeronKeyBarrierStub(SaveFPRegsMode fp_mode) {
static constexpr Builtin GetEphemeronKeyBarrierStub(SaveFPRegsMode fp_mode) {
switch (fp_mode) {
case SaveFPRegsMode::kIgnore:
return Builtins::kEphemeronKeyBarrierIgnoreFP;
return Builtin::kEphemeronKeyBarrierIgnoreFP;
case SaveFPRegsMode::kSave:
return Builtins::kEphemeronKeyBarrierSaveFP;
return Builtin::kEphemeronKeyBarrierSaveFP;
}
}
#ifdef V8_IS_TSAN
static Name GetTSANRelaxedStoreStub(SaveFPRegsMode fp_mode, int size) {
static Builtin GetTSANRelaxedStoreStub(SaveFPRegsMode fp_mode, int size) {
if (size == kInt32Size) {
return fp_mode == SaveFPRegsMode::kIgnore
? Builtins::kTSANRelaxedStore32IgnoreFP
: Builtins::kTSANRelaxedStore32SaveFP;
? Builtin::kTSANRelaxedStore32IgnoreFP
: Builtin::kTSANRelaxedStore32SaveFP;
} else {
CHECK_EQ(size, kInt64Size);
return fp_mode == SaveFPRegsMode::kIgnore
? Builtins::kTSANRelaxedStore64IgnoreFP
: Builtins::kTSANRelaxedStore64SaveFP;
? Builtin::kTSANRelaxedStore64IgnoreFP
: Builtin::kTSANRelaxedStore64SaveFP;
}
}
#endif // V8_IS_TSAN
@ -142,11 +144,12 @@ class Builtins {
V8_EXPORT_PRIVATE Code builtin(int index);
V8_EXPORT_PRIVATE Handle<Code> builtin_handle(int index);
static CallInterfaceDescriptor CallInterfaceDescriptorFor(Name name);
V8_EXPORT_PRIVATE static Callable CallableFor(Isolate* isolate, Name name);
static CallInterfaceDescriptor CallInterfaceDescriptorFor(Builtin builtin);
V8_EXPORT_PRIVATE static Callable CallableFor(Isolate* isolate,
Builtin builtin);
static bool HasJSLinkage(int index);
static int GetStackParameterCount(Name name);
static int GetStackParameterCount(Builtin builtin);
static const char* name(int index);
@ -159,6 +162,7 @@ class Builtins {
static Address CppEntryOf(int index);
static Kind KindOf(int index);
static Kind KindOf(Builtin builtin);
static const char* KindNameOf(int index);
static bool IsCpp(int index);
@ -301,7 +305,7 @@ class Builtins {
friend class SetupIsolateDelegate;
};
Builtins::Name ExampleBuiltinForTorqueFunctionPointerType(
Builtin ExampleBuiltinForTorqueFunctionPointerType(
size_t function_pointer_type_id);
} // namespace internal
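(Call-site sketch, not part of the diff: what the rename amounts to for code including this header. The nested Builtins::Name enumerators become top-level Builtin enumerators, and the old builtin_count enumerator becomes the kBuiltinCount constant; the local name lazy is invented for illustration.)
// Before: isolate->builtins()->builtin_handle(Builtins::kCompileLazy);
// After:
Handle<Code> lazy = isolate->builtins()->builtin_handle(Builtin::kCompileLazy);
// Before: for (int i = 0; i < Builtins::builtin_count; i++) { ... }
// After:
for (int i = 0; i < Builtins::kBuiltinCount; i++) {
  // visit every builtin; indices remain plain ints in this intermediate state
}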


@ -340,7 +340,7 @@ namespace {
// using JSEntryFunction = GeneratedCode<Address(
// Address root_register_value, MicrotaskQueue* microtask_queue)>;
void Generate_JSEntryVariant(MacroAssembler* masm, StackFrame::Type type,
Builtins::Name entry_trampoline) {
Builtin entry_trampoline) {
Label invoke, handler_entry, exit;
Label not_outermost_js, not_outermost_js_2;
@ -452,18 +452,17 @@ void Generate_JSEntryVariant(MacroAssembler* masm, StackFrame::Type type,
} // namespace
void Builtins::Generate_JSEntry(MacroAssembler* masm) {
Generate_JSEntryVariant(masm, StackFrame::ENTRY,
Builtins::kJSEntryTrampoline);
Generate_JSEntryVariant(masm, StackFrame::ENTRY, Builtin::kJSEntryTrampoline);
}
void Builtins::Generate_JSConstructEntry(MacroAssembler* masm) {
Generate_JSEntryVariant(masm, StackFrame::CONSTRUCT_ENTRY,
Builtins::kJSConstructEntryTrampoline);
Builtin::kJSConstructEntryTrampoline);
}
void Builtins::Generate_JSRunMicrotasksEntry(MacroAssembler* masm) {
Generate_JSEntryVariant(masm, StackFrame::ENTRY,
Builtins::kRunMicrotasksTrampoline);
Builtin::kRunMicrotasksTrampoline);
}
static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
@ -1641,8 +1640,8 @@ void Builtins::Generate_InterpreterEnterAtBytecode(MacroAssembler* masm) {
}
// static
void Builtins::Generate_BaselineOutOfLinePrologue(MacroAssembler* masm) {
auto descriptor = Builtins::CallInterfaceDescriptorFor(
Builtins::kBaselineOutOfLinePrologue);
auto descriptor =
Builtins::CallInterfaceDescriptorFor(Builtin::kBaselineOutOfLinePrologue);
Register arg_count = descriptor.GetRegisterParameter(
BaselineOutOfLinePrologueDescriptor::kJavaScriptCallArgCount);
Register frame_size = descriptor.GetRegisterParameter(


@ -348,7 +348,7 @@ constexpr int kPushedStackSpace =
//
// Passes through a0, a1, a2, a3 and stack to JSEntryTrampoline.
void Generate_JSEntryVariant(MacroAssembler* masm, StackFrame::Type type,
Builtins::Name entry_trampoline) {
Builtin entry_trampoline) {
Label invoke, handler_entry, exit;
int pushed_stack_space = kCArgsSlotsSize;
@ -500,18 +500,17 @@ void Generate_JSEntryVariant(MacroAssembler* masm, StackFrame::Type type,
} // namespace
void Builtins::Generate_JSEntry(MacroAssembler* masm) {
Generate_JSEntryVariant(masm, StackFrame::ENTRY,
Builtins::kJSEntryTrampoline);
Generate_JSEntryVariant(masm, StackFrame::ENTRY, Builtin::kJSEntryTrampoline);
}
void Builtins::Generate_JSConstructEntry(MacroAssembler* masm) {
Generate_JSEntryVariant(masm, StackFrame::CONSTRUCT_ENTRY,
Builtins::kJSConstructEntryTrampoline);
Builtin::kJSConstructEntryTrampoline);
}
void Builtins::Generate_JSRunMicrotasksEntry(MacroAssembler* masm) {
Generate_JSEntryVariant(masm, StackFrame::ENTRY,
Builtins::kRunMicrotasksTrampoline);
Builtin::kRunMicrotasksTrampoline);
}
static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
@ -1508,7 +1507,7 @@ void Generate_ContinueToBuiltinHelper(MacroAssembler* masm,
__ Addu(sp, sp,
Operand(BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp));
__ Pop(ra);
__ LoadEntryFromBuiltinIndex(t0);
__ LoadEntryFromBuiltin(t0);
__ Jump(t0);
}
} // namespace


@ -481,7 +481,7 @@ namespace {
// using JSEntryFunction = GeneratedCode<Address(
// Address root_register_value, MicrotaskQueue* microtask_queue)>;
void Generate_JSEntryVariant(MacroAssembler* masm, StackFrame::Type type,
Builtins::Name entry_trampoline) {
Builtin entry_trampoline) {
Label invoke, handler_entry, exit;
{
@ -665,18 +665,17 @@ void Generate_JSEntryVariant(MacroAssembler* masm, StackFrame::Type type,
} // namespace
void Builtins::Generate_JSEntry(MacroAssembler* masm) {
Generate_JSEntryVariant(masm, StackFrame::ENTRY,
Builtins::kJSEntryTrampoline);
Generate_JSEntryVariant(masm, StackFrame::ENTRY, Builtin::kJSEntryTrampoline);
}
void Builtins::Generate_JSConstructEntry(MacroAssembler* masm) {
Generate_JSEntryVariant(masm, StackFrame::CONSTRUCT_ENTRY,
Builtins::kJSConstructEntryTrampoline);
Builtin::kJSConstructEntryTrampoline);
}
void Builtins::Generate_JSRunMicrotasksEntry(MacroAssembler* masm) {
Generate_JSEntryVariant(masm, StackFrame::ENTRY,
Builtins::kRunMicrotasksTrampoline);
Builtin::kRunMicrotasksTrampoline);
}
static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
@ -1526,7 +1525,7 @@ void Generate_ContinueToBuiltinHelper(MacroAssembler* masm,
__ Daddu(sp, sp,
Operand(BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp));
__ Pop(ra);
__ LoadEntryFromBuiltinIndex(t0);
__ LoadEntryFromBuiltin(t0);
__ Jump(t0);
}
} // namespace


@ -499,7 +499,7 @@ namespace {
// using JSEntryFunction = GeneratedCode<Address(
// Address root_register_value, MicrotaskQueue* microtask_queue)>;
void Generate_JSEntryVariant(MacroAssembler* masm, StackFrame::Type type,
Builtins::Name entry_trampoline) {
Builtin entry_trampoline) {
// The register state is either:
// r3: root_register_value
// r4: code entry
@ -669,18 +669,17 @@ void Generate_JSEntryVariant(MacroAssembler* masm, StackFrame::Type type,
} // namespace
void Builtins::Generate_JSEntry(MacroAssembler* masm) {
Generate_JSEntryVariant(masm, StackFrame::ENTRY,
Builtins::kJSEntryTrampoline);
Generate_JSEntryVariant(masm, StackFrame::ENTRY, Builtin::kJSEntryTrampoline);
}
void Builtins::Generate_JSConstructEntry(MacroAssembler* masm) {
Generate_JSEntryVariant(masm, StackFrame::CONSTRUCT_ENTRY,
Builtins::kJSConstructEntryTrampoline);
Builtin::kJSConstructEntryTrampoline);
}
void Builtins::Generate_JSRunMicrotasksEntry(MacroAssembler* masm) {
Generate_JSEntryVariant(masm, StackFrame::ENTRY,
Builtins::kRunMicrotasksTrampoline);
Builtin::kRunMicrotasksTrampoline);
}
static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,


@ -520,7 +520,7 @@ namespace {
// using JSEntryFunction = GeneratedCode<Address(
// Address root_register_value, MicrotaskQueue* microtask_queue)>;
void Generate_JSEntryVariant(MacroAssembler* masm, StackFrame::Type type,
Builtins::Name entry_trampoline) {
Builtin entry_trampoline) {
Label invoke, handler_entry, exit;
{
@ -697,18 +697,17 @@ void Generate_JSEntryVariant(MacroAssembler* masm, StackFrame::Type type,
} // namespace
void Builtins::Generate_JSEntry(MacroAssembler* masm) {
Generate_JSEntryVariant(masm, StackFrame::ENTRY,
Builtins::kJSEntryTrampoline);
Generate_JSEntryVariant(masm, StackFrame::ENTRY, Builtin::kJSEntryTrampoline);
}
void Builtins::Generate_JSConstructEntry(MacroAssembler* masm) {
Generate_JSEntryVariant(masm, StackFrame::CONSTRUCT_ENTRY,
Builtins::kJSConstructEntryTrampoline);
Builtin::kJSConstructEntryTrampoline);
}
void Builtins::Generate_JSRunMicrotasksEntry(MacroAssembler* masm) {
Generate_JSEntryVariant(masm, StackFrame::ENTRY,
Builtins::kRunMicrotasksTrampoline);
Builtin::kRunMicrotasksTrampoline);
}
static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
@ -1065,8 +1064,8 @@ static void MaybeOptimizeCodeOrTailCallOptimizedCodeSlot(
void Builtins::Generate_BaselineOutOfLinePrologue(MacroAssembler* masm) {
UseScratchRegisterScope temps(masm);
temps.Include(kScratchReg.bit() | kScratchReg2.bit());
auto descriptor = Builtins::CallInterfaceDescriptorFor(
Builtins::kBaselineOutOfLinePrologue);
auto descriptor =
Builtins::CallInterfaceDescriptorFor(Builtin::kBaselineOutOfLinePrologue);
Register closure = descriptor.GetRegisterParameter(
BaselineOutOfLinePrologueDescriptor::kClosure);
// Load the feedback vector from the closure.
@ -1804,7 +1803,7 @@ void Generate_ContinueToBuiltinHelper(MacroAssembler* masm,
__ Add64(sp, sp,
Operand(BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp));
__ Pop(ra);
__ LoadEntryFromBuiltinIndex(t6);
__ LoadEntryFromBuiltin(t6);
__ Jump(t6);
}
} // namespace


@ -496,7 +496,7 @@ constexpr int kPushedStackSpace =
// using JSEntryFunction = GeneratedCode<Address(
// Address root_register_value, MicrotaskQueue* microtask_queue)>;
void Generate_JSEntryVariant(MacroAssembler* masm, StackFrame::Type type,
Builtins::Name entry_trampoline) {
Builtin entry_trampoline) {
// The register state is either:
// r2: root register value
// r3: code entry
@ -704,18 +704,17 @@ void Generate_JSEntryVariant(MacroAssembler* masm, StackFrame::Type type,
} // namespace
void Builtins::Generate_JSEntry(MacroAssembler* masm) {
Generate_JSEntryVariant(masm, StackFrame::ENTRY,
Builtins::kJSEntryTrampoline);
Generate_JSEntryVariant(masm, StackFrame::ENTRY, Builtin::kJSEntryTrampoline);
}
void Builtins::Generate_JSConstructEntry(MacroAssembler* masm) {
Generate_JSEntryVariant(masm, StackFrame::CONSTRUCT_ENTRY,
Builtins::kJSConstructEntryTrampoline);
Builtin::kJSConstructEntryTrampoline);
}
void Builtins::Generate_JSRunMicrotasksEntry(MacroAssembler* masm) {
Generate_JSEntryVariant(masm, StackFrame::ENTRY,
Builtins::kRunMicrotasksTrampoline);
Builtin::kRunMicrotasksTrampoline);
}
static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,


@ -101,9 +101,9 @@ Code BuildWithMacroAssembler(Isolate* isolate, int32_t builtin_index,
int handler_table_offset = 0;
// JSEntry builtins are a special case and need to generate a handler table.
DCHECK_EQ(Builtins::KindOf(Builtins::kJSEntry), Builtins::ASM);
DCHECK_EQ(Builtins::KindOf(Builtins::kJSConstructEntry), Builtins::ASM);
DCHECK_EQ(Builtins::KindOf(Builtins::kJSRunMicrotasksEntry), Builtins::ASM);
DCHECK_EQ(Builtins::KindOf(Builtin::kJSEntry), Builtins::ASM);
DCHECK_EQ(Builtins::KindOf(Builtin::kJSConstructEntry), Builtins::ASM);
DCHECK_EQ(Builtins::KindOf(Builtin::kJSRunMicrotasksEntry), Builtins::ASM);
if (Builtins::IsJSEntryVariant(builtin_index)) {
handler_table_offset = HandlerTable::EmitReturnTableStart(&masm);
HandlerTable::EmitReturnEntry(
@ -210,7 +210,7 @@ void SetupIsolateDelegate::PopulateWithPlaceholders(Isolate* isolate) {
// support circular references between builtins.
Builtins* builtins = isolate->builtins();
HandleScope scope(isolate);
for (int i = 0; i < Builtins::builtin_count; i++) {
for (int i = 0; i < Builtins::kBuiltinCount; i++) {
Handle<Code> placeholder = BuildPlaceholder(isolate, i);
AddBuiltin(builtins, i, *placeholder);
}
@ -227,7 +227,7 @@ void SetupIsolateDelegate::ReplacePlaceholders(Isolate* isolate) {
RelocInfo::ModeMask(RelocInfo::FULL_EMBEDDED_OBJECT) |
RelocInfo::ModeMask(RelocInfo::COMPRESSED_EMBEDDED_OBJECT) |
RelocInfo::ModeMask(RelocInfo::RELATIVE_CODE_TARGET);
for (int i = 0; i < Builtins::builtin_count; i++) {
for (int i = 0; i < Builtins::kBuiltinCount; i++) {
Code code = builtins->builtin(i);
bool flush_icache = false;
for (RelocIterator it(code, kRelocMask); !it.done(); it.next()) {
@ -330,18 +330,18 @@ void SetupIsolateDelegate::SetupBuiltinsInternal(Isolate* isolate) {
#undef BUILD_TFH
#undef BUILD_BCH
#undef BUILD_ASM
CHECK_EQ(Builtins::builtin_count, index);
CHECK_EQ(Builtins::kBuiltinCount, index);
ReplacePlaceholders(isolate);
#define SET_PROMISE_REJECTION_PREDICTION(Name) \
builtins->builtin(Builtins::k##Name).set_is_promise_rejection(true);
builtins->builtin(Builtin::k##Name).set_is_promise_rejection(true);
BUILTIN_PROMISE_REJECTION_PREDICTION_LIST(SET_PROMISE_REJECTION_PREDICTION)
#undef SET_PROMISE_REJECTION_PREDICTION
#define SET_EXCEPTION_CAUGHT_PREDICTION(Name) \
builtins->builtin(Builtins::k##Name).set_is_exception_caught(true);
builtins->builtin(Builtin::k##Name).set_is_exception_caught(true);
BUILTIN_EXCEPTION_CAUGHT_PREDICTION_LIST(SET_EXCEPTION_CAUGHT_PREDICTION)
#undef SET_EXCEPTION_CAUGHT_PREDICTION
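
The two flag-setting macros above rely on token pasting: each name from the prediction list is glued onto the Builtin::k prefix to form an enum constant. A minimal, self-contained sketch of that pattern, with toy names standing in for V8's actual lists and flags:

#include <cstdio>

enum Builtin : int { kPromiseReject, kAsyncFunctionReject };

// Toy stand-ins for the real prediction list and flag setter; only the
// Builtin::k##Name token-pasting mechanics are illustrated here.
#define PREDICTION_LIST(V) V(PromiseReject) V(AsyncFunctionReject)
#define SET_PREDICTION(Name) \
  std::printf("Builtin::k" #Name " -> id %d\n", Builtin::k##Name);

int main() {
  PREDICTION_LIST(SET_PREDICTION)
  return 0;
}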


@ -7,11 +7,11 @@
namespace typed_array {
const kBuiltinNameFrom: constexpr string = '%TypedArray%.from';
type BuiltinsName extends int31 constexpr 'Builtins::Name';
type BuiltinsName extends int31 constexpr 'Builtin';
const kTypedArrayPrototypeValues: constexpr BuiltinsName
generates 'Builtins::kTypedArrayPrototypeValues';
generates 'Builtin::kTypedArrayPrototypeValues';
const kArrayPrototypeValues: constexpr BuiltinsName
generates 'Builtins::kArrayPrototypeValues';
generates 'Builtin::kArrayPrototypeValues';
extern builtin IterableToList(implicit context: Context)(JSAny, JSAny): JSArray;
@ -90,7 +90,7 @@ TypedArrayFrom(js-implicit context: NativeContext, receiver: JSAny)(
typeswitch (source) {
case (sourceArray: JSArray): {
// Check that the iterator function is exactly
// Builtins::kArrayPrototypeValues.
// Builtin::kArrayPrototypeValues.
if (!TaggedEqual(
iteratorFn.shared_function_info.function_data,
SmiConstant(kArrayPrototypeValues))) {
@ -108,7 +108,7 @@ TypedArrayFrom(js-implicit context: NativeContext, receiver: JSAny)(
if (IsDetachedBuffer(sourceBuffer)) goto UseUserProvidedIterator;
// Check that the iterator function is exactly
// Builtins::kTypedArrayPrototypeValues.
// Builtin::kTypedArrayPrototypeValues.
if (!TaggedEqual(
iteratorFn.shared_function_info.function_data,
SmiConstant(kTypedArrayPrototypeValues)))


@ -333,7 +333,7 @@ namespace {
// using JSEntryFunction = GeneratedCode<Address(
// Address root_register_value, MicrotaskQueue* microtask_queue)>;
void Generate_JSEntryVariant(MacroAssembler* masm, StackFrame::Type type,
Builtins::Name entry_trampoline) {
Builtin entry_trampoline) {
Label invoke, handler_entry, exit;
Label not_outermost_js, not_outermost_js_2;
@ -506,18 +506,17 @@ void Generate_JSEntryVariant(MacroAssembler* masm, StackFrame::Type type,
} // namespace
void Builtins::Generate_JSEntry(MacroAssembler* masm) {
Generate_JSEntryVariant(masm, StackFrame::ENTRY,
Builtins::kJSEntryTrampoline);
Generate_JSEntryVariant(masm, StackFrame::ENTRY, Builtin::kJSEntryTrampoline);
}
void Builtins::Generate_JSConstructEntry(MacroAssembler* masm) {
Generate_JSEntryVariant(masm, StackFrame::CONSTRUCT_ENTRY,
Builtins::kJSConstructEntryTrampoline);
Builtin::kJSConstructEntryTrampoline);
}
void Builtins::Generate_JSRunMicrotasksEntry(MacroAssembler* masm) {
Generate_JSEntryVariant(masm, StackFrame::ENTRY,
Builtins::kRunMicrotasksTrampoline);
Builtin::kRunMicrotasksTrampoline);
}
static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
@ -1634,8 +1633,8 @@ void Builtins::Generate_BaselineOutOfLinePrologue(MacroAssembler* masm) {
}
#endif
auto descriptor = Builtins::CallInterfaceDescriptorFor(
Builtins::kBaselineOutOfLinePrologue);
auto descriptor =
Builtins::CallInterfaceDescriptorFor(Builtin::kBaselineOutOfLinePrologue);
Register closure = descriptor.GetRegisterParameter(
BaselineOutOfLinePrologueDescriptor::kClosure);
// Load the feedback vector from the closure.


@ -166,7 +166,7 @@ void TurboAssembler::Jump(Handle<Code> code, RelocInfo::Mode rmode,
DCHECK_IMPLIES(options().use_pc_relative_calls_and_jumps,
Builtins::IsIsolateIndependentBuiltin(*code));
int builtin_index = Builtins::kNoBuiltinId;
int builtin_index = Builtin::kNoBuiltinId;
bool target_is_builtin =
isolate()->builtins()->IsBuiltinHandle(code, &builtin_index);
@ -181,7 +181,7 @@ void TurboAssembler::Jump(Handle<Code> code, RelocInfo::Mode rmode,
UseScratchRegisterScope temps(this);
Register scratch = temps.Acquire();
int offset = IsolateData::builtin_entry_slot_offset(
static_cast<Builtins::Name>(code->builtin_index()));
static_cast<Builtin>(code->builtin_index()));
ldr(scratch, MemOperand(kRootRegister, offset));
Jump(scratch, cond);
return;
@ -258,7 +258,7 @@ void TurboAssembler::Call(Handle<Code> code, RelocInfo::Mode rmode,
DCHECK_IMPLIES(options().use_pc_relative_calls_and_jumps,
Builtins::IsIsolateIndependentBuiltin(*code));
int builtin_index = Builtins::kNoBuiltinId;
int builtin_index = Builtin::kNoBuiltinId;
bool target_is_builtin =
isolate()->builtins()->IsBuiltinHandle(code, &builtin_index);
@ -271,7 +271,7 @@ void TurboAssembler::Call(Handle<Code> code, RelocInfo::Mode rmode,
// creation at runtime. At this point, Code space isn't restricted to a
// size s.t. pc-relative calls may be used.
int offset = IsolateData::builtin_entry_slot_offset(
static_cast<Builtins::Name>(code->builtin_index()));
static_cast<Builtin>(code->builtin_index()));
ldr(ip, MemOperand(kRootRegister, offset));
Call(ip, cond);
return;
@ -306,13 +306,12 @@ void TurboAssembler::CallBuiltinByIndex(Register builtin_index) {
Call(builtin_index);
}
void TurboAssembler::LoadEntryFromBuiltinIndex(Builtins::Name builtin_index,
Register destination) {
ldr(destination, EntryFromBuiltinIndexAsOperand(builtin_index));
void TurboAssembler::LoadEntryFromBuiltin(Builtin builtin_index,
Register destination) {
ldr(destination, EntryFromBuiltinAsOperand(builtin_index));
}
MemOperand TurboAssembler::EntryFromBuiltinIndexAsOperand(
Builtins::Name builtin_index) {
MemOperand TurboAssembler::EntryFromBuiltinAsOperand(Builtin builtin_index) {
DCHECK(root_array_available());
return MemOperand(kRootRegister,
IsolateData::builtin_entry_slot_offset(builtin_index));
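
EntryFromBuiltinAsOperand turns a builtin id into a root-register-relative memory operand. A rough sketch of the offset arithmetic it relies on follows; the table offset and slot size below are assumptions chosen for illustration, not the real IsolateData layout:

#include <cstdint>
#include <cstdio>

enum Builtin : int32_t { kAbort, kDoubleToI, kToNumber };

// Assumed layout: a table of builtin entry addresses at a fixed offset from
// the root register, one pointer-sized slot per builtin id.
constexpr int32_t kBuiltinEntryTableOffset = 0x200;  // hypothetical value
constexpr int32_t kSystemPointerSize = 8;

constexpr int32_t builtin_entry_slot_offset(Builtin id) {
  return kBuiltinEntryTableOffset +
         static_cast<int32_t>(id) * kSystemPointerSize;
}

int main() {
  // The generated load is then [root_register + this offset].
  std::printf("kDoubleToI slot offset: %d\n",
              builtin_entry_slot_offset(Builtin::kDoubleToI));
}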
@ -1917,7 +1916,7 @@ void TurboAssembler::TruncateDoubleToI(Isolate* isolate, Zone* zone,
if (false) {
#endif // V8_ENABLE_WEBASSEMBLY
} else if (options().inline_offheap_trampolines) {
CallBuiltin(Builtins::kDoubleToI);
CallBuiltin(Builtin::kDoubleToI);
} else {
Call(BUILTIN_CODE(isolate, DoubleToI), RelocInfo::CODE_TARGET);
}
@ -2609,9 +2608,9 @@ void TurboAssembler::ResetSpeculationPoisonRegister() {
mov(kSpeculationPoisonRegister, Operand(-1));
}
void TurboAssembler::CallForDeoptimization(Builtins::Name target, int,
Label* exit, DeoptimizeKind kind,
Label* ret, Label*) {
void TurboAssembler::CallForDeoptimization(Builtin target, int, Label* exit,
DeoptimizeKind kind, Label* ret,
Label*) {
BlockConstPoolScope block_const_pool(this);
ldr(ip, MemOperand(kRootRegister,
IsolateData::builtin_entry_slot_offset(target)));


@ -310,9 +310,8 @@ class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase {
bool check_constant_pool = true);
void Call(Label* target);
MemOperand EntryFromBuiltinIndexAsOperand(Builtins::Name builtin_index);
void LoadEntryFromBuiltinIndex(Builtins::Name builtin_index,
Register destination);
MemOperand EntryFromBuiltinAsOperand(Builtin builtin_index);
void LoadEntryFromBuiltin(Builtin builtin_index, Register destination);
// Load the builtin given by the Smi in |builtin_index| into the same
// register.
void LoadEntryFromBuiltinIndex(Register builtin_index);
@ -329,7 +328,7 @@ class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase {
// The return address on the stack is used by frame iteration.
void StoreReturnAddressAndCall(Register target);
void CallForDeoptimization(Builtins::Name target, int deopt_id, Label* exit,
void CallForDeoptimization(Builtin target, int deopt_id, Label* exit,
DeoptimizeKind kind, Label* ret,
Label* jump_deoptimization_entry_label);


@ -1829,7 +1829,7 @@ void TurboAssembler::Jump(Handle<Code> code, RelocInfo::Mode rmode,
Builtins::IsIsolateIndependentBuiltin(*code));
if (options().inline_offheap_trampolines) {
int builtin_index = Builtins::kNoBuiltinId;
int builtin_index = Builtin::kNoBuiltinId;
if (isolate()->builtins()->IsBuiltinHandle(code, &builtin_index)) {
// Inline the trampoline.
CHECK_EQ(cond, Condition::al); // Implement if necessary.
@ -1877,7 +1877,7 @@ void TurboAssembler::Call(Handle<Code> code, RelocInfo::Mode rmode) {
BlockPoolsScope scope(this);
if (options().inline_offheap_trampolines) {
int builtin_index = Builtins::kNoBuiltinId;
int builtin_index = Builtin::kNoBuiltinId;
if (isolate()->builtins()->IsBuiltinHandle(code, &builtin_index)) {
// Inline the trampoline.
CallBuiltin(builtin_index);
@ -1924,13 +1924,12 @@ void TurboAssembler::LoadEntryFromBuiltinIndex(Register builtin_index) {
}
}
void TurboAssembler::LoadEntryFromBuiltinIndex(Builtins::Name builtin_index,
Register destination) {
Ldr(destination, EntryFromBuiltinIndexAsOperand(builtin_index));
void TurboAssembler::LoadEntryFromBuiltin(Builtin builtin_index,
Register destination) {
Ldr(destination, EntryFromBuiltinAsOperand(builtin_index));
}
MemOperand TurboAssembler::EntryFromBuiltinIndexAsOperand(
Builtins::Name builtin_index) {
MemOperand TurboAssembler::EntryFromBuiltinAsOperand(Builtin builtin_index) {
DCHECK(root_array_available());
return MemOperand(kRootRegister,
IsolateData::builtin_entry_slot_offset(builtin_index));
@ -1944,7 +1943,7 @@ void TurboAssembler::CallBuiltinByIndex(Register builtin_index) {
void TurboAssembler::CallBuiltin(int builtin_index) {
DCHECK(Builtins::IsBuiltinId(builtin_index));
RecordCommentForOffHeapTrampoline(builtin_index);
CHECK_NE(builtin_index, Builtins::kNoBuiltinId);
CHECK_NE(builtin_index, Builtin::kNoBuiltinId);
if (options().short_builtin_calls) {
EmbeddedData d = EmbeddedData::FromBlob(isolate());
Address entry = d.InstructionStartOfBuiltin(builtin_index);
@ -1964,7 +1963,7 @@ void TurboAssembler::CallBuiltin(int builtin_index) {
void TurboAssembler::TailCallBuiltin(int builtin_index) {
DCHECK(Builtins::IsBuiltinId(builtin_index));
RecordCommentForOffHeapTrampoline(builtin_index);
CHECK_NE(builtin_index, Builtins::kNoBuiltinId);
CHECK_NE(builtin_index, Builtin::kNoBuiltinId);
if (options().short_builtin_calls) {
EmbeddedData d = EmbeddedData::FromBlob(isolate());
Address entry = d.InstructionStartOfBuiltin(builtin_index);
@ -2100,8 +2099,8 @@ bool TurboAssembler::IsNearCallOffset(int64_t offset) {
}
void TurboAssembler::CallForDeoptimization(
Builtins::Name target, int deopt_id, Label* exit, DeoptimizeKind kind,
Label* ret, Label* jump_deoptimization_entry_label) {
Builtin target, int deopt_id, Label* exit, DeoptimizeKind kind, Label* ret,
Label* jump_deoptimization_entry_label) {
BlockPoolsScope scope(this);
bl(jump_deoptimization_entry_label);
DCHECK_EQ(SizeOfCodeGeneratedSince(exit),
@ -2496,7 +2495,7 @@ void TurboAssembler::TruncateDoubleToI(Isolate* isolate, Zone* zone,
if (false) {
#endif // V8_ENABLE_WEBASSEMBLY
} else if (options().inline_offheap_trampolines) {
CallBuiltin(Builtins::kDoubleToI);
CallBuiltin(Builtin::kDoubleToI);
} else {
Call(BUILTIN_CODE(isolate, DoubleToI), RelocInfo::CODE_TARGET);
}


@ -973,17 +973,16 @@ class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase {
// Load the builtin given by the Smi in |builtin_index| into the same
// register.
void LoadEntryFromBuiltinIndex(Register builtin_index);
void LoadEntryFromBuiltinIndex(Builtins::Name builtin_index,
Register destination);
MemOperand EntryFromBuiltinIndexAsOperand(Builtins::Name builtin_index);
void LoadEntryFromBuiltin(Builtin builtin_index, Register destination);
MemOperand EntryFromBuiltinAsOperand(Builtin builtin_index);
void CallBuiltinByIndex(Register builtin_index);
void CallBuiltin(Builtins::Name builtin) {
// TODO(11527): drop the int overload in favour of the Builtins::Name one.
void CallBuiltin(Builtin builtin) {
// TODO(11527): drop the int overload in favour of the Builtin one.
return CallBuiltin(static_cast<int>(builtin));
}
void CallBuiltin(int builtin_index);
void TailCallBuiltin(Builtins::Name builtin) {
// TODO(11527): drop the int overload in favour of the Builtins::Name one.
void TailCallBuiltin(Builtin builtin) {
// TODO(11527): drop the int overload in favour of the Builtin one.
return TailCallBuiltin(static_cast<int>(builtin));
}
void TailCallBuiltin(int builtin_index);
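
The CallBuiltin/TailCallBuiltin pairs above keep the old int overload alive and make the new Builtin overload forward to it, so call sites can migrate incrementally before the follow-up CL drops the int path. A stand-alone sketch of that shape, using made-up types rather than the real TurboAssembler:

#include <cstdio>

enum Builtin : int { kNoBuiltinId = -1, kAbort, kDoubleToI };

struct Assembler {
  // New, strongly named entry point: forwards to the legacy int overload.
  void CallBuiltin(Builtin builtin) { CallBuiltin(static_cast<int>(builtin)); }

  // Legacy entry point, kept until every caller passes a Builtin.
  void CallBuiltin(int builtin_index) {
    std::printf("emit call to builtin %d\n", builtin_index);
  }
};

int main() {
  Assembler masm;
  masm.CallBuiltin(Builtin::kDoubleToI);  // migrated call site
  masm.CallBuiltin(0);                    // legacy call site (kAbort)
}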
@ -998,7 +997,7 @@ class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase {
// The return address on the stack is used by frame iteration.
void StoreReturnAddressAndCall(Register target);
void CallForDeoptimization(Builtins::Name target, int deopt_id, Label* exit,
void CallForDeoptimization(Builtin target, int deopt_id, Label* exit,
DeoptimizeKind kind, Label* ret,
Label* jump_deoptimization_entry_label);


@ -70,61 +70,61 @@ Handle<Code> CodeFactory::CEntry(Isolate* isolate, int result_size,
// static
Callable CodeFactory::ApiGetter(Isolate* isolate) {
return Builtins::CallableFor(isolate, Builtins::kCallApiGetter);
return Builtins::CallableFor(isolate, Builtin::kCallApiGetter);
}
// static
Callable CodeFactory::CallApiCallback(Isolate* isolate) {
return Builtins::CallableFor(isolate, Builtins::kCallApiCallback);
return Builtins::CallableFor(isolate, Builtin::kCallApiCallback);
}
// static
Callable CodeFactory::LoadGlobalIC(Isolate* isolate, TypeofMode typeof_mode) {
return typeof_mode == TypeofMode::kNotInside
? Builtins::CallableFor(isolate, Builtins::kLoadGlobalICTrampoline)
? Builtins::CallableFor(isolate, Builtin::kLoadGlobalICTrampoline)
: Builtins::CallableFor(
isolate, Builtins::kLoadGlobalICInsideTypeofTrampoline);
isolate, Builtin::kLoadGlobalICInsideTypeofTrampoline);
}
// static
Callable CodeFactory::LoadGlobalICInOptimizedCode(Isolate* isolate,
TypeofMode typeof_mode) {
return typeof_mode == TypeofMode::kNotInside
? Builtins::CallableFor(isolate, Builtins::kLoadGlobalIC)
? Builtins::CallableFor(isolate, Builtin::kLoadGlobalIC)
: Builtins::CallableFor(isolate,
Builtins::kLoadGlobalICInsideTypeof);
Builtin::kLoadGlobalICInsideTypeof);
}
Callable CodeFactory::StoreOwnIC(Isolate* isolate) {
// TODO(ishell): Currently we use StoreOwnIC only for storing properties that
// already exist in the boilerplate therefore we can use StoreIC.
return Builtins::CallableFor(isolate, Builtins::kStoreICTrampoline);
return Builtins::CallableFor(isolate, Builtin::kStoreICTrampoline);
}
Callable CodeFactory::StoreOwnICInOptimizedCode(Isolate* isolate) {
// TODO(ishell): Currently we use StoreOwnIC only for storing properties that
// already exist in the boilerplate therefore we can use StoreIC.
return Builtins::CallableFor(isolate, Builtins::kStoreIC);
return Builtins::CallableFor(isolate, Builtin::kStoreIC);
}
Callable CodeFactory::KeyedStoreIC_SloppyArguments(Isolate* isolate,
KeyedAccessStoreMode mode) {
Builtins::Name builtin_index;
Builtin builtin_index;
switch (mode) {
case STANDARD_STORE:
builtin_index = Builtins::kKeyedStoreIC_SloppyArguments_Standard;
builtin_index = Builtin::kKeyedStoreIC_SloppyArguments_Standard;
break;
case STORE_AND_GROW_HANDLE_COW:
builtin_index =
Builtins::kKeyedStoreIC_SloppyArguments_GrowNoTransitionHandleCOW;
Builtin::kKeyedStoreIC_SloppyArguments_GrowNoTransitionHandleCOW;
break;
case STORE_IGNORE_OUT_OF_BOUNDS:
builtin_index =
Builtins::kKeyedStoreIC_SloppyArguments_NoTransitionIgnoreOOB;
Builtin::kKeyedStoreIC_SloppyArguments_NoTransitionIgnoreOOB;
break;
case STORE_HANDLE_COW:
builtin_index =
Builtins::kKeyedStoreIC_SloppyArguments_NoTransitionHandleCOW;
Builtin::kKeyedStoreIC_SloppyArguments_NoTransitionHandleCOW;
break;
default:
UNREACHABLE();
@ -134,22 +134,22 @@ Callable CodeFactory::KeyedStoreIC_SloppyArguments(Isolate* isolate,
Callable CodeFactory::ElementsTransitionAndStore(Isolate* isolate,
KeyedAccessStoreMode mode) {
Builtins::Name builtin_index;
Builtin builtin_index;
switch (mode) {
case STANDARD_STORE:
builtin_index = Builtins::kElementsTransitionAndStore_Standard;
builtin_index = Builtin::kElementsTransitionAndStore_Standard;
break;
case STORE_AND_GROW_HANDLE_COW:
builtin_index =
Builtins::kElementsTransitionAndStore_GrowNoTransitionHandleCOW;
Builtin::kElementsTransitionAndStore_GrowNoTransitionHandleCOW;
break;
case STORE_IGNORE_OUT_OF_BOUNDS:
builtin_index =
Builtins::kElementsTransitionAndStore_NoTransitionIgnoreOOB;
Builtin::kElementsTransitionAndStore_NoTransitionIgnoreOOB;
break;
case STORE_HANDLE_COW:
builtin_index =
Builtins::kElementsTransitionAndStore_NoTransitionHandleCOW;
Builtin::kElementsTransitionAndStore_NoTransitionHandleCOW;
break;
default:
UNREACHABLE();
@ -159,19 +159,19 @@ Callable CodeFactory::ElementsTransitionAndStore(Isolate* isolate,
Callable CodeFactory::StoreFastElementIC(Isolate* isolate,
KeyedAccessStoreMode mode) {
Builtins::Name builtin_index;
Builtin builtin_index;
switch (mode) {
case STANDARD_STORE:
builtin_index = Builtins::kStoreFastElementIC_Standard;
builtin_index = Builtin::kStoreFastElementIC_Standard;
break;
case STORE_AND_GROW_HANDLE_COW:
builtin_index = Builtins::kStoreFastElementIC_GrowNoTransitionHandleCOW;
builtin_index = Builtin::kStoreFastElementIC_GrowNoTransitionHandleCOW;
break;
case STORE_IGNORE_OUT_OF_BOUNDS:
builtin_index = Builtins::kStoreFastElementIC_NoTransitionIgnoreOOB;
builtin_index = Builtin::kStoreFastElementIC_NoTransitionIgnoreOOB;
break;
case STORE_HANDLE_COW:
builtin_index = Builtins::kStoreFastElementIC_NoTransitionHandleCOW;
builtin_index = Builtin::kStoreFastElementIC_NoTransitionHandleCOW;
break;
default:
UNREACHABLE();
@ -183,27 +183,27 @@ Callable CodeFactory::StoreFastElementIC(Isolate* isolate,
Callable CodeFactory::BinaryOperation(Isolate* isolate, Operation op) {
switch (op) {
case Operation::kShiftRight:
return Builtins::CallableFor(isolate, Builtins::kShiftRight);
return Builtins::CallableFor(isolate, Builtin::kShiftRight);
case Operation::kShiftLeft:
return Builtins::CallableFor(isolate, Builtins::kShiftLeft);
return Builtins::CallableFor(isolate, Builtin::kShiftLeft);
case Operation::kShiftRightLogical:
return Builtins::CallableFor(isolate, Builtins::kShiftRightLogical);
return Builtins::CallableFor(isolate, Builtin::kShiftRightLogical);
case Operation::kAdd:
return Builtins::CallableFor(isolate, Builtins::kAdd);
return Builtins::CallableFor(isolate, Builtin::kAdd);
case Operation::kSubtract:
return Builtins::CallableFor(isolate, Builtins::kSubtract);
return Builtins::CallableFor(isolate, Builtin::kSubtract);
case Operation::kMultiply:
return Builtins::CallableFor(isolate, Builtins::kMultiply);
return Builtins::CallableFor(isolate, Builtin::kMultiply);
case Operation::kDivide:
return Builtins::CallableFor(isolate, Builtins::kDivide);
return Builtins::CallableFor(isolate, Builtin::kDivide);
case Operation::kModulus:
return Builtins::CallableFor(isolate, Builtins::kModulus);
return Builtins::CallableFor(isolate, Builtin::kModulus);
case Operation::kBitwiseOr:
return Builtins::CallableFor(isolate, Builtins::kBitwiseOr);
return Builtins::CallableFor(isolate, Builtin::kBitwiseOr);
case Operation::kBitwiseAnd:
return Builtins::CallableFor(isolate, Builtins::kBitwiseAnd);
return Builtins::CallableFor(isolate, Builtin::kBitwiseAnd);
case Operation::kBitwiseXor:
return Builtins::CallableFor(isolate, Builtins::kBitwiseXor);
return Builtins::CallableFor(isolate, Builtin::kBitwiseXor);
default:
break;
}
@ -228,18 +228,18 @@ Callable CodeFactory::OrdinaryToPrimitive(Isolate* isolate,
Callable CodeFactory::StringAdd(Isolate* isolate, StringAddFlags flags) {
switch (flags) {
case STRING_ADD_CHECK_NONE:
return Builtins::CallableFor(isolate, Builtins::kStringAdd_CheckNone);
return Builtins::CallableFor(isolate, Builtin::kStringAdd_CheckNone);
case STRING_ADD_CONVERT_LEFT:
return Builtins::CallableFor(isolate, Builtins::kStringAddConvertLeft);
return Builtins::CallableFor(isolate, Builtin::kStringAddConvertLeft);
case STRING_ADD_CONVERT_RIGHT:
return Builtins::CallableFor(isolate, Builtins::kStringAddConvertRight);
return Builtins::CallableFor(isolate, Builtin::kStringAddConvertRight);
}
UNREACHABLE();
}
// static
Callable CodeFactory::ResumeGenerator(Isolate* isolate) {
return Builtins::CallableFor(isolate, Builtins::kResumeGeneratorTrampoline);
return Builtins::CallableFor(isolate, Builtin::kResumeGeneratorTrampoline);
}
// static
@ -248,10 +248,10 @@ Callable CodeFactory::FastNewFunctionContext(Isolate* isolate,
switch (scope_type) {
case ScopeType::EVAL_SCOPE:
return Builtins::CallableFor(isolate,
Builtins::kFastNewFunctionContextEval);
Builtin::kFastNewFunctionContextEval);
case ScopeType::FUNCTION_SCOPE:
return Builtins::CallableFor(isolate,
Builtins::kFastNewFunctionContextFunction);
Builtin::kFastNewFunctionContextFunction);
default:
UNREACHABLE();
}
@ -268,25 +268,25 @@ Callable CodeFactory::Call_WithFeedback(Isolate* isolate,
switch (mode) {
case ConvertReceiverMode::kNullOrUndefined:
return Builtins::CallableFor(
isolate, Builtins::kCall_ReceiverIsNullOrUndefined_WithFeedback);
isolate, Builtin::kCall_ReceiverIsNullOrUndefined_WithFeedback);
case ConvertReceiverMode::kNotNullOrUndefined:
return Builtins::CallableFor(
isolate, Builtins::kCall_ReceiverIsNotNullOrUndefined_WithFeedback);
isolate, Builtin::kCall_ReceiverIsNotNullOrUndefined_WithFeedback);
case ConvertReceiverMode::kAny:
return Builtins::CallableFor(isolate,
Builtins::kCall_ReceiverIsAny_WithFeedback);
Builtin::kCall_ReceiverIsAny_WithFeedback);
}
UNREACHABLE();
}
// static
Callable CodeFactory::CallWithArrayLike(Isolate* isolate) {
return Builtins::CallableFor(isolate, Builtins::kCallWithArrayLike);
return Builtins::CallableFor(isolate, Builtin::kCallWithArrayLike);
}
// static
Callable CodeFactory::CallWithSpread(Isolate* isolate) {
return Builtins::CallableFor(isolate, Builtins::kCallWithSpread);
return Builtins::CallableFor(isolate, Builtin::kCallWithSpread);
}
// static
@ -297,48 +297,48 @@ Callable CodeFactory::CallFunction(Isolate* isolate, ConvertReceiverMode mode) {
// static
Callable CodeFactory::CallVarargs(Isolate* isolate) {
return Builtins::CallableFor(isolate, Builtins::kCallVarargs);
return Builtins::CallableFor(isolate, Builtin::kCallVarargs);
}
// static
Callable CodeFactory::CallForwardVarargs(Isolate* isolate) {
return Builtins::CallableFor(isolate, Builtins::kCallForwardVarargs);
return Builtins::CallableFor(isolate, Builtin::kCallForwardVarargs);
}
// static
Callable CodeFactory::CallFunctionForwardVarargs(Isolate* isolate) {
return Builtins::CallableFor(isolate, Builtins::kCallFunctionForwardVarargs);
return Builtins::CallableFor(isolate, Builtin::kCallFunctionForwardVarargs);
}
// static
Callable CodeFactory::Construct(Isolate* isolate) {
return Builtins::CallableFor(isolate, Builtins::kConstruct);
return Builtins::CallableFor(isolate, Builtin::kConstruct);
}
// static
Callable CodeFactory::ConstructWithSpread(Isolate* isolate) {
return Builtins::CallableFor(isolate, Builtins::kConstructWithSpread);
return Builtins::CallableFor(isolate, Builtin::kConstructWithSpread);
}
// static
Callable CodeFactory::ConstructFunction(Isolate* isolate) {
return Builtins::CallableFor(isolate, Builtins::kConstructFunction);
return Builtins::CallableFor(isolate, Builtin::kConstructFunction);
}
// static
Callable CodeFactory::ConstructVarargs(Isolate* isolate) {
return Builtins::CallableFor(isolate, Builtins::kConstructVarargs);
return Builtins::CallableFor(isolate, Builtin::kConstructVarargs);
}
// static
Callable CodeFactory::ConstructForwardVarargs(Isolate* isolate) {
return Builtins::CallableFor(isolate, Builtins::kConstructForwardVarargs);
return Builtins::CallableFor(isolate, Builtin::kConstructForwardVarargs);
}
// static
Callable CodeFactory::ConstructFunctionForwardVarargs(Isolate* isolate) {
return Builtins::CallableFor(isolate,
Builtins::kConstructFunctionForwardVarargs);
Builtin::kConstructFunctionForwardVarargs);
}
// static
@ -352,16 +352,16 @@ Callable CodeFactory::InterpreterPushArgsThenCall(
UNREACHABLE();
case InterpreterPushArgsMode::kWithFinalSpread:
return Builtins::CallableFor(
isolate, Builtins::kInterpreterPushArgsThenCallWithFinalSpread);
isolate, Builtin::kInterpreterPushArgsThenCallWithFinalSpread);
case InterpreterPushArgsMode::kOther:
switch (receiver_mode) {
case ConvertReceiverMode::kNullOrUndefined:
return Builtins::CallableFor(
isolate, Builtins::kInterpreterPushUndefinedAndArgsThenCall);
isolate, Builtin::kInterpreterPushUndefinedAndArgsThenCall);
case ConvertReceiverMode::kNotNullOrUndefined:
case ConvertReceiverMode::kAny:
return Builtins::CallableFor(isolate,
Builtins::kInterpreterPushArgsThenCall);
Builtin::kInterpreterPushArgsThenCall);
}
}
UNREACHABLE();
@ -373,13 +373,13 @@ Callable CodeFactory::InterpreterPushArgsThenConstruct(
switch (mode) {
case InterpreterPushArgsMode::kArrayFunction:
return Builtins::CallableFor(
isolate, Builtins::kInterpreterPushArgsThenConstructArrayFunction);
isolate, Builtin::kInterpreterPushArgsThenConstructArrayFunction);
case InterpreterPushArgsMode::kWithFinalSpread:
return Builtins::CallableFor(
isolate, Builtins::kInterpreterPushArgsThenConstructWithFinalSpread);
isolate, Builtin::kInterpreterPushArgsThenConstructWithFinalSpread);
case InterpreterPushArgsMode::kOther:
return Builtins::CallableFor(isolate,
Builtins::kInterpreterPushArgsThenConstruct);
Builtin::kInterpreterPushArgsThenConstruct);
}
UNREACHABLE();
}
@ -401,14 +401,14 @@ Callable CodeFactory::InterpreterCEntry(Isolate* isolate, int result_size) {
// static
Callable CodeFactory::InterpreterOnStackReplacement(Isolate* isolate) {
return Builtins::CallableFor(isolate,
Builtins::kInterpreterOnStackReplacement);
Builtin::kInterpreterOnStackReplacement);
}
// static
Callable CodeFactory::InterpreterOnStackReplacement_ToBaseline(
Isolate* isolate) {
return Builtins::CallableFor(
isolate, Builtins::kInterpreterOnStackReplacement_ToBaseline);
isolate, Builtin::kInterpreterOnStackReplacement_ToBaseline);
}
// static
@ -419,7 +419,7 @@ Callable CodeFactory::ArrayNoArgumentConstructor(
case kind_caps: \
return Builtins::CallableFor( \
isolate, \
Builtins::kArrayNoArgumentConstructor_##kind_camel##_##mode_camel);
Builtin::kArrayNoArgumentConstructor_##kind_camel##_##mode_camel);
if (override_mode == DONT_OVERRIDE && AllocationSite::ShouldTrack(kind)) {
DCHECK(IsSmiElementsKind(kind));
switch (kind) {
@ -453,7 +453,7 @@ Callable CodeFactory::ArraySingleArgumentConstructor(
case kind_caps: \
return Builtins::CallableFor( \
isolate, \
Builtins::kArraySingleArgumentConstructor_##kind_camel##_##mode_camel)
Builtin::kArraySingleArgumentConstructor_##kind_camel##_##mode_camel)
if (override_mode == DONT_OVERRIDE && AllocationSite::ShouldTrack(kind)) {
DCHECK(IsSmiElementsKind(kind));
switch (kind) {


@ -5400,7 +5400,7 @@ TNode<Float64T> CodeStubAssembler::TruncateTaggedToFloat64(
BIND(&if_valueisnotnumber);
{
// Convert the {value} to a Number first.
var_value = CallBuiltin(Builtins::kNonNumberToNumber, context, value);
var_value = CallBuiltin(Builtin::kNonNumberToNumber, context, value);
Goto(&loop);
}
}
@ -5485,8 +5485,8 @@ void CodeStubAssembler::TaggedToWord32OrBigIntImpl(
GotoIf(InstanceTypeEqual(instance_type, ODDBALL_TYPE), &is_oddball);
// Not an oddball either -> convert.
auto builtin = conversion == Object::Conversion::kToNumeric
? Builtins::kNonNumberToNumeric
: Builtins::kNonNumberToNumber;
? Builtin::kNonNumberToNumeric
: Builtin::kNonNumberToNumber;
var_value = CallBuiltin(builtin, context, value);
OverwriteFeedback(var_feedback, BinaryOperationFeedback::kAny);
Goto(&loop);
@ -5751,7 +5751,7 @@ TNode<String> CodeStubAssembler::ToThisString(TNode<Context> context,
Label if_valueisnullorundefined(this, Label::kDeferred);
GotoIf(IsNullOrUndefined(value), &if_valueisnullorundefined);
// Convert the {value} to a String.
var_value = CallBuiltin(Builtins::kToString, context, value);
var_value = CallBuiltin(Builtin::kToString, context, value);
Goto(&if_valueisstring);
BIND(&if_valueisnullorundefined);
@ -5765,7 +5765,7 @@ TNode<String> CodeStubAssembler::ToThisString(TNode<Context> context,
BIND(&if_valueissmi);
{
// The {value} is a Smi, convert it to a String.
var_value = CallBuiltin(Builtins::kNumberToString, context, value);
var_value = CallBuiltin(Builtin::kNumberToString, context, value);
Goto(&if_valueisstring);
}
BIND(&if_valueisstring);
@ -5817,7 +5817,7 @@ TNode<Int32T> CodeStubAssembler::ChangeTaggedNonSmiToInt32(
},
[=] {
return TruncateNumberToWord32(
CAST(CallBuiltin(Builtins::kNonNumberToNumber, context, input)));
CAST(CallBuiltin(Builtin::kNonNumberToNumber, context, input)));
});
}
@ -5836,7 +5836,7 @@ TNode<Float64T> CodeStubAssembler::ChangeTaggedToFloat64(TNode<Context> context,
[=] { return LoadHeapNumberValue(CAST(input)); },
[=] {
return ChangeNumberToFloat64(
CAST(CallBuiltin(Builtins::kNonNumberToNumber, context, input)));
CAST(CallBuiltin(Builtin::kNonNumberToNumber, context, input)));
});
Goto(&end);
@ -7404,8 +7404,7 @@ TNode<Number> CodeStubAssembler::ToNumber_Inline(TNode<Context> context,
var_result = Select<Number>(
IsHeapNumber(CAST(input)), [=] { return CAST(input); },
[=] {
return CAST(
CallBuiltin(Builtins::kNonNumberToNumber, context, input));
return CAST(CallBuiltin(Builtin::kNonNumberToNumber, context, input));
});
Goto(&end);
}
@ -7552,7 +7551,7 @@ void CodeStubAssembler::TaggedToNumeric(TNode<Context> context,
// {heap_object_value} is not a Numeric yet.
GotoIf(Word32Equal(instance_type, Int32Constant(ODDBALL_TYPE)), &if_oddball);
*var_numeric = CAST(
CallBuiltin(Builtins::kNonNumberToNumeric, context, heap_object_value));
CallBuiltin(Builtin::kNonNumberToNumeric, context, heap_object_value));
OverwriteFeedback(var_feedback, BinaryOperationFeedback::kAny);
Goto(&done);
@ -7693,7 +7692,7 @@ TNode<String> CodeStubAssembler::ToString_Inline(TNode<Context> context,
Branch(IsString(CAST(input)), &out, &stub_call);
BIND(&stub_call);
var_result = CallBuiltin(Builtins::kToString, context, input);
var_result = CallBuiltin(Builtin::kToString, context, input);
Goto(&out);
BIND(&out);
@ -7702,7 +7701,7 @@ TNode<String> CodeStubAssembler::ToString_Inline(TNode<Context> context,
TNode<JSReceiver> CodeStubAssembler::ToObject(TNode<Context> context,
TNode<Object> input) {
return CAST(CallBuiltin(Builtins::kToObject, context, input));
return CAST(CallBuiltin(Builtin::kToObject, context, input));
}
TNode<JSReceiver> CodeStubAssembler::ToObject_Inline(TNode<Context> context,
@ -7734,7 +7733,7 @@ TNode<Number> CodeStubAssembler::ToLength_Inline(TNode<Context> context,
TNode<Smi> smi_zero = SmiConstant(0);
return Select<Number>(
TaggedIsSmi(input), [=] { return SmiMax(CAST(input), smi_zero); },
[=] { return CAST(CallBuiltin(Builtins::kToLength, context, input)); });
[=] { return CAST(CallBuiltin(Builtin::kToLength, context, input)); });
}
TNode<Object> CodeStubAssembler::OrdinaryToPrimitive(
@ -9458,7 +9457,7 @@ TNode<Object> CodeStubAssembler::CallGetterIfAccessor(
TNode<NativeContext> creation_context =
GetCreationContext(CAST(holder), if_bailout);
var_value = CallBuiltin(
Builtins::kCallFunctionTemplate_CheckAccessAndCompatibleReceiver,
Builtin::kCallFunctionTemplate_CheckAccessAndCompatibleReceiver,
creation_context, getter, IntPtrConstant(0), receiver);
Goto(&done);
@ -10629,7 +10628,7 @@ TNode<Word32T> CodeStubAssembler::PrepareValueForWriteToTypedArray<Word32T>(
BIND(&convert);
{
var_input = CallBuiltin(Builtins::kNonNumberToNumber, context, input);
var_input = CallBuiltin(Builtin::kNonNumberToNumber, context, input);
Goto(&loop);
}
@ -10677,7 +10676,7 @@ TNode<Float32T> CodeStubAssembler::PrepareValueForWriteToTypedArray<Float32T>(
BIND(&convert);
{
var_input = CallBuiltin(Builtins::kNonNumberToNumber, context, input);
var_input = CallBuiltin(Builtin::kNonNumberToNumber, context, input);
Goto(&loop);
}
@ -10724,7 +10723,7 @@ TNode<Float64T> CodeStubAssembler::PrepareValueForWriteToTypedArray<Float64T>(
BIND(&convert);
{
var_input = CallBuiltin(Builtins::kNonNumberToNumber, context, input);
var_input = CallBuiltin(Builtin::kNonNumberToNumber, context, input);
Goto(&loop);
}
@ -11725,8 +11724,7 @@ TNode<Oddball> CodeStubAssembler::RelationalComparison(
// dedicated ToPrimitive(right, hint Number) operation, as the
// ToNumeric(right) will by itself already invoke ToPrimitive with
// a Number hint.
var_right =
CallBuiltin(Builtins::kNonNumberToNumeric, context(), right);
var_right = CallBuiltin(Builtin::kNonNumberToNumeric, context(), right);
Goto(&loop);
}
}
@ -11771,8 +11769,7 @@ TNode<Oddball> CodeStubAssembler::RelationalComparison(
// dedicated ToPrimitive(left, hint Number) operation, as the
// ToNumeric(left) will by itself already invoke ToPrimitive with
// a Number hint.
var_left =
CallBuiltin(Builtins::kNonNumberToNumeric, context(), left);
var_left = CallBuiltin(Builtin::kNonNumberToNumeric, context(), left);
Goto(&loop);
}
}
@ -11828,7 +11825,7 @@ TNode<Oddball> CodeStubAssembler::RelationalComparison(
// ToNumeric(right) will by itself already invoke ToPrimitive with
// a Number hint.
var_right =
CallBuiltin(Builtins::kNonNumberToNumeric, context(), right);
CallBuiltin(Builtin::kNonNumberToNumeric, context(), right);
Goto(&loop);
}
}
@ -11883,7 +11880,7 @@ TNode<Oddball> CodeStubAssembler::RelationalComparison(
// ToNumeric(right) will by itself already invoke ToPrimitive with
// a Number hint.
var_right =
CallBuiltin(Builtins::kNonNumberToNumeric, context(), right);
CallBuiltin(Builtin::kNonNumberToNumeric, context(), right);
Goto(&loop);
}
}
@ -11898,19 +11895,19 @@ TNode<Oddball> CodeStubAssembler::RelationalComparison(
// Both {left} and {right} are strings.
CombineFeedback(var_type_feedback, CompareOperationFeedback::kString);
Builtins::Name builtin;
Builtin builtin;
switch (op) {
case Operation::kLessThan:
builtin = Builtins::kStringLessThan;
builtin = Builtin::kStringLessThan;
break;
case Operation::kLessThanOrEqual:
builtin = Builtins::kStringLessThanOrEqual;
builtin = Builtin::kStringLessThanOrEqual;
break;
case Operation::kGreaterThan:
builtin = Builtins::kStringGreaterThan;
builtin = Builtin::kStringGreaterThan;
break;
case Operation::kGreaterThanOrEqual:
builtin = Builtins::kStringGreaterThanOrEqual;
builtin = Builtin::kStringGreaterThanOrEqual;
break;
default:
UNREACHABLE();
@ -11934,8 +11931,8 @@ TNode<Oddball> CodeStubAssembler::RelationalComparison(
&if_right_receiver);
var_left =
CallBuiltin(Builtins::kNonNumberToNumeric, context(), left);
var_right = CallBuiltin(Builtins::kToNumeric, context(), right);
CallBuiltin(Builtin::kNonNumberToNumeric, context(), left);
var_right = CallBuiltin(Builtin::kToNumeric, context(), right);
Goto(&loop);
BIND(&if_right_bigint);
@ -11998,9 +11995,8 @@ TNode<Oddball> CodeStubAssembler::RelationalComparison(
GotoIf(IsJSReceiverInstanceType(left_instance_type),
&if_left_receiver);
var_right = CallBuiltin(Builtins::kToNumeric, context(), right);
var_left =
CallBuiltin(Builtins::kNonNumberToNumeric, context(), left);
var_right = CallBuiltin(Builtin::kToNumeric, context(), right);
var_left = CallBuiltin(Builtin::kNonNumberToNumeric, context(), left);
Goto(&loop);
BIND(&if_left_receiver);
@ -12309,7 +12305,7 @@ TNode<Oddball> CodeStubAssembler::Equal(TNode<Object> left, TNode<Object> right,
{
GotoIfNot(IsStringInstanceType(right_type), &use_symmetry);
result =
CAST(CallBuiltin(Builtins::kStringEqual, context(), left, right));
CAST(CallBuiltin(Builtin::kStringEqual, context(), left, right));
CombineFeedback(var_type_feedback,
SmiOr(CollectFeedbackForString(left_type),
CollectFeedbackForString(right_type)));
@ -12569,7 +12565,7 @@ TNode<Oddball> CodeStubAssembler::Equal(TNode<Object> left, TNode<Object> right,
CombineFeedback(var_type_feedback,
CollectFeedbackForString(right_type));
}
var_right = CallBuiltin(Builtins::kStringToNumber, context(), right);
var_right = CallBuiltin(Builtin::kStringToNumber, context(), right);
Goto(&loop);
}
@ -12774,7 +12770,7 @@ TNode<Oddball> CodeStubAssembler::StrictEqual(
CollectFeedbackForString(rhs_instance_type);
*var_type_feedback = SmiOr(lhs_feedback, rhs_feedback);
}
result = CAST(CallBuiltin(Builtins::kStringEqual,
result = CAST(CallBuiltin(Builtin::kStringEqual,
NoContextConstant(), lhs, rhs));
Goto(&end);
}
@ -13051,7 +13047,7 @@ void CodeStubAssembler::BranchIfSameValue(TNode<Object> lhs, TNode<Object> rhs,
// with the same sequence of characters.
GotoIfNot(IsString(CAST(rhs)), if_false);
const TNode<Object> result = CallBuiltin(
Builtins::kStringEqual, NoContextConstant(), lhs, rhs);
Builtin::kStringEqual, NoContextConstant(), lhs, rhs);
Branch(IsTrue(result), if_true, if_false);
}
@ -13136,13 +13132,13 @@ TNode<Oddball> CodeStubAssembler::HasProperty(TNode<Context> context,
BIND(&if_proxy);
{
TNode<Name> name = CAST(CallBuiltin(Builtins::kToName, context, key));
TNode<Name> name = CAST(CallBuiltin(Builtin::kToName, context, key));
switch (mode) {
case kHasProperty:
GotoIf(IsPrivateSymbol(name), &return_false);
result = CAST(
CallBuiltin(Builtins::kProxyHasProperty, context, object, name));
CallBuiltin(Builtin::kProxyHasProperty, context, object, name));
Goto(&end);
break;
case kForInHasProperty:
@ -13433,7 +13429,7 @@ TNode<Oddball> CodeStubAssembler::InstanceOf(TNode<Object> object,
// Use the OrdinaryHasInstance algorithm.
var_result = CAST(
CallBuiltin(Builtins::kOrdinaryHasInstance, context, callable, object));
CallBuiltin(Builtin::kOrdinaryHasInstance, context, callable, object));
Goto(&return_result);
}
@ -14132,7 +14128,7 @@ TNode<BoolT> CodeStubAssembler::NeedsAnyPromiseHooks(TNode<Uint32T> flags) {
}
TNode<Code> CodeStubAssembler::LoadBuiltin(TNode<Smi> builtin_id) {
CSA_ASSERT(this, SmiBelow(builtin_id, SmiConstant(Builtins::builtin_count)));
CSA_ASSERT(this, SmiBelow(builtin_id, SmiConstant(Builtins::kBuiltinCount)));
TNode<IntPtrT> offset =
ElementOffsetFromIndex(SmiToBInt(builtin_id), SYSTEM_POINTER_ELEMENTS);
@ -14159,7 +14155,7 @@ TNode<Code> CodeStubAssembler::GetSharedFunctionInfoCode(
*data_type_out = Uint16Constant(0);
}
if (if_compile_lazy) {
GotoIf(SmiEqual(CAST(sfi_data), SmiConstant(Builtins::kCompileLazy)),
GotoIf(SmiEqual(CAST(sfi_data), SmiConstant(Builtin::kCompileLazy)),
if_compile_lazy);
}
sfi_code = LoadBuiltin(CAST(sfi_data));
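
As the hunk above shows, GetSharedFunctionInfoCode treats a Smi in the SharedFunctionInfo data field as a builtin id, special-casing kCompileLazy before looking the code up via LoadBuiltin. A simplified, hypothetical model of that dispatch, with std::variant standing in for the tagged field and a vector for the builtin table:

#include <cstdio>
#include <string>
#include <variant>
#include <vector>

enum Builtin : int { kCompileLazy, kAdd, kToNumber, kBuiltinCount };

// Toy model: the data field is either a builtin id ("Smi") or bytecode.
using SfiData = std::variant<Builtin, std::string>;

std::string GetCode(const SfiData& data,
                    const std::vector<std::string>& builtin_table) {
  if (const Builtin* id = std::get_if<Builtin>(&data)) {
    if (*id == Builtin::kCompileLazy) return "compile-lazy path";
    return builtin_table[static_cast<int>(*id)];  // LoadBuiltin equivalent
  }
  return "interpreter entry";  // bytecode and other cases, elided here
}

int main() {
  std::vector<std::string> table(Builtin::kBuiltinCount, "builtin code");
  std::printf("%s\n", GetCode(SfiData{Builtin::kToNumber}, table).c_str());
  std::printf("%s\n", GetCode(SfiData{std::string("bytecode")}, table).c_str());
}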


@ -3025,19 +3025,19 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
TNode<Object> GetProperty(TNode<Context> context, TNode<Object> receiver,
TNode<Object> name) {
return CallBuiltin(Builtins::kGetProperty, context, receiver, name);
return CallBuiltin(Builtin::kGetProperty, context, receiver, name);
}
TNode<Object> SetPropertyStrict(TNode<Context> context,
TNode<Object> receiver, TNode<Object> key,
TNode<Object> value) {
return CallBuiltin(Builtins::kSetProperty, context, receiver, key, value);
return CallBuiltin(Builtin::kSetProperty, context, receiver, key, value);
}
TNode<Object> SetPropertyInLiteral(TNode<Context> context,
TNode<JSObject> receiver,
TNode<Object> key, TNode<Object> value) {
return CallBuiltin(Builtins::kSetPropertyInLiteral, context, receiver, key,
return CallBuiltin(Builtin::kSetPropertyInLiteral, context, receiver, key,
value);
}
@ -3052,15 +3052,13 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
TNode<Object> sync_iterator);
template <class... TArgs>
TNode<Object> CallBuiltin(Builtins::Name id, TNode<Object> context,
TArgs... args) {
TNode<Object> CallBuiltin(Builtin id, TNode<Object> context, TArgs... args) {
return CallStub<Object>(Builtins::CallableFor(isolate(), id), context,
args...);
}
template <class... TArgs>
void TailCallBuiltin(Builtins::Name id, TNode<Object> context,
TArgs... args) {
void TailCallBuiltin(Builtin id, TNode<Object> context, TArgs... args) {
return TailCallStub(Builtins::CallableFor(isolate(), id), context, args...);
}
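
The CSA CallBuiltin/TailCallBuiltin templates above are thin variadic wrappers: resolve the id to a Callable, then forward the context and arguments. A toy sketch of that forwarding shape, with placeholder types rather than the real CodeStubAssembler API:

#include <cstdio>
#include <utility>

enum Builtin : int { kGetProperty, kSetProperty };

struct Callable { Builtin id; };
Callable CallableFor(Builtin id) { return Callable{id}; }

template <class... TArgs>
void CallStub(const Callable& callable, TArgs&&... args) {
  std::printf("call builtin %d with %zu args\n", static_cast<int>(callable.id),
              sizeof...(args));
}

template <class... TArgs>
void CallBuiltin(Builtin id, TArgs&&... args) {
  // Mirror of the CSA helper: look up the callable, forward everything else.
  CallStub(CallableFor(id), std::forward<TArgs>(args)...);
}

int main() { CallBuiltin(Builtin::kGetProperty, /*context=*/nullptr, 1, 2.5); }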


@ -231,12 +231,11 @@ Operand TurboAssembler::HeapObjectAsOperand(Handle<HeapObject> object) {
if (isolate()->roots_table().IsRootHandle(object, &root_index)) {
return Operand(kRootRegister, RootRegisterOffsetForRootIndex(root_index));
} else if (isolate()->builtins()->IsBuiltinHandle(object, &builtin_index)) {
return Operand(kRootRegister,
RootRegisterOffsetForBuiltinIndex(builtin_index));
return Operand(kRootRegister, RootRegisterOffsetForBuiltin(builtin_index));
} else if (object.is_identical_to(code_object_) &&
Builtins::IsBuiltinId(maybe_builtin_index_)) {
return Operand(kRootRegister,
RootRegisterOffsetForBuiltinIndex(maybe_builtin_index_));
RootRegisterOffsetForBuiltin(maybe_builtin_index_));
} else {
// Objects in the constants table need an additional indirection, which
// cannot be represented as a single Operand.
@ -2173,7 +2172,7 @@ void TurboAssembler::Call(Handle<Code> code_object, RelocInfo::Mode rmode) {
DCHECK_IMPLIES(options().isolate_independent_code,
Builtins::IsIsolateIndependentBuiltin(*code_object));
if (options().inline_offheap_trampolines) {
int builtin_index = Builtins::kNoBuiltinId;
int builtin_index = Builtin::kNoBuiltinId;
if (isolate()->builtins()->IsBuiltinHandle(code_object, &builtin_index)) {
// Inline the trampoline.
CallBuiltin(builtin_index);
@ -2207,14 +2206,13 @@ void TurboAssembler::CallBuiltinByIndex(Register builtin_index) {
void TurboAssembler::CallBuiltin(int builtin_index) {
DCHECK(Builtins::IsBuiltinId(builtin_index));
RecordCommentForOffHeapTrampoline(builtin_index);
CHECK_NE(builtin_index, Builtins::kNoBuiltinId);
CHECK_NE(builtin_index, Builtin::kNoBuiltinId);
EmbeddedData d = EmbeddedData::FromBlob();
Address entry = d.InstructionStartOfBuiltin(builtin_index);
call(entry, RelocInfo::OFF_HEAP_TARGET);
}
Operand TurboAssembler::EntryFromBuiltinIndexAsOperand(
Builtins::Name builtin_index) {
Operand TurboAssembler::EntryFromBuiltinAsOperand(Builtin builtin_index) {
return Operand(kRootRegister,
IsolateData::builtin_entry_slot_offset(builtin_index));
}
@ -2290,11 +2288,11 @@ void TurboAssembler::Jump(Handle<Code> code_object, RelocInfo::Mode rmode) {
DCHECK_IMPLIES(options().isolate_independent_code,
Builtins::IsIsolateIndependentBuiltin(*code_object));
if (options().inline_offheap_trampolines) {
int builtin_index = Builtins::kNoBuiltinId;
int builtin_index = Builtin::kNoBuiltinId;
if (isolate()->builtins()->IsBuiltinHandle(code_object, &builtin_index)) {
// Inline the trampoline.
RecordCommentForOffHeapTrampoline(builtin_index);
CHECK_NE(builtin_index, Builtins::kNoBuiltinId);
CHECK_NE(builtin_index, Builtin::kNoBuiltinId);
EmbeddedData d = EmbeddedData::FromBlob();
Address entry = d.InstructionStartOfBuiltin(builtin_index);
jmp(entry, RelocInfo::OFF_HEAP_TARGET);
@ -2391,9 +2389,9 @@ void TurboAssembler::ComputeCodeStartAddress(Register dst) {
}
}
void TurboAssembler::CallForDeoptimization(Builtins::Name target, int,
Label* exit, DeoptimizeKind kind,
Label* ret, Label*) {
void TurboAssembler::CallForDeoptimization(Builtin target, int, Label* exit,
DeoptimizeKind kind, Label* ret,
Label*) {
CallBuiltin(target);
DCHECK_EQ(SizeOfCodeGeneratedSince(exit),
(kind == DeoptimizeKind::kLazy)


@ -139,7 +139,7 @@ class V8_EXPORT_PRIVATE TurboAssembler : public SharedTurboAssembler {
void Move(XMMRegister dst, float src) { Move(dst, bit_cast<uint32_t>(src)); }
void Move(XMMRegister dst, double src) { Move(dst, bit_cast<uint64_t>(src)); }
Operand EntryFromBuiltinIndexAsOperand(Builtins::Name builtin_index);
Operand EntryFromBuiltinAsOperand(Builtin builtin_index);
void Call(Register reg) { call(reg); }
void Call(Operand op) { call(op); }
@ -170,7 +170,7 @@ class V8_EXPORT_PRIVATE TurboAssembler : public SharedTurboAssembler {
void Trap();
void DebugBreak();
void CallForDeoptimization(Builtins::Name target, int deopt_id, Label* exit,
void CallForDeoptimization(Builtin target, int deopt_id, Label* exit,
DeoptimizeKind kind, Label* ret,
Label* jump_deoptimization_entry_label);


@ -505,7 +505,7 @@ constexpr Register RunMicrotasksDescriptor::MicrotaskQueueRegister() {
#define DEFINE_STATIC_BUILTIN_DESCRIPTOR_GETTER(Name, DescriptorName) \
template <> \
struct CallInterfaceDescriptorFor<Builtins::k##Name> { \
struct CallInterfaceDescriptorFor<Builtin::k##Name> { \
using type = DescriptorName##Descriptor; \
};
BUILTIN_LIST(IGNORE_BUILTIN, IGNORE_BUILTIN,
@ -515,7 +515,7 @@ BUILTIN_LIST(IGNORE_BUILTIN, IGNORE_BUILTIN,
#undef DEFINE_STATIC_BUILTIN_DESCRIPTOR_GETTER
#define DEFINE_STATIC_BUILTIN_DESCRIPTOR_GETTER(Name, ...) \
template <> \
struct CallInterfaceDescriptorFor<Builtins::k##Name> { \
struct CallInterfaceDescriptorFor<Builtin::k##Name> { \
using type = Name##Descriptor; \
};
BUILTIN_LIST_TFS(DEFINE_STATIC_BUILTIN_DESCRIPTOR_GETTER)
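
The DEFINE_STATIC_BUILTIN_DESCRIPTOR_GETTER macros specialize a struct template on each Builtin::kName value so the matching descriptor can be selected at compile time. A self-contained sketch of that specialization pattern, with two toy descriptors instead of the generated V8 list:

#include <type_traits>

enum Builtin : int { kCallApiGetter, kStringEqual };

struct CallApiGetterDescriptor {};
struct CompareDescriptor {};

// Primary template is only declared; every supported builtin adds an
// explicit specialization naming its descriptor type.
template <Builtin kBuiltin>
struct CallInterfaceDescriptorFor;

template <>
struct CallInterfaceDescriptorFor<Builtin::kCallApiGetter> {
  using type = CallApiGetterDescriptor;
};

template <>
struct CallInterfaceDescriptorFor<Builtin::kStringEqual> {
  using type = CompareDescriptor;
};

static_assert(
    std::is_same_v<CallInterfaceDescriptorFor<Builtin::kStringEqual>::type,
                   CompareDescriptor>,
    "descriptor lookup resolves at compile time");

int main() { return 0; }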


@ -502,7 +502,7 @@ class StaticJSCallInterfaceDescriptor
Descriptor>::StaticCallInterfaceDescriptor;
};
template <Builtins::Name kBuiltin>
template <Builtin kBuiltin>
struct CallInterfaceDescriptorFor;
// Stub class replacing std::array<Register, 0>, as a workaround for MSVC's


@ -303,7 +303,7 @@ void TurboAssembler::CallRecordWriteStub(
// Inline the trampoline.
DCHECK(Builtins::IsBuiltinId(builtin_index));
RecordCommentForOffHeapTrampoline(builtin_index);
CHECK_NE(builtin_index, Builtins::kNoBuiltinId);
CHECK_NE(builtin_index, Builtin::kNoBuiltinId);
EmbeddedData d = EmbeddedData::FromBlob();
Address entry = d.InstructionStartOfBuiltin(builtin_index);
li(t9, Operand(entry, RelocInfo::OFF_HEAP_TARGET));
@ -3773,7 +3773,7 @@ void TurboAssembler::Jump(Handle<Code> code, RelocInfo::Mode rmode,
DCHECK(RelocInfo::IsCodeTarget(rmode));
BlockTrampolinePoolScope block_trampoline_pool(this);
int builtin_index = Builtins::kNoBuiltinId;
int builtin_index = Builtin::kNoBuiltinId;
bool target_is_isolate_independent_builtin =
isolate()->builtins()->IsBuiltinHandle(code, &builtin_index) &&
Builtins::IsIsolateIndependent(builtin_index);
@ -3799,7 +3799,7 @@ void TurboAssembler::Jump(Handle<Code> code, RelocInfo::Mode rmode,
options().inline_offheap_trampolines) {
// Inline the trampoline.
RecordCommentForOffHeapTrampoline(builtin_index);
CHECK_NE(builtin_index, Builtins::kNoBuiltinId);
CHECK_NE(builtin_index, Builtin::kNoBuiltinId);
EmbeddedData d = EmbeddedData::FromBlob();
Address entry = d.InstructionStartOfBuiltin(builtin_index);
li(t9, Operand(entry, RelocInfo::OFF_HEAP_TARGET));
@ -3915,7 +3915,7 @@ void TurboAssembler::Call(Handle<Code> code, RelocInfo::Mode rmode,
BranchDelaySlot bd) {
BlockTrampolinePoolScope block_trampoline_pool(this);
int builtin_index = Builtins::kNoBuiltinId;
int builtin_index = Builtin::kNoBuiltinId;
bool target_is_isolate_independent_builtin =
isolate()->builtins()->IsBuiltinHandle(code, &builtin_index) &&
Builtins::IsIsolateIndependent(builtin_index);
@ -3939,7 +3939,7 @@ void TurboAssembler::Call(Handle<Code> code, RelocInfo::Mode rmode,
options().inline_offheap_trampolines) {
// Inline the trampoline.
RecordCommentForOffHeapTrampoline(builtin_index);
CHECK_NE(builtin_index, Builtins::kNoBuiltinId);
CHECK_NE(builtin_index, Builtin::kNoBuiltinId);
EmbeddedData d = EmbeddedData::FromBlob();
Address entry = d.InstructionStartOfBuiltin(builtin_index);
li(t9, Operand(entry, RelocInfo::OFF_HEAP_TARGET));
@ -5540,9 +5540,9 @@ void TurboAssembler::ResetSpeculationPoisonRegister() {
li(kSpeculationPoisonRegister, -1);
}
void TurboAssembler::CallForDeoptimization(Builtins::Name target, int,
Label* exit, DeoptimizeKind kind,
Label* ret, Label*) {
void TurboAssembler::CallForDeoptimization(Builtin target, int, Label* exit,
DeoptimizeKind kind, Label* ret,
Label*) {
BlockTrampolinePoolScope block_trampoline_pool(this);
Lw(t9,
MemOperand(kRootRegister, IsolateData::builtin_entry_slot_offset(target)));


@ -241,7 +241,7 @@ class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase {
// The return address on the stack is used by frame iteration.
void StoreReturnAddressAndCall(Register target);
void CallForDeoptimization(Builtins::Name target, int deopt_id, Label* exit,
void CallForDeoptimization(Builtin target, int deopt_id, Label* exit,
DeoptimizeKind kind, Label* ret,
Label* jump_deoptimization_entry_label);


@ -301,7 +301,7 @@ void TurboAssembler::CallRecordWriteStub(
// Inline the trampoline.
DCHECK(Builtins::IsBuiltinId(builtin_index));
RecordCommentForOffHeapTrampoline(builtin_index);
CHECK_NE(builtin_index, Builtins::kNoBuiltinId);
CHECK_NE(builtin_index, Builtin::kNoBuiltinId);
EmbeddedData d = EmbeddedData::FromBlob();
Address entry = d.InstructionStartOfBuiltin(builtin_index);
li(t9, Operand(entry, RelocInfo::OFF_HEAP_TARGET));
@ -4344,12 +4344,12 @@ void TurboAssembler::Jump(Handle<Code> code, RelocInfo::Mode rmode,
Jump(t9, cond, rs, rt, bd);
return;
} else if (options().inline_offheap_trampolines) {
int builtin_index = Builtins::kNoBuiltinId;
int builtin_index = Builtin::kNoBuiltinId;
if (isolate()->builtins()->IsBuiltinHandle(code, &builtin_index) &&
Builtins::IsIsolateIndependent(builtin_index)) {
// Inline the trampoline.
RecordCommentForOffHeapTrampoline(builtin_index);
CHECK_NE(builtin_index, Builtins::kNoBuiltinId);
CHECK_NE(builtin_index, Builtin::kNoBuiltinId);
EmbeddedData d = EmbeddedData::FromBlob();
Address entry = d.InstructionStartOfBuiltin(builtin_index);
li(t9, Operand(entry, RelocInfo::OFF_HEAP_TARGET));
@ -4423,12 +4423,12 @@ void TurboAssembler::Call(Handle<Code> code, RelocInfo::Mode rmode,
Call(t9, cond, rs, rt, bd);
return;
} else if (options().inline_offheap_trampolines) {
int builtin_index = Builtins::kNoBuiltinId;
int builtin_index = Builtin::kNoBuiltinId;
if (isolate()->builtins()->IsBuiltinHandle(code, &builtin_index) &&
Builtins::IsIsolateIndependent(builtin_index)) {
// Inline the trampoline.
RecordCommentForOffHeapTrampoline(builtin_index);
CHECK_NE(builtin_index, Builtins::kNoBuiltinId);
CHECK_NE(builtin_index, Builtin::kNoBuiltinId);
EmbeddedData d = EmbeddedData::FromBlob();
Address entry = d.InstructionStartOfBuiltin(builtin_index);
li(t9, Operand(entry, RelocInfo::OFF_HEAP_TARGET));
@ -6070,9 +6070,9 @@ void TurboAssembler::ResetSpeculationPoisonRegister() {
li(kSpeculationPoisonRegister, -1);
}
void TurboAssembler::CallForDeoptimization(Builtins::Name target, int,
Label* exit, DeoptimizeKind kind,
Label* ret, Label*) {
void TurboAssembler::CallForDeoptimization(Builtin target, int, Label* exit,
DeoptimizeKind kind, Label* ret,
Label*) {
BlockTrampolinePoolScope block_trampoline_pool(this);
Ld(t9,
MemOperand(kRootRegister, IsolateData::builtin_entry_slot_offset(target)));


@ -265,7 +265,7 @@ class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase {
// The return address on the stack is used by frame iteration.
void StoreReturnAddressAndCall(Register target);
void CallForDeoptimization(Builtins::Name target, int deopt_id, Label* exit,
void CallForDeoptimization(Builtin target, int deopt_id, Label* exit,
DeoptimizeKind kind, Label* ret,
Label* jump_deoptimization_entry_label);


@ -176,7 +176,7 @@ void TurboAssembler::Jump(Handle<Code> code, RelocInfo::Mode rmode,
DCHECK_IMPLIES(options().isolate_independent_code,
Builtins::IsIsolateIndependentBuiltin(*code));
int builtin_index = Builtins::kNoBuiltinId;
int builtin_index = Builtin::kNoBuiltinId;
bool target_is_builtin =
isolate()->builtins()->IsBuiltinHandle(code, &builtin_index);
@ -265,7 +265,7 @@ void TurboAssembler::Call(Handle<Code> code, RelocInfo::Mode rmode,
DCHECK_IMPLIES(options().use_pc_relative_calls_and_jumps,
Builtins::IsIsolateIndependentBuiltin(*code));
int builtin_index = Builtins::kNoBuiltinId;
int builtin_index = Builtin::kNoBuiltinId;
bool target_is_builtin =
isolate()->builtins()->IsBuiltinHandle(code, &builtin_index);
@ -3265,9 +3265,9 @@ void TurboAssembler::StoreReturnAddressAndCall(Register target) {
SizeOfCodeGeneratedSince(&start_call));
}
void TurboAssembler::CallForDeoptimization(Builtins::Name target, int,
Label* exit, DeoptimizeKind kind,
Label* ret, Label*) {
void TurboAssembler::CallForDeoptimization(Builtin target, int, Label* exit,
DeoptimizeKind kind, Label* ret,
Label*) {
BlockTrampolinePoolScope block_trampoline_pool(this);
LoadU64(ip, MemOperand(kRootRegister,
IsolateData::builtin_entry_slot_offset(target)));


@ -435,7 +435,7 @@ class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase {
JumpMode jump_mode = JumpMode::kJump);
void CallBuiltinByIndex(Register builtin_index);
void CallForDeoptimization(Builtins::Name target, int deopt_id, Label* exit,
void CallForDeoptimization(Builtin target, int deopt_id, Label* exit,
DeoptimizeKind kind, Label* ret,
Label* jump_deoptimization_entry_label);


@ -300,7 +300,7 @@ void TurboAssembler::CallRecordWriteStub(
// Inline the trampoline. //qj
DCHECK(Builtins::IsBuiltinId(builtin_index));
RecordCommentForOffHeapTrampoline(builtin_index);
CHECK_NE(builtin_index, Builtins::kNoBuiltinId);
CHECK_NE(builtin_index, Builtin::kNoBuiltinId);
EmbeddedData d = EmbeddedData::FromBlob();
Address entry = d.InstructionStartOfBuiltin(builtin_index);
@ -2994,7 +2994,7 @@ void TurboAssembler::Jump(Handle<Code> code, RelocInfo::Mode rmode,
DCHECK(RelocInfo::IsCodeTarget(rmode));
BlockTrampolinePoolScope block_trampoline_pool(this);
int builtin_index = Builtins::kNoBuiltinId;
int builtin_index = Builtin::kNoBuiltinId;
bool target_is_isolate_independent_builtin =
isolate()->builtins()->IsBuiltinHandle(code, &builtin_index) &&
Builtins::IsIsolateIndependent(builtin_index);
@ -3021,7 +3021,7 @@ void TurboAssembler::Jump(Handle<Code> code, RelocInfo::Mode rmode,
target_is_isolate_independent_builtin) {
// Inline the trampoline.
RecordCommentForOffHeapTrampoline(builtin_index);
CHECK_NE(builtin_index, Builtins::kNoBuiltinId);
CHECK_NE(builtin_index, Builtin::kNoBuiltinId);
EmbeddedData d = EmbeddedData::FromBlob();
Address entry = d.InstructionStartOfBuiltin(builtin_index);
li(t6, Operand(entry, RelocInfo::OFF_HEAP_TARGET));
@ -3074,7 +3074,7 @@ void TurboAssembler::Call(Address target, RelocInfo::Mode rmode, Condition cond,
void TurboAssembler::Call(Handle<Code> code, RelocInfo::Mode rmode,
Condition cond, Register rs, const Operand& rt) {
int builtin_index = Builtins::kNoBuiltinId;
int builtin_index = Builtin::kNoBuiltinId;
bool target_is_isolate_independent_builtin =
isolate()->builtins()->IsBuiltinHandle(code, &builtin_index) &&
Builtins::IsIsolateIndependent(builtin_index);
@ -3103,7 +3103,7 @@ void TurboAssembler::Call(Handle<Code> code, RelocInfo::Mode rmode,
target_is_isolate_independent_builtin) {
// Inline the trampoline.
RecordCommentForOffHeapTrampoline(builtin_index);
CHECK_NE(builtin_index, Builtins::kNoBuiltinId);
CHECK_NE(builtin_index, Builtin::kNoBuiltinId);
EmbeddedData d = EmbeddedData::FromBlob();
Address entry = d.InstructionStartOfBuiltin(builtin_index);
li(t6, Operand(entry, RelocInfo::OFF_HEAP_TARGET));
@ -3138,7 +3138,7 @@ void TurboAssembler::CallBuiltinByIndex(Register builtin_index) {
void TurboAssembler::CallBuiltin(int builtin_index) {
DCHECK(Builtins::IsBuiltinId(builtin_index));
RecordCommentForOffHeapTrampoline(builtin_index);
CHECK_NE(builtin_index, Builtins::kNoBuiltinId);
CHECK_NE(builtin_index, Builtin::kNoBuiltinId);
EmbeddedData d = EmbeddedData::FromBlob(isolate());
Address entry = d.InstructionStartOfBuiltin(builtin_index);
if (options().short_builtin_calls) {
@ -3152,7 +3152,7 @@ void TurboAssembler::CallBuiltin(int builtin_index) {
void TurboAssembler::TailCallBuiltin(int builtin_index) {
DCHECK(Builtins::IsBuiltinId(builtin_index));
RecordCommentForOffHeapTrampoline(builtin_index);
CHECK_NE(builtin_index, Builtins::kNoBuiltinId);
CHECK_NE(builtin_index, Builtin::kNoBuiltinId);
EmbeddedData d = EmbeddedData::FromBlob(isolate());
Address entry = d.InstructionStartOfBuiltin(builtin_index);
if (options().short_builtin_calls) {
@ -3163,13 +3163,12 @@ void TurboAssembler::TailCallBuiltin(int builtin_index) {
RecordComment("]");
}
void TurboAssembler::LoadEntryFromBuiltinIndex(Builtins::Name builtin_index,
Register destination) {
void TurboAssembler::LoadEntryFromBuiltin(Builtin builtin_index,
Register destination) {
Ld(destination, EntryFromBuiltinIndexAsOperand(builtin_index));
}
MemOperand TurboAssembler::EntryFromBuiltinIndexAsOperand(
Builtins::Name builtin_index) {
MemOperand TurboAssembler::EntryFromBuiltinAsOperand(Builtin builtin_index) {
DCHECK(root_array_available());
return MemOperand(kRootRegister,
IsolateData::builtin_entry_slot_offset(builtin_index));
@ -4627,9 +4626,9 @@ void TurboAssembler::ResetSpeculationPoisonRegister() {
li(kSpeculationPoisonRegister, -1);
}
void TurboAssembler::CallForDeoptimization(Builtins::Name target, int,
Label* exit, DeoptimizeKind kind,
Label* ret, Label*) {
void TurboAssembler::CallForDeoptimization(Builtin target, int, Label* exit,
DeoptimizeKind kind, Label* ret,
Label*) {
BlockTrampolinePoolScope block_trampoline_pool(this);
Ld(t6,
MemOperand(kRootRegister, IsolateData::builtin_entry_slot_offset(target)));
@ -4681,7 +4680,7 @@ void TurboAssembler::LoadCodeObjectEntry(Register destination,
bind(&if_code_is_off_heap);
Lw(scratch, FieldMemOperand(code_object, Code::kBuiltinIndexOffset));
// TODO(RISCV): https://github.com/v8-riscv/v8/issues/373
Branch(&no_builtin_index, eq, scratch, Operand(Builtins::kNoBuiltinId));
Branch(&no_builtin_index, eq, scratch, Operand(Builtin::kNoBuiltinId));
slli(destination, scratch, kSystemPointerSizeLog2);
Add64(destination, destination, kRootRegister);
Ld(destination,


@ -231,18 +231,17 @@ class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase {
// Load the builtin given by the Smi in |builtin_index| into the same
// register.
void LoadEntryFromBuiltinIndex(Register builtin_index);
void LoadEntryFromBuiltinIndex(Builtins::Name builtin_index,
Register destination);
MemOperand EntryFromBuiltinIndexAsOperand(Builtins::Name builtin_index);
void LoadEntryFromBuiltin(Register builtin_index);
void LoadEntryFromBuiltin(Builtin builtin_index, Register destination);
MemOperand EntryFromBuiltinAsOperand(Builtin builtin_index);
void CallBuiltinByIndex(Register builtin_index);
void CallBuiltin(Builtins::Name builtin) {
// TODO(11527): drop the int overload in favour of the Builtins::Name one.
void CallBuiltin(Builtin builtin) {
// TODO(11527): drop the int overload in favour of the Builtin one.
return CallBuiltin(static_cast<int>(builtin));
}
void CallBuiltin(int builtin_index);
void TailCallBuiltin(Builtins::Name builtin) {
// TODO(11527): drop the int overload in favour of the Builtins::Name one.
void TailCallBuiltin(Builtin builtin) {
// TODO(11527): drop the int overload in favour of the Builtin one.
return TailCallBuiltin(static_cast<int>(builtin));
}
void TailCallBuiltin(int builtin_index);
@ -257,7 +256,7 @@ class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase {
// The return address on the stack is used by frame iteration.
void StoreReturnAddressAndCall(Register target);
void CallForDeoptimization(Builtins::Name target, int deopt_id, Label* exit,
void CallForDeoptimization(Builtin target, int deopt_id, Label* exit,
DeoptimizeKind kind, Label* ret,
Label* jump_deoptimization_entry_label);

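The two TODO(11527) comments in the header hunk above describe a transitional pattern: the new CallBuiltin(Builtin) and TailCallBuiltin(Builtin) overloads simply forward to the existing int-based overloads until all callers stop passing raw builtin ids. A minimal standalone sketch of that forwarding pattern follows; it is not V8 code, and the enum values and the AssemblerSketch type are stand-ins for illustration only.

#include <cstdio>

// Stand-in plain enum; in V8 the real Builtin enum is generated from the
// builtins list, and kNoBuiltinId's value here is a placeholder.
enum Builtin : int {
  kNoBuiltinId = -1,
  kAbort,
  kDoubleToI,
};

struct AssemblerSketch {
  // Pre-existing int-based entry point.
  void CallBuiltin(int builtin_index) {
    std::printf("call builtin %d\n", builtin_index);
  }
  // New overload: forwards to the int overload while callers migrate.
  void CallBuiltin(Builtin builtin) {
    CallBuiltin(static_cast<int>(builtin));
  }
};

int main() {
  AssemblerSketch masm;
  masm.CallBuiltin(Builtin::kDoubleToI);  // the new spelling used throughout the CL
}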

@ -401,14 +401,14 @@ void TurboAssembler::Jump(Handle<Code> code, RelocInfo::Mode rmode,
DCHECK_IMPLIES(options().isolate_independent_code,
Builtins::IsIsolateIndependentBuiltin(*code));
int builtin_index = Builtins::kNoBuiltinId;
int builtin_index = Builtin::kNoBuiltinId;
bool target_is_builtin =
isolate()->builtins()->IsBuiltinHandle(code, &builtin_index);
if (options().inline_offheap_trampolines && target_is_builtin) {
// Inline the trampoline.
RecordCommentForOffHeapTrampoline(builtin_index);
CHECK_NE(builtin_index, Builtins::kNoBuiltinId);
CHECK_NE(builtin_index, Builtin::kNoBuiltinId);
EmbeddedData d = EmbeddedData::FromBlob();
Address entry = d.InstructionStartOfBuiltin(builtin_index);
mov(ip, Operand(entry, RelocInfo::OFF_HEAP_TARGET));
@ -462,14 +462,14 @@ void TurboAssembler::Call(Handle<Code> code, RelocInfo::Mode rmode,
DCHECK_IMPLIES(options().isolate_independent_code,
Builtins::IsIsolateIndependentBuiltin(*code));
int builtin_index = Builtins::kNoBuiltinId;
int builtin_index = Builtin::kNoBuiltinId;
bool target_is_builtin =
isolate()->builtins()->IsBuiltinHandle(code, &builtin_index);
if (target_is_builtin && options().inline_offheap_trampolines) {
// Inline the trampoline.
RecordCommentForOffHeapTrampoline(builtin_index);
CHECK_NE(builtin_index, Builtins::kNoBuiltinId);
CHECK_NE(builtin_index, Builtin::kNoBuiltinId);
EmbeddedData d = EmbeddedData::FromBlob();
Address entry = d.InstructionStartOfBuiltin(builtin_index);
mov(ip, Operand(entry, RelocInfo::OFF_HEAP_TARGET));
@ -935,7 +935,7 @@ void TurboAssembler::CallRecordWriteStub(
Builtins::GetRecordWriteStub(remembered_set_action, fp_mode);
if (options().inline_offheap_trampolines) {
RecordCommentForOffHeapTrampoline(builtin_index);
CHECK_NE(builtin_index, Builtins::kNoBuiltinId);
CHECK_NE(builtin_index, Builtin::kNoBuiltinId);
EmbeddedData d = EmbeddedData::FromBlob();
Address entry = d.InstructionStartOfBuiltin(builtin_index);
mov(ip, Operand(entry, RelocInfo::OFF_HEAP_TARGET));
@ -4723,9 +4723,9 @@ void TurboAssembler::StoreReturnAddressAndCall(Register target) {
bind(&return_label);
}
void TurboAssembler::CallForDeoptimization(Builtins::Name target, int,
Label* exit, DeoptimizeKind kind,
Label* ret, Label*) {
void TurboAssembler::CallForDeoptimization(Builtin target, int, Label* exit,
DeoptimizeKind kind, Label* ret,
Label*) {
LoadU64(ip, MemOperand(kRootRegister,
IsolateData::builtin_entry_slot_offset(target)));
Call(ip);


@ -103,7 +103,7 @@ class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase {
void Ret() { b(r14); }
void Ret(Condition cond) { b(cond, r14); }
void CallForDeoptimization(Builtins::Name target, int deopt_id, Label* exit,
void CallForDeoptimization(Builtin target, int deopt_id, Label* exit,
DeoptimizeKind kind, Label* ret,
Label* jump_deoptimization_entry_label);


@ -38,14 +38,13 @@ void TurboAssemblerBase::IndirectLoadConstant(Register destination,
LoadRoot(destination, root_index);
} else if (isolate()->builtins()->IsBuiltinHandle(object, &builtin_index)) {
// Similar to roots, builtins may be loaded from the builtins table.
LoadRootRelative(destination,
RootRegisterOffsetForBuiltinIndex(builtin_index));
LoadRootRelative(destination, RootRegisterOffsetForBuiltin(builtin_index));
} else if (object.is_identical_to(code_object_) &&
Builtins::IsBuiltinId(maybe_builtin_index_)) {
// The self-reference loaded through Codevalue() may also be a builtin
// and thus viable for a fast load.
LoadRootRelative(destination,
RootRegisterOffsetForBuiltinIndex(maybe_builtin_index_));
RootRegisterOffsetForBuiltin(maybe_builtin_index_));
} else {
CHECK(isolate()->IsGeneratingEmbeddedBuiltins());
// Ensure the given object is in the builtins constants table and fetch its
@ -84,8 +83,7 @@ int32_t TurboAssemblerBase::RootRegisterOffsetForRootIndex(
}
// static
int32_t TurboAssemblerBase::RootRegisterOffsetForBuiltinIndex(
int builtin_index) {
int32_t TurboAssemblerBase::RootRegisterOffsetForBuiltin(int builtin_index) {
return IsolateData::builtin_slot_offset(builtin_index);
}

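For context on RootRegisterOffsetForBuiltin above: the builtins table sits at a fixed offset inside IsolateData, so a builtin's slot offset is a linear function of its id, consistent with the shift-and-add sequence in LoadCodeObjectEntry earlier in this diff. A hedged sketch of that arithmetic follows; the table-start constant is a made-up placeholder, not V8's real IsolateData layout.

#include <cstdint>

constexpr int32_t kSystemPointerSize = 8;            // 64-bit targets in this diff
constexpr int32_t kBuiltinTableStartOffset = 0x100;  // placeholder value

// Stand-in for IsolateData::builtin_slot_offset(builtin_index).
constexpr int32_t BuiltinSlotOffset(int builtin_index) {
  return kBuiltinTableStartOffset + builtin_index * kSystemPointerSize;
}

static_assert(BuiltinSlotOffset(3) - BuiltinSlotOffset(2) == kSystemPointerSize,
              "consecutive builtins occupy adjacent pointer-sized slots");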

@ -77,7 +77,7 @@ class V8_EXPORT_PRIVATE TurboAssemblerBase : public Assembler {
virtual void LoadRoot(Register destination, RootIndex index) = 0;
static int32_t RootRegisterOffsetForRootIndex(RootIndex root_index);
static int32_t RootRegisterOffsetForBuiltinIndex(int builtin_index);
static int32_t RootRegisterOffsetForBuiltin(int builtin_index);
// Returns the root-relative offset to reference.address().
static intptr_t RootRegisterOffsetForExternalReference(
@ -124,15 +124,15 @@ class V8_EXPORT_PRIVATE TurboAssemblerBase : public Assembler {
bool hard_abort_ = false;
// May be set while generating builtins.
int maybe_builtin_index_ = Builtins::kNoBuiltinId;
int maybe_builtin_index_ = Builtin::kNoBuiltinId;
bool has_frame_ = false;
DISALLOW_IMPLICIT_CONSTRUCTORS(TurboAssemblerBase);
};
// Avoids emitting calls to the {Builtins::kAbort} builtin when emitting debug
// code during the lifetime of this scope object.
// Avoids emitting calls to the {Builtin::kAbort} builtin when emitting
// debug code during the lifetime of this scope object.
class V8_NODISCARD HardAbortScope {
public:
explicit HardAbortScope(TurboAssemblerBase* assembler)


@ -1640,7 +1640,7 @@ void TurboAssembler::Jump(Handle<Code> code_object, RelocInfo::Mode rmode,
DCHECK_IMPLIES(options().isolate_independent_code,
Builtins::IsIsolateIndependentBuiltin(*code_object));
if (options().inline_offheap_trampolines) {
int builtin_index = Builtins::kNoBuiltinId;
int builtin_index = Builtin::kNoBuiltinId;
if (isolate()->builtins()->IsBuiltinHandle(code_object, &builtin_index)) {
Label skip;
if (cc != always) {
@ -1683,7 +1683,7 @@ void TurboAssembler::Call(Handle<Code> code_object, RelocInfo::Mode rmode) {
DCHECK_IMPLIES(options().isolate_independent_code,
Builtins::IsIsolateIndependentBuiltin(*code_object));
if (options().inline_offheap_trampolines) {
int builtin_index = Builtins::kNoBuiltinId;
int builtin_index = Builtin::kNoBuiltinId;
if (isolate()->builtins()->IsBuiltinHandle(code_object, &builtin_index)) {
// Inline the trampoline.
CallBuiltin(builtin_index);
@ -1694,8 +1694,7 @@ void TurboAssembler::Call(Handle<Code> code_object, RelocInfo::Mode rmode) {
call(code_object, rmode);
}
Operand TurboAssembler::EntryFromBuiltinIndexAsOperand(
Builtins::Name builtin_index) {
Operand TurboAssembler::EntryFromBuiltinAsOperand(Builtin builtin_index) {
DCHECK(root_array_available());
return Operand(kRootRegister,
IsolateData::builtin_entry_slot_offset(builtin_index));
@ -1725,7 +1724,7 @@ void TurboAssembler::CallBuiltinByIndex(Register builtin_index) {
void TurboAssembler::CallBuiltin(int builtin_index) {
DCHECK(Builtins::IsBuiltinId(builtin_index));
RecordCommentForOffHeapTrampoline(builtin_index);
CHECK_NE(builtin_index, Builtins::kNoBuiltinId);
CHECK_NE(builtin_index, Builtin::kNoBuiltinId);
if (options().short_builtin_calls) {
EmbeddedData d = EmbeddedData::FromBlob(isolate());
Address entry = d.InstructionStartOfBuiltin(builtin_index);
@ -1743,7 +1742,7 @@ void TurboAssembler::CallBuiltin(int builtin_index) {
void TurboAssembler::TailCallBuiltin(int builtin_index) {
DCHECK(Builtins::IsBuiltinId(builtin_index));
RecordCommentForOffHeapTrampoline(builtin_index);
CHECK_NE(builtin_index, Builtins::kNoBuiltinId);
CHECK_NE(builtin_index, Builtin::kNoBuiltinId);
if (options().short_builtin_calls) {
EmbeddedData d = EmbeddedData::FromBlob(isolate());
Address entry = d.InstructionStartOfBuiltin(builtin_index);
@ -3359,13 +3358,13 @@ void TurboAssembler::ResetSpeculationPoisonRegister() {
Move(kSpeculationPoisonRegister, -1);
}
void TurboAssembler::CallForDeoptimization(Builtins::Name target, int,
Label* exit, DeoptimizeKind kind,
Label* ret, Label*) {
void TurboAssembler::CallForDeoptimization(Builtin target, int, Label* exit,
DeoptimizeKind kind, Label* ret,
Label*) {
// Note: Assembler::call is used here on purpose to guarantee fixed-size
// exits even on Atom CPUs; see TurboAssembler::Call for Atom-specific
// performance tuning which emits a different instruction sequence.
call(EntryFromBuiltinIndexAsOperand(target));
call(EntryFromBuiltinAsOperand(target));
DCHECK_EQ(SizeOfCodeGeneratedSince(exit),
(kind == DeoptimizeKind::kLazy)
? Deoptimizer::kLazyDeoptExitSize


@ -340,16 +340,16 @@ class V8_EXPORT_PRIVATE TurboAssembler : public SharedTurboAssembler {
void Call(ExternalReference ext);
void Call(Label* target) { call(target); }
Operand EntryFromBuiltinIndexAsOperand(Builtins::Name builtin_index);
Operand EntryFromBuiltinAsOperand(Builtin builtin_index);
Operand EntryFromBuiltinIndexAsOperand(Register builtin_index);
void CallBuiltinByIndex(Register builtin_index);
void CallBuiltin(Builtins::Name builtin) {
// TODO(11527): drop the int overload in favour of the Builtins::Name one.
void CallBuiltin(Builtin builtin) {
// TODO(11527): drop the int overload in favour of the Builtin one.
return CallBuiltin(static_cast<int>(builtin));
}
void CallBuiltin(int builtin_index);
void TailCallBuiltin(Builtins::Name builtin) {
// TODO(11527): drop the int overload in favour of the Builtins::Name one.
void TailCallBuiltin(Builtin builtin) {
// TODO(11527): drop the int overload in favour of the Builtin one.
return TailCallBuiltin(static_cast<int>(builtin));
}
void TailCallBuiltin(int builtin_index);
@ -370,7 +370,7 @@ class V8_EXPORT_PRIVATE TurboAssembler : public SharedTurboAssembler {
void RetpolineJump(Register reg);
void CallForDeoptimization(Builtins::Name target, int deopt_id, Label* exit,
void CallForDeoptimization(Builtin target, int deopt_id, Label* exit,
DeoptimizeKind kind, Label* ret,
Label* jump_deoptimization_entry_label);


@ -910,9 +910,8 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
// We don't actually want to generate a pile of code for this, so just
// claim there is a stack frame, without generating one.
FrameScope scope(tasm(), StackFrame::NONE);
__ Call(
isolate()->builtins()->builtin_handle(Builtins::kAbortCSAAssert),
RelocInfo::CODE_TARGET);
__ Call(isolate()->builtins()->builtin_handle(Builtin::kAbortCSAAssert),
RelocInfo::CODE_TARGET);
}
__ stop();
unwinding_info_writer_.MarkBlockWillExit();


@ -942,9 +942,8 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
// We don't actually want to generate a pile of code for this, so just
// claim there is a stack frame, without generating one.
FrameScope scope(tasm(), StackFrame::NONE);
__ Call(
isolate()->builtins()->builtin_handle(Builtins::kAbortCSAAssert),
RelocInfo::CODE_TARGET);
__ Call(isolate()->builtins()->builtin_handle(Builtin::kAbortCSAAssert),
RelocInfo::CODE_TARGET);
}
__ Debug("kArchAbortCSAAssert", 0, BREAK);
unwinding_info_writer_.MarkBlockWillExit();
@ -3388,12 +3387,12 @@ void CodeGenerator::PrepareForDeoptimizationExits(
__ bind(&jump_deoptimization_entry_labels_[i]);
DeoptimizeKind kind = static_cast<DeoptimizeKind>(i);
if (kind == DeoptimizeKind::kEagerWithResume) {
__ LoadEntryFromBuiltinIndex(
__ LoadEntryFromBuiltin(
Deoptimizer::GetDeoptWithResumeBuiltin(eager_with_resume_reason),
scratch);
} else {
__ LoadEntryFromBuiltinIndex(Deoptimizer::GetDeoptimizationEntry(kind),
scratch);
__ LoadEntryFromBuiltin(Deoptimizer::GetDeoptimizationEntry(kind),
scratch);
}
__ Jump(scratch);
}


@ -228,7 +228,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleDeoptimizerCall(
}
tasm()->bind(exit->label());
}
Builtins::Name target =
Builtin target =
deopt_kind == DeoptimizeKind::kEagerWithResume
? Deoptimizer::GetDeoptWithResumeBuiltin(deoptimization_reason)
: Deoptimizer::GetDeoptimizationEntry(deopt_kind);


@ -283,7 +283,7 @@ class OutOfLineTruncateDoubleToI final : public OutOfLineCode {
if (false) {
#endif // V8_ENABLE_WEBASSEMBLY
} else if (tasm()->options().inline_offheap_trampolines) {
__ CallBuiltin(Builtins::kDoubleToI);
__ CallBuiltin(Builtin::kDoubleToI);
} else {
__ Call(BUILTIN_CODE(isolate_, DoubleToI), RelocInfo::CODE_TARGET);
}
@ -928,9 +928,8 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
// We don't actually want to generate a pile of code for this, so just
// claim there is a stack frame, without generating one.
FrameScope scope(tasm(), StackFrame::NONE);
__ Call(
isolate()->builtins()->builtin_handle(Builtins::kAbortCSAAssert),
RelocInfo::CODE_TARGET);
__ Call(isolate()->builtins()->builtin_handle(Builtin::kAbortCSAAssert),
RelocInfo::CODE_TARGET);
}
__ int3();
break;


@ -851,9 +851,8 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
// We don't actually want to generate a pile of code for this, so just
// claim there is a stack frame, without generating one.
FrameScope scope(tasm(), StackFrame::NONE);
__ Call(
isolate()->builtins()->builtin_handle(Builtins::kAbortCSAAssert),
RelocInfo::CODE_TARGET);
__ Call(isolate()->builtins()->builtin_handle(Builtin::kAbortCSAAssert),
RelocInfo::CODE_TARGET);
}
__ stop();
break;


@ -813,9 +813,8 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
// We don't actually want to generate a pile of code for this, so just
// claim there is a stack frame, without generating one.
FrameScope scope(tasm(), StackFrame::NONE);
__ Call(
isolate()->builtins()->builtin_handle(Builtins::kAbortCSAAssert),
RelocInfo::CODE_TARGET);
__ Call(isolate()->builtins()->builtin_handle(Builtin::kAbortCSAAssert),
RelocInfo::CODE_TARGET);
}
__ stop();
break;


@ -1121,9 +1121,8 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
// We don't actually want to generate a pile of code for this, so just
// claim there is a stack frame, without generating one.
FrameScope scope(tasm(), StackFrame::NONE);
__ Call(
isolate()->builtins()->builtin_handle(Builtins::kAbortCSAAssert),
RelocInfo::CODE_TARGET);
__ Call(isolate()->builtins()->builtin_handle(Builtin::kAbortCSAAssert),
RelocInfo::CODE_TARGET);
}
__ stop();
break;


@ -786,9 +786,8 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
// We don't actually want to generate a pile of code for this, so just
// claim there is a stack frame, without generating one.
FrameScope scope(tasm(), StackFrame::NONE);
__ Call(
isolate()->builtins()->builtin_handle(Builtins::kAbortCSAAssert),
RelocInfo::CODE_TARGET);
__ Call(isolate()->builtins()->builtin_handle(Builtin::kAbortCSAAssert),
RelocInfo::CODE_TARGET);
}
__ stop();
break;


@ -1297,9 +1297,8 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
// We don't actually want to generate a pile of code for this, so just
// claim there is a stack frame, without generating one.
FrameScope scope(tasm(), StackFrame::NONE);
__ Call(
isolate()->builtins()->builtin_handle(Builtins::kAbortCSAAssert),
RelocInfo::CODE_TARGET);
__ Call(isolate()->builtins()->builtin_handle(Builtin::kAbortCSAAssert),
RelocInfo::CODE_TARGET);
}
__ stop();
break;


@ -237,7 +237,7 @@ class OutOfLineTruncateDoubleToI final : public OutOfLineCode {
} else if (tasm()->options().inline_offheap_trampolines) {
// With embedded builtins we do not need the isolate here. This allows
// the call to be generated asynchronously.
__ CallBuiltin(Builtins::kDoubleToI);
__ CallBuiltin(Builtin::kDoubleToI);
} else {
__ Call(BUILTIN_CODE(isolate_, DoubleToI), RelocInfo::CODE_TARGET);
}
@ -1188,9 +1188,8 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
// We don't actually want to generate a pile of code for this, so just
// claim there is a stack frame, without generating one.
FrameScope scope(tasm(), StackFrame::NONE);
__ Call(
isolate()->builtins()->builtin_handle(Builtins::kAbortCSAAssert),
RelocInfo::CODE_TARGET);
__ Call(isolate()->builtins()->builtin_handle(Builtin::kAbortCSAAssert),
RelocInfo::CODE_TARGET);
}
__ int3();
unwinding_info_writer_.MarkBlockWillExit();


@ -2286,7 +2286,7 @@ void BytecodeGraphBuilder::VisitCreateClosure() {
const Operator* op = javascript()->CreateClosure(
shared_info.object(),
jsgraph()->isolate()->builtins()->builtin_handle(Builtins::kCompileLazy),
jsgraph()->isolate()->builtins()->builtin_handle(Builtin::kCompileLazy),
allocation);
Node* closure = NewNode(
op, BuildLoadFeedbackCell(bytecode_iterator().GetIndexOperand(1)));
@ -3661,7 +3661,7 @@ void BytecodeGraphBuilder::VisitIncBlockCounter() {
Node* coverage_array_slot =
jsgraph()->Constant(bytecode_iterator().GetIndexOperand(0));
// Lowered by js-intrinsic-lowering to call Builtins::kIncBlockCounter.
// Lowered by js-intrinsic-lowering to call Builtin::kIncBlockCounter.
const Operator* op =
javascript()->CallRuntime(Runtime::kInlineIncBlockCounter);


@ -1616,13 +1616,13 @@ class V8_EXPORT_PRIVATE CodeAssemblerState {
CodeAssemblerState(Isolate* isolate, Zone* zone,
const CallInterfaceDescriptor& descriptor, CodeKind kind,
const char* name, PoisoningMitigationLevel poisoning_level,
int32_t builtin_index = Builtins::kNoBuiltinId);
int32_t builtin_index = Builtin::kNoBuiltinId);
// Create with JSCall linkage.
CodeAssemblerState(Isolate* isolate, Zone* zone, int parameter_count,
CodeKind kind, const char* name,
PoisoningMitigationLevel poisoning_level,
int32_t builtin_index = Builtins::kNoBuiltinId);
int32_t builtin_index = Builtin::kNoBuiltinId);
~CodeAssemblerState();


@ -251,8 +251,7 @@ class EffectControlLinearizer {
Node* BuildTypedArrayDataPointer(Node* base, Node* external);
template <typename... Args>
Node* CallBuiltin(Builtins::Name builtin, Operator::Properties properties,
Args...);
Node* CallBuiltin(Builtin builtin, Operator::Properties properties, Args...);
Node* ChangeBitToTagged(Node* value);
Node* ChangeFloat64ToTagged(Node* value, CheckForMinusZeroMode mode);
@ -3075,7 +3074,7 @@ Node* EffectControlLinearizer::LowerNumberToString(Node* node) {
Node* argument = node->InputAt(0);
Callable const callable =
Builtins::CallableFor(isolate(), Builtins::kNumberToString);
Builtins::CallableFor(isolate(), Builtin::kNumberToString);
Operator::Properties properties = Operator::kEliminatable;
CallDescriptor::Flags flags = CallDescriptor::kNoFlags;
auto call_descriptor = Linkage::GetStubCallDescriptor(
@ -3571,7 +3570,7 @@ Node* EffectControlLinearizer::LowerObjectIsUndetectable(Node* node) {
Node* EffectControlLinearizer::LowerTypeOf(Node* node) {
Node* obj = node->InputAt(0);
Callable const callable = Builtins::CallableFor(isolate(), Builtins::kTypeof);
Callable const callable = Builtins::CallableFor(isolate(), Builtin::kTypeof);
Operator::Properties const properties = Operator::kEliminatable;
CallDescriptor::Flags const flags = CallDescriptor::kNoAllocate;
auto call_descriptor = Linkage::GetStubCallDescriptor(
@ -3652,7 +3651,7 @@ void EffectControlLinearizer::LowerUpdateInterruptBudget(Node* node) {
&if_budget_exhausted, &next);
__ Bind(&if_budget_exhausted);
CallBuiltin(Builtins::kBytecodeBudgetInterruptFromCode,
CallBuiltin(Builtin::kBytecodeBudgetInterruptFromCode,
node->op()->properties(), feedback_cell);
__ Goto(&next);
@ -3663,7 +3662,7 @@ void EffectControlLinearizer::LowerUpdateInterruptBudget(Node* node) {
Node* EffectControlLinearizer::LowerToBoolean(Node* node) {
Node* obj = node->InputAt(0);
Callable const callable =
Builtins::CallableFor(isolate(), Builtins::kToBoolean);
Builtins::CallableFor(isolate(), Builtin::kToBoolean);
Operator::Properties const properties = Operator::kEliminatable;
CallDescriptor::Flags const flags = CallDescriptor::kNoAllocate;
auto call_descriptor = Linkage::GetStubCallDescriptor(
@ -3799,16 +3798,16 @@ Node* EffectControlLinearizer::LowerNewArgumentsElements(Node* node) {
CallDescriptor::Flags const flags = CallDescriptor::kNoFlags;
Node* frame = __ LoadFramePointer();
Node* arguments_count = NodeProperties::GetValueInput(node, 0);
Builtins::Name builtin_name;
Builtin builtin_name;
switch (type) {
case CreateArgumentsType::kMappedArguments:
builtin_name = Builtins::kNewSloppyArgumentsElements;
builtin_name = Builtin::kNewSloppyArgumentsElements;
break;
case CreateArgumentsType::kUnmappedArguments:
builtin_name = Builtins::kNewStrictArgumentsElements;
builtin_name = Builtin::kNewStrictArgumentsElements;
break;
case CreateArgumentsType::kRestParameter:
builtin_name = Builtins::kNewRestArgumentsElements;
builtin_name = Builtin::kNewRestArgumentsElements;
break;
}
Callable const callable = Builtins::CallableFor(isolate(), builtin_name);
@ -3871,7 +3870,7 @@ Node* EffectControlLinearizer::LowerSameValue(Node* node) {
Node* rhs = node->InputAt(1);
Callable const callable =
Builtins::CallableFor(isolate(), Builtins::kSameValue);
Builtins::CallableFor(isolate(), Builtin::kSameValue);
Operator::Properties properties = Operator::kEliminatable;
CallDescriptor::Flags flags = CallDescriptor::kNoFlags;
auto call_descriptor = Linkage::GetStubCallDescriptor(
@ -3886,7 +3885,7 @@ Node* EffectControlLinearizer::LowerSameValueNumbersOnly(Node* node) {
Node* rhs = node->InputAt(1);
Callable const callable =
Builtins::CallableFor(isolate(), Builtins::kSameValueNumbersOnly);
Builtins::CallableFor(isolate(), Builtin::kSameValueNumbersOnly);
Operator::Properties properties = Operator::kEliminatable;
CallDescriptor::Flags flags = CallDescriptor::kNoFlags;
auto call_descriptor = Linkage::GetStubCallDescriptor(
@ -3938,7 +3937,7 @@ Node* EffectControlLinearizer::LowerStringToNumber(Node* node) {
Node* string = node->InputAt(0);
Callable const callable =
Builtins::CallableFor(isolate(), Builtins::kStringToNumber);
Builtins::CallableFor(isolate(), Builtin::kStringToNumber);
Operator::Properties properties = Operator::kEliminatable;
CallDescriptor::Flags flags = CallDescriptor::kNoFlags;
auto call_descriptor = Linkage::GetStubCallDescriptor(
@ -4102,7 +4101,7 @@ Node* EffectControlLinearizer::LowerStringCodePointAt(Node* node) {
Node* position = node->InputAt(1);
Callable const callable =
Builtins::CallableFor(isolate(), Builtins::kStringCodePointAt);
Builtins::CallableFor(isolate(), Builtin::kStringCodePointAt);
Operator::Properties properties = Operator::kNoThrow | Operator::kNoWrite;
CallDescriptor::Flags flags = CallDescriptor::kNoFlags;
auto call_descriptor = Linkage::GetStubCallDescriptor(
@ -4215,7 +4214,7 @@ Node* EffectControlLinearizer::LowerStringToLowerCaseIntl(Node* node) {
Node* receiver = node->InputAt(0);
Callable callable =
Builtins::CallableFor(isolate(), Builtins::kStringToLowerCaseIntl);
Builtins::CallableFor(isolate(), Builtin::kStringToLowerCaseIntl);
Operator::Properties properties = Operator::kNoDeopt | Operator::kNoThrow;
CallDescriptor::Flags flags = CallDescriptor::kNoFlags;
auto call_descriptor = Linkage::GetStubCallDescriptor(
@ -4380,8 +4379,7 @@ Node* EffectControlLinearizer::LowerStringIndexOf(Node* node) {
Node* search_string = node->InputAt(1);
Node* position = node->InputAt(2);
Callable callable =
Builtins::CallableFor(isolate(), Builtins::kStringIndexOf);
Callable callable = Builtins::CallableFor(isolate(), Builtin::kStringIndexOf);
Operator::Properties properties = Operator::kEliminatable;
CallDescriptor::Flags flags = CallDescriptor::kNoFlags;
auto call_descriptor = Linkage::GetStubCallDescriptor(
@ -4396,7 +4394,7 @@ Node* EffectControlLinearizer::LowerStringFromCodePointAt(Node* node) {
Node* index = node->InputAt(1);
Callable callable =
Builtins::CallableFor(isolate(), Builtins::kStringFromCodePointAt);
Builtins::CallableFor(isolate(), Builtin::kStringFromCodePointAt);
Operator::Properties properties = Operator::kEliminatable;
CallDescriptor::Flags flags = CallDescriptor::kNoFlags;
auto call_descriptor = Linkage::GetStubCallDescriptor(
@ -4432,7 +4430,7 @@ Node* EffectControlLinearizer::LowerStringSubstring(Node* node) {
Node* end = ChangeInt32ToIntPtr(node->InputAt(2));
Callable callable =
Builtins::CallableFor(isolate(), Builtins::kStringSubstring);
Builtins::CallableFor(isolate(), Builtin::kStringSubstring);
Operator::Properties properties = Operator::kEliminatable;
CallDescriptor::Flags flags = CallDescriptor::kNoFlags;
auto call_descriptor = Linkage::GetStubCallDescriptor(
@ -4444,17 +4442,17 @@ Node* EffectControlLinearizer::LowerStringSubstring(Node* node) {
Node* EffectControlLinearizer::LowerStringEqual(Node* node) {
return LowerStringComparison(
Builtins::CallableFor(isolate(), Builtins::kStringEqual), node);
Builtins::CallableFor(isolate(), Builtin::kStringEqual), node);
}
Node* EffectControlLinearizer::LowerStringLessThan(Node* node) {
return LowerStringComparison(
Builtins::CallableFor(isolate(), Builtins::kStringLessThan), node);
Builtins::CallableFor(isolate(), Builtin::kStringLessThan), node);
}
Node* EffectControlLinearizer::LowerStringLessThanOrEqual(Node* node) {
return LowerStringComparison(
Builtins::CallableFor(isolate(), Builtins::kStringLessThanOrEqual), node);
Builtins::CallableFor(isolate(), Builtin::kStringLessThanOrEqual), node);
}
Node* EffectControlLinearizer::LowerBigIntAdd(Node* node, Node* frame_state) {
@ -4462,7 +4460,7 @@ Node* EffectControlLinearizer::LowerBigIntAdd(Node* node, Node* frame_state) {
Node* rhs = node->InputAt(1);
Callable const callable =
Builtins::CallableFor(isolate(), Builtins::kBigIntAddNoThrow);
Builtins::CallableFor(isolate(), Builtin::kBigIntAddNoThrow);
auto call_descriptor = Linkage::GetStubCallDescriptor(
graph()->zone(), callable.descriptor(),
callable.descriptor().GetStackParameterCount(), CallDescriptor::kNoFlags,
@ -4483,7 +4481,7 @@ Node* EffectControlLinearizer::LowerBigIntSubtract(Node* node,
Node* rhs = node->InputAt(1);
Callable const callable =
Builtins::CallableFor(isolate(), Builtins::kBigIntSubtractNoThrow);
Builtins::CallableFor(isolate(), Builtin::kBigIntSubtractNoThrow);
auto call_descriptor = Linkage::GetStubCallDescriptor(
graph()->zone(), callable.descriptor(),
callable.descriptor().GetStackParameterCount(), CallDescriptor::kNoFlags,
@ -4500,7 +4498,7 @@ Node* EffectControlLinearizer::LowerBigIntSubtract(Node* node,
Node* EffectControlLinearizer::LowerBigIntNegate(Node* node) {
Callable const callable =
Builtins::CallableFor(isolate(), Builtins::kBigIntUnaryMinus);
Builtins::CallableFor(isolate(), Builtin::kBigIntUnaryMinus);
auto call_descriptor = Linkage::GetStubCallDescriptor(
graph()->zone(), callable.descriptor(),
callable.descriptor().GetStackParameterCount(), CallDescriptor::kNoFlags,
@ -4849,7 +4847,7 @@ Node* EffectControlLinearizer::LowerEnsureWritableFastElements(Node* node) {
// We need to take a copy of the {elements} and set them up for {object}.
Operator::Properties properties = Operator::kEliminatable;
Callable callable =
Builtins::CallableFor(isolate(), Builtins::kCopyFastSmiOrObjectElements);
Builtins::CallableFor(isolate(), Builtin::kCopyFastSmiOrObjectElements);
CallDescriptor::Flags flags = CallDescriptor::kNoFlags;
auto call_descriptor = Linkage::GetStubCallDescriptor(
graph()->zone(), callable.descriptor(),
@ -4884,9 +4882,9 @@ Node* EffectControlLinearizer::LowerMaybeGrowFastElements(Node* node,
Operator::Properties properties = Operator::kEliminatable;
Callable callable =
(params.mode() == GrowFastElementsMode::kDoubleElements)
? Builtins::CallableFor(isolate(), Builtins::kGrowFastDoubleElements)
? Builtins::CallableFor(isolate(), Builtin::kGrowFastDoubleElements)
: Builtins::CallableFor(isolate(),
Builtins::kGrowFastSmiOrObjectElements);
Builtin::kGrowFastSmiOrObjectElements);
CallDescriptor::Flags call_flags = CallDescriptor::kNoFlags;
auto call_descriptor = Linkage::GetStubCallDescriptor(
graph()->zone(), callable.descriptor(),
@ -5852,7 +5850,7 @@ void EffectControlLinearizer::LowerRuntimeAbort(Node* node) {
}
template <typename... Args>
Node* EffectControlLinearizer::CallBuiltin(Builtins::Name builtin,
Node* EffectControlLinearizer::CallBuiltin(Builtin builtin,
Operator::Properties properties,
Args... args) {
Callable const callable = Builtins::CallableFor(isolate(), builtin);
@ -5871,7 +5869,7 @@ Node* EffectControlLinearizer::LowerAssertType(Node* node) {
Node* const input = node->InputAt(0);
Node* const min = __ NumberConstant(type.Min());
Node* const max = __ NumberConstant(type.Max());
CallBuiltin(Builtins::kCheckNumberInRange, node->op()->properties(), input,
CallBuiltin(Builtin::kCheckNumberInRange, node->op()->properties(), input,
min, max, __ SmiConstant(node->id()));
return input;
}
@ -5880,7 +5878,7 @@ Node* EffectControlLinearizer::LowerFoldConstant(Node* node) {
DCHECK_EQ(node->opcode(), IrOpcode::kFoldConstant);
Node* original = node->InputAt(0);
Node* constant = node->InputAt(1);
CallBuiltin(Builtins::kCheckSameObject, node->op()->properties(), original,
CallBuiltin(Builtin::kCheckSameObject, node->op()->properties(), original,
constant);
return constant;
}
@ -5912,7 +5910,7 @@ Node* EffectControlLinearizer::LowerConvertReceiver(Node* node) {
// Wrap the primitive {value} into a JSPrimitiveWrapper.
__ Bind(&convert_to_object);
Operator::Properties properties = Operator::kEliminatable;
Callable callable = Builtins::CallableFor(isolate(), Builtins::kToObject);
Callable callable = Builtins::CallableFor(isolate(), Builtin::kToObject);
CallDescriptor::Flags flags = CallDescriptor::kNoFlags;
auto call_descriptor = Linkage::GetStubCallDescriptor(
graph()->zone(), callable.descriptor(),
@ -5949,7 +5947,7 @@ Node* EffectControlLinearizer::LowerConvertReceiver(Node* node) {
__ GotoIf(__ TaggedEqual(value, __ NullConstant()),
&convert_global_proxy);
Operator::Properties properties = Operator::kEliminatable;
Callable callable = Builtins::CallableFor(isolate(), Builtins::kToObject);
Callable callable = Builtins::CallableFor(isolate(), Builtin::kToObject);
CallDescriptor::Flags flags = CallDescriptor::kNoFlags;
auto call_descriptor = Linkage::GetStubCallDescriptor(
graph()->zone(), callable.descriptor(),
@ -6310,7 +6308,7 @@ Node* EffectControlLinearizer::LowerFindOrderedHashMapEntry(Node* node) {
{
Callable const callable =
Builtins::CallableFor(isolate(), Builtins::kFindOrderedHashMapEntry);
Builtins::CallableFor(isolate(), Builtin::kFindOrderedHashMapEntry);
Operator::Properties const properties = node->op()->properties();
CallDescriptor::Flags const flags = CallDescriptor::kNoFlags;
auto call_descriptor = Linkage::GetStubCallDescriptor(


@ -111,8 +111,8 @@ uint8_t DeoptimizerParameterCountFor(ContinuationFrameStateMode mode) {
}
FrameState CreateBuiltinContinuationFrameStateCommon(
JSGraph* jsgraph, FrameStateType frame_type, Builtins::Name name,
Node* closure, Node* context, Node** parameters, int parameter_count,
JSGraph* jsgraph, FrameStateType frame_type, Builtin name, Node* closure,
Node* context, Node** parameters, int parameter_count,
Node* outer_frame_state,
Handle<SharedFunctionInfo> shared = Handle<SharedFunctionInfo>(),
const wasm::FunctionSig* signature = nullptr) {
@ -147,8 +147,8 @@ FrameState CreateBuiltinContinuationFrameStateCommon(
} // namespace
FrameState CreateStubBuiltinContinuationFrameState(
JSGraph* jsgraph, Builtins::Name name, Node* context,
Node* const* parameters, int parameter_count, Node* outer_frame_state,
JSGraph* jsgraph, Builtin name, Node* context, Node* const* parameters,
int parameter_count, Node* outer_frame_state,
ContinuationFrameStateMode mode, const wasm::FunctionSig* signature) {
Callable callable = Builtins::CallableFor(jsgraph->isolate(), name);
CallInterfaceDescriptor descriptor = callable.descriptor();
@ -180,7 +180,7 @@ FrameState CreateStubBuiltinContinuationFrameState(
FrameStateType frame_state_type = FrameStateType::kBuiltinContinuation;
#if V8_ENABLE_WEBASSEMBLY
if (name == Builtins::kJSToWasmLazyDeoptContinuation) {
if (name == Builtin::kJSToWasmLazyDeoptContinuation) {
CHECK_NOT_NULL(signature);
frame_state_type = FrameStateType::kJSToWasmBuiltinContinuation;
}
@ -201,14 +201,14 @@ FrameState CreateJSWasmCallBuiltinContinuationFrameState(
jsgraph->SmiConstant(wasm_return_kind ? wasm_return_kind.value() : -1);
Node* lazy_deopt_parameters[] = {node_return_type};
return CreateStubBuiltinContinuationFrameState(
jsgraph, Builtins::kJSToWasmLazyDeoptContinuation, context,
jsgraph, Builtin::kJSToWasmLazyDeoptContinuation, context,
lazy_deopt_parameters, arraysize(lazy_deopt_parameters),
outer_frame_state, ContinuationFrameStateMode::LAZY, signature);
}
#endif // V8_ENABLE_WEBASSEMBLY
FrameState CreateJavaScriptBuiltinContinuationFrameState(
JSGraph* jsgraph, const SharedFunctionInfoRef& shared, Builtins::Name name,
JSGraph* jsgraph, const SharedFunctionInfoRef& shared, Builtin name,
Node* target, Node* context, Node* const* stack_parameters,
int stack_parameter_count, Node* outer_frame_state,
ContinuationFrameStateMode mode) {
@ -252,7 +252,7 @@ FrameState CreateGenericLazyDeoptContinuationFrameState(
Node* stack_parameters[]{receiver};
const int stack_parameter_count = arraysize(stack_parameters);
return CreateJavaScriptBuiltinContinuationFrameState(
graph, shared, Builtins::kGenericLazyDeoptContinuation, target, context,
graph, shared, Builtin::kGenericLazyDeoptContinuation, target, context,
stack_parameters, stack_parameter_count, outer_frame_state,
ContinuationFrameStateMode::LAZY);
}


@ -174,7 +174,7 @@ enum class ContinuationFrameStateMode { EAGER, LAZY, LAZY_WITH_CATCH };
class FrameState;
FrameState CreateStubBuiltinContinuationFrameState(
JSGraph* graph, Builtins::Name name, Node* context, Node* const* parameters,
JSGraph* graph, Builtin name, Node* context, Node* const* parameters,
int parameter_count, Node* outer_frame_state,
ContinuationFrameStateMode mode,
const wasm::FunctionSig* signature = nullptr);
@ -186,7 +186,7 @@ FrameState CreateJSWasmCallBuiltinContinuationFrameState(
#endif // V8_ENABLE_WEBASSEMBLY
FrameState CreateJavaScriptBuiltinContinuationFrameState(
JSGraph* graph, const SharedFunctionInfoRef& shared, Builtins::Name name,
JSGraph* graph, const SharedFunctionInfoRef& shared, Builtin name,
Node* target, Node* context, Node* const* stack_parameters,
int stack_parameter_count, Node* outer_frame_state,
ContinuationFrameStateMode mode);


@ -1062,7 +1062,7 @@ void GraphAssembler::InitializeEffectControl(Node* effect, Node* control) {
Operator const* JSGraphAssembler::PlainPrimitiveToNumberOperator() {
if (!to_number_operator_.is_set()) {
Callable callable =
Builtins::CallableFor(isolate(), Builtins::kPlainPrimitiveToNumber);
Builtins::CallableFor(isolate(), Builtin::kPlainPrimitiveToNumber);
CallDescriptor::Flags flags = CallDescriptor::kNoFlags;
auto call_descriptor = Linkage::GetStubCallDescriptor(
graph()->zone(), callable.descriptor(),


@ -2007,7 +2007,7 @@ SharedFunctionInfoData::SharedFunctionInfoData(
Handle<SharedFunctionInfo> object)
: HeapObjectData(broker, storage, object),
builtin_id_(object->HasBuiltinId() ? object->builtin_id()
: Builtins::kNoBuiltinId),
: Builtin::kNoBuiltinId),
context_header_size_(object->scope_info().ContextHeaderLength()),
GetBytecodeArray_(object->HasBytecodeArray()
? broker->GetOrCreateData(
@ -2021,7 +2021,7 @@ SharedFunctionInfoData::SharedFunctionInfoData(
function_template_info_(nullptr),
template_objects_(broker->zone()),
scope_info_(nullptr) {
DCHECK_EQ(HasBuiltinId_, builtin_id_ != Builtins::kNoBuiltinId);
DCHECK_EQ(HasBuiltinId_, builtin_id_ != Builtin::kNoBuiltinId);
DCHECK_EQ(HasBytecodeArray_, GetBytecodeArray_ != nullptr);
}


@ -841,7 +841,7 @@ class PromiseBuiltinReducerAssembler : public JSCallReducerAssembler {
Handle<FeedbackCell> feedback_cell =
isolate()->factory()->many_closures_cell();
Callable const callable = Builtins::CallableFor(
isolate(), static_cast<Builtins::Name>(shared.builtin_id()));
isolate(), static_cast<Builtin>(shared.builtin_id()));
return AddNode<JSFunction>(graph()->NewNode(
javascript()->CreateClosure(shared.object(), callable.code()),
HeapConstant(feedback_cell), context, effect(), control()));
@ -1237,7 +1237,7 @@ struct ForEachFrameStateParams {
FrameState ForEachLoopLazyFrameState(const ForEachFrameStateParams& params,
TNode<Object> k) {
Builtins::Name builtin = Builtins::kArrayForEachLoopLazyDeoptContinuation;
Builtin builtin = Builtin::kArrayForEachLoopLazyDeoptContinuation;
Node* checkpoint_params[] = {params.receiver, params.callback,
params.this_arg, k, params.original_length};
return CreateJavaScriptBuiltinContinuationFrameState(
@ -1248,7 +1248,7 @@ FrameState ForEachLoopLazyFrameState(const ForEachFrameStateParams& params,
FrameState ForEachLoopEagerFrameState(const ForEachFrameStateParams& params,
TNode<Object> k) {
Builtins::Name builtin = Builtins::kArrayForEachLoopEagerDeoptContinuation;
Builtin builtin = Builtin::kArrayForEachLoopEagerDeoptContinuation;
Node* checkpoint_params[] = {params.receiver, params.callback,
params.this_arg, k, params.original_length};
return CreateJavaScriptBuiltinContinuationFrameState(
@ -1317,10 +1317,9 @@ FrameState ReducePreLoopLazyFrameState(const ReduceFrameStateParams& params,
TNode<Object> receiver,
TNode<Object> callback, TNode<Object> k,
TNode<Number> original_length) {
Builtins::Name builtin =
(params.direction == ArrayReduceDirection::kLeft)
? Builtins::kArrayReduceLoopLazyDeoptContinuation
: Builtins::kArrayReduceRightLoopLazyDeoptContinuation;
Builtin builtin = (params.direction == ArrayReduceDirection::kLeft)
? Builtin::kArrayReduceLoopLazyDeoptContinuation
: Builtin::kArrayReduceRightLoopLazyDeoptContinuation;
Node* checkpoint_params[] = {receiver, callback, k, original_length};
return CreateJavaScriptBuiltinContinuationFrameState(
params.jsgraph, params.shared, builtin, params.target, params.context,
@ -1332,10 +1331,10 @@ FrameState ReducePreLoopEagerFrameState(const ReduceFrameStateParams& params,
TNode<Object> receiver,
TNode<Object> callback,
TNode<Number> original_length) {
Builtins::Name builtin =
Builtin builtin =
(params.direction == ArrayReduceDirection::kLeft)
? Builtins::kArrayReducePreLoopEagerDeoptContinuation
: Builtins::kArrayReduceRightPreLoopEagerDeoptContinuation;
? Builtin::kArrayReducePreLoopEagerDeoptContinuation
: Builtin::kArrayReduceRightPreLoopEagerDeoptContinuation;
Node* checkpoint_params[] = {receiver, callback, original_length};
return CreateJavaScriptBuiltinContinuationFrameState(
params.jsgraph, params.shared, builtin, params.target, params.context,
@ -1347,10 +1346,9 @@ FrameState ReduceLoopLazyFrameState(const ReduceFrameStateParams& params,
TNode<Object> receiver,
TNode<Object> callback, TNode<Object> k,
TNode<Number> original_length) {
Builtins::Name builtin =
(params.direction == ArrayReduceDirection::kLeft)
? Builtins::kArrayReduceLoopLazyDeoptContinuation
: Builtins::kArrayReduceRightLoopLazyDeoptContinuation;
Builtin builtin = (params.direction == ArrayReduceDirection::kLeft)
? Builtin::kArrayReduceLoopLazyDeoptContinuation
: Builtin::kArrayReduceRightLoopLazyDeoptContinuation;
Node* checkpoint_params[] = {receiver, callback, k, original_length};
return CreateJavaScriptBuiltinContinuationFrameState(
params.jsgraph, params.shared, builtin, params.target, params.context,
@ -1363,10 +1361,9 @@ FrameState ReduceLoopEagerFrameState(const ReduceFrameStateParams& params,
TNode<Object> callback, TNode<Object> k,
TNode<Number> original_length,
TNode<Object> accumulator) {
Builtins::Name builtin =
(params.direction == ArrayReduceDirection::kLeft)
? Builtins::kArrayReduceLoopEagerDeoptContinuation
: Builtins::kArrayReduceRightLoopEagerDeoptContinuation;
Builtin builtin = (params.direction == ArrayReduceDirection::kLeft)
? Builtin::kArrayReduceLoopEagerDeoptContinuation
: Builtin::kArrayReduceRightLoopEagerDeoptContinuation;
Node* checkpoint_params[] = {receiver, callback, k, original_length,
accumulator};
return CreateJavaScriptBuiltinContinuationFrameState(
@ -1501,7 +1498,7 @@ FrameState MapPreLoopLazyFrameState(const MapFrameStateParams& params) {
params.this_arg, params.original_length};
return CreateJavaScriptBuiltinContinuationFrameState(
params.jsgraph, params.shared,
Builtins::kArrayMapPreLoopLazyDeoptContinuation, params.target,
Builtin::kArrayMapPreLoopLazyDeoptContinuation, params.target,
params.context, checkpoint_params, arraysize(checkpoint_params),
params.outer_frame_state, ContinuationFrameStateMode::LAZY);
}
@ -1513,7 +1510,7 @@ FrameState MapLoopLazyFrameState(const MapFrameStateParams& params,
params.original_length};
return CreateJavaScriptBuiltinContinuationFrameState(
params.jsgraph, params.shared,
Builtins::kArrayMapLoopLazyDeoptContinuation, params.target,
Builtin::kArrayMapLoopLazyDeoptContinuation, params.target,
params.context, checkpoint_params, arraysize(checkpoint_params),
params.outer_frame_state, ContinuationFrameStateMode::LAZY);
}
@ -1525,7 +1522,7 @@ FrameState MapLoopEagerFrameState(const MapFrameStateParams& params,
params.original_length};
return CreateJavaScriptBuiltinContinuationFrameState(
params.jsgraph, params.shared,
Builtins::kArrayMapLoopEagerDeoptContinuation, params.target,
Builtin::kArrayMapLoopEagerDeoptContinuation, params.target,
params.context, checkpoint_params, arraysize(checkpoint_params),
params.outer_frame_state, ContinuationFrameStateMode::EAGER);
}
@ -1625,7 +1622,7 @@ FrameState FilterLoopLazyFrameState(const FilterFrameStateParams& params,
to};
return CreateJavaScriptBuiltinContinuationFrameState(
params.jsgraph, params.shared,
Builtins::kArrayFilterLoopLazyDeoptContinuation, params.target,
Builtin::kArrayFilterLoopLazyDeoptContinuation, params.target,
params.context, checkpoint_params, arraysize(checkpoint_params),
params.outer_frame_state, ContinuationFrameStateMode::LAZY);
}
@ -1634,9 +1631,9 @@ FrameState FilterLoopEagerPostCallbackFrameState(
const FilterFrameStateParams& params, TNode<Number> k, TNode<Number> to,
TNode<Object> element, TNode<Object> callback_value) {
// Note that we are intentionally reusing the
// Builtins::kArrayFilterLoopLazyDeoptContinuation as an *eager* entry point
// in this case. This is safe, because re-evaluating a [ToBoolean] coercion is
// safe.
// Builtin::kArrayFilterLoopLazyDeoptContinuation as an *eager* entry
// point in this case. This is safe, because re-evaluating a [ToBoolean]
// coercion is safe.
Node* checkpoint_params[] = {params.receiver,
params.callback,
params.this_arg,
@ -1648,7 +1645,7 @@ FrameState FilterLoopEagerPostCallbackFrameState(
callback_value};
return CreateJavaScriptBuiltinContinuationFrameState(
params.jsgraph, params.shared,
Builtins::kArrayFilterLoopLazyDeoptContinuation, params.target,
Builtin::kArrayFilterLoopLazyDeoptContinuation, params.target,
params.context, checkpoint_params, arraysize(checkpoint_params),
params.outer_frame_state, ContinuationFrameStateMode::EAGER);
}
@ -1664,7 +1661,7 @@ FrameState FilterLoopEagerFrameState(const FilterFrameStateParams& params,
to};
return CreateJavaScriptBuiltinContinuationFrameState(
params.jsgraph, params.shared,
Builtins::kArrayFilterLoopEagerDeoptContinuation, params.target,
Builtin::kArrayFilterLoopEagerDeoptContinuation, params.target,
params.context, checkpoint_params, arraysize(checkpoint_params),
params.outer_frame_state, ContinuationFrameStateMode::EAGER);
}
@ -1766,10 +1763,9 @@ struct FindFrameStateParams {
FrameState FindLoopLazyFrameState(const FindFrameStateParams& params,
TNode<Number> k, ArrayFindVariant variant) {
Builtins::Name builtin =
(variant == ArrayFindVariant::kFind)
? Builtins::kArrayFindLoopLazyDeoptContinuation
: Builtins::kArrayFindIndexLoopLazyDeoptContinuation;
Builtin builtin = (variant == ArrayFindVariant::kFind)
? Builtin::kArrayFindLoopLazyDeoptContinuation
: Builtin::kArrayFindIndexLoopLazyDeoptContinuation;
Node* checkpoint_params[] = {params.receiver, params.callback,
params.this_arg, k, params.original_length};
return CreateJavaScriptBuiltinContinuationFrameState(
@ -1780,10 +1776,9 @@ FrameState FindLoopLazyFrameState(const FindFrameStateParams& params,
FrameState FindLoopEagerFrameState(const FindFrameStateParams& params,
TNode<Number> k, ArrayFindVariant variant) {
Builtins::Name builtin =
(variant == ArrayFindVariant::kFind)
? Builtins::kArrayFindLoopEagerDeoptContinuation
: Builtins::kArrayFindIndexLoopEagerDeoptContinuation;
Builtin builtin = (variant == ArrayFindVariant::kFind)
? Builtin::kArrayFindLoopEagerDeoptContinuation
: Builtin::kArrayFindIndexLoopEagerDeoptContinuation;
Node* checkpoint_params[] = {params.receiver, params.callback,
params.this_arg, k, params.original_length};
return CreateJavaScriptBuiltinContinuationFrameState(
@ -1795,10 +1790,10 @@ FrameState FindLoopEagerFrameState(const FindFrameStateParams& params,
FrameState FindLoopAfterCallbackLazyFrameState(
const FindFrameStateParams& params, TNode<Number> next_k,
TNode<Object> if_found_value, ArrayFindVariant variant) {
Builtins::Name builtin =
Builtin builtin =
(variant == ArrayFindVariant::kFind)
? Builtins::kArrayFindLoopAfterCallbackLazyDeoptContinuation
: Builtins::kArrayFindIndexLoopAfterCallbackLazyDeoptContinuation;
? Builtin::kArrayFindLoopAfterCallbackLazyDeoptContinuation
: Builtin::kArrayFindIndexLoopAfterCallbackLazyDeoptContinuation;
Node* checkpoint_params[] = {params.receiver, params.callback,
params.this_arg, next_k,
params.original_length, if_found_value};
@ -1884,9 +1879,9 @@ struct EverySomeFrameStateParams {
FrameState EverySomeLoopLazyFrameState(const EverySomeFrameStateParams& params,
TNode<Number> k,
ArrayEverySomeVariant variant) {
Builtins::Name builtin = (variant == ArrayEverySomeVariant::kEvery)
? Builtins::kArrayEveryLoopLazyDeoptContinuation
: Builtins::kArraySomeLoopLazyDeoptContinuation;
Builtin builtin = (variant == ArrayEverySomeVariant::kEvery)
? Builtin::kArrayEveryLoopLazyDeoptContinuation
: Builtin::kArraySomeLoopLazyDeoptContinuation;
Node* checkpoint_params[] = {params.receiver, params.callback,
params.this_arg, k, params.original_length};
return CreateJavaScriptBuiltinContinuationFrameState(
@ -1898,9 +1893,9 @@ FrameState EverySomeLoopLazyFrameState(const EverySomeFrameStateParams& params,
FrameState EverySomeLoopEagerFrameState(const EverySomeFrameStateParams& params,
TNode<Number> k,
ArrayEverySomeVariant variant) {
Builtins::Name builtin = (variant == ArrayEverySomeVariant::kEvery)
? Builtins::kArrayEveryLoopEagerDeoptContinuation
: Builtins::kArraySomeLoopEagerDeoptContinuation;
Builtin builtin = (variant == ArrayEverySomeVariant::kEvery)
? Builtin::kArrayEveryLoopEagerDeoptContinuation
: Builtin::kArraySomeLoopEagerDeoptContinuation;
Node* checkpoint_params[] = {params.receiver, params.callback,
params.this_arg, k, params.original_length};
return CreateJavaScriptBuiltinContinuationFrameState(
@ -1978,14 +1973,14 @@ Callable GetCallableForArrayIndexOfIncludes(ArrayIndexOfIncludesVariant variant,
case PACKED_ELEMENTS:
case HOLEY_ELEMENTS:
return Builtins::CallableFor(isolate,
Builtins::kArrayIndexOfSmiOrObject);
Builtin::kArrayIndexOfSmiOrObject);
case PACKED_DOUBLE_ELEMENTS:
return Builtins::CallableFor(isolate,
Builtins::kArrayIndexOfPackedDoubles);
Builtin::kArrayIndexOfPackedDoubles);
default:
DCHECK_EQ(HOLEY_DOUBLE_ELEMENTS, elements_kind);
return Builtins::CallableFor(isolate,
Builtins::kArrayIndexOfHoleyDoubles);
Builtin::kArrayIndexOfHoleyDoubles);
}
} else {
DCHECK_EQ(variant, ArrayIndexOfIncludesVariant::kIncludes);
@ -1995,14 +1990,14 @@ Callable GetCallableForArrayIndexOfIncludes(ArrayIndexOfIncludesVariant variant,
case PACKED_ELEMENTS:
case HOLEY_ELEMENTS:
return Builtins::CallableFor(isolate,
Builtins::kArrayIncludesSmiOrObject);
Builtin::kArrayIncludesSmiOrObject);
case PACKED_DOUBLE_ELEMENTS:
return Builtins::CallableFor(isolate,
Builtins::kArrayIncludesPackedDoubles);
Builtin::kArrayIncludesPackedDoubles);
default:
DCHECK_EQ(HOLEY_DOUBLE_ELEMENTS, elements_kind);
return Builtins::CallableFor(isolate,
Builtins::kArrayIncludesHoleyDoubles);
Builtin::kArrayIncludesHoleyDoubles);
}
}
UNREACHABLE();
@ -2116,10 +2111,10 @@ FrameState PromiseConstructorLazyFrameState(
jsgraph->TheHoleConstant() /* exception */
};
return CreateJavaScriptBuiltinContinuationFrameState(
jsgraph, params.shared,
Builtins::kPromiseConstructorLazyDeoptContinuation, params.target,
params.context, checkpoint_params, arraysize(checkpoint_params),
constructor_frame_state, ContinuationFrameStateMode::LAZY);
jsgraph, params.shared, Builtin::kPromiseConstructorLazyDeoptContinuation,
params.target, params.context, checkpoint_params,
arraysize(checkpoint_params), constructor_frame_state,
ContinuationFrameStateMode::LAZY);
}
FrameState PromiseConstructorLazyWithCatchFrameState(
@ -2133,7 +2128,7 @@ FrameState PromiseConstructorLazyWithCatchFrameState(
promise, reject};
return CreateJavaScriptBuiltinContinuationFrameState(
params.jsgraph, params.shared,
Builtins::kPromiseConstructorLazyDeoptContinuation, params.target,
Builtin::kPromiseConstructorLazyDeoptContinuation, params.target,
params.context, checkpoint_params, arraysize(checkpoint_params),
constructor_frame_state, ContinuationFrameStateMode::LAZY_WITH_CATCH);
}
@ -3073,8 +3068,7 @@ Reduction JSCallReducer::ReduceReflectGet(Node* node) {
Node* etrue = effect;
Node* vtrue;
{
Callable callable =
Builtins::CallableFor(isolate(), Builtins::kGetProperty);
Callable callable = Builtins::CallableFor(isolate(), Builtin::kGetProperty);
auto call_descriptor = Linkage::GetStubCallDescriptor(
graph()->zone(), callable.descriptor(),
callable.descriptor().GetStackParameterCount(),
@ -3737,14 +3731,14 @@ Reduction JSCallReducer::ReduceCallApiFunction(
// and/or the compatible receiver check, so use the generic builtin
// that does those checks dynamically. This is still significantly
// faster than the generic call sequence.
Builtins::Name builtin_name;
Builtin builtin_name;
if (function_template_info.accept_any_receiver()) {
builtin_name = Builtins::kCallFunctionTemplate_CheckCompatibleReceiver;
builtin_name = Builtin::kCallFunctionTemplate_CheckCompatibleReceiver;
} else if (function_template_info.is_signature_undefined()) {
builtin_name = Builtins::kCallFunctionTemplate_CheckAccess;
builtin_name = Builtin::kCallFunctionTemplate_CheckAccess;
} else {
builtin_name =
Builtins::kCallFunctionTemplate_CheckAccessAndCompatibleReceiver;
Builtin::kCallFunctionTemplate_CheckAccessAndCompatibleReceiver;
}
// The CallFunctionTemplate builtin requires the {receiver} to be
@ -4508,333 +4502,333 @@ Reduction JSCallReducer::ReduceJSCall(Node* node,
// Check for known builtin functions.
int builtin_id =
shared.HasBuiltinId() ? shared.builtin_id() : Builtins::kNoBuiltinId;
shared.HasBuiltinId() ? shared.builtin_id() : Builtin::kNoBuiltinId;
switch (builtin_id) {
case Builtins::kArrayConstructor:
case Builtin::kArrayConstructor:
return ReduceArrayConstructor(node);
case Builtins::kBooleanConstructor:
case Builtin::kBooleanConstructor:
return ReduceBooleanConstructor(node);
case Builtins::kFunctionPrototypeApply:
case Builtin::kFunctionPrototypeApply:
return ReduceFunctionPrototypeApply(node);
case Builtins::kFastFunctionPrototypeBind:
case Builtin::kFastFunctionPrototypeBind:
return ReduceFunctionPrototypeBind(node);
case Builtins::kFunctionPrototypeCall:
case Builtin::kFunctionPrototypeCall:
return ReduceFunctionPrototypeCall(node);
case Builtins::kFunctionPrototypeHasInstance:
case Builtin::kFunctionPrototypeHasInstance:
return ReduceFunctionPrototypeHasInstance(node);
case Builtins::kObjectConstructor:
case Builtin::kObjectConstructor:
return ReduceObjectConstructor(node);
case Builtins::kObjectCreate:
case Builtin::kObjectCreate:
return ReduceObjectCreate(node);
case Builtins::kObjectGetPrototypeOf:
case Builtin::kObjectGetPrototypeOf:
return ReduceObjectGetPrototypeOf(node);
case Builtins::kObjectIs:
case Builtin::kObjectIs:
return ReduceObjectIs(node);
case Builtins::kObjectPrototypeGetProto:
case Builtin::kObjectPrototypeGetProto:
return ReduceObjectPrototypeGetProto(node);
case Builtins::kObjectPrototypeHasOwnProperty:
case Builtin::kObjectPrototypeHasOwnProperty:
return ReduceObjectPrototypeHasOwnProperty(node);
case Builtins::kObjectPrototypeIsPrototypeOf:
case Builtin::kObjectPrototypeIsPrototypeOf:
return ReduceObjectPrototypeIsPrototypeOf(node);
case Builtins::kReflectApply:
case Builtin::kReflectApply:
return ReduceReflectApply(node);
case Builtins::kReflectConstruct:
case Builtin::kReflectConstruct:
return ReduceReflectConstruct(node);
case Builtins::kReflectGet:
case Builtin::kReflectGet:
return ReduceReflectGet(node);
case Builtins::kReflectGetPrototypeOf:
case Builtin::kReflectGetPrototypeOf:
return ReduceReflectGetPrototypeOf(node);
case Builtins::kReflectHas:
case Builtin::kReflectHas:
return ReduceReflectHas(node);
case Builtins::kArrayForEach:
case Builtin::kArrayForEach:
return ReduceArrayForEach(node, shared);
case Builtins::kArrayMap:
case Builtin::kArrayMap:
return ReduceArrayMap(node, shared);
case Builtins::kArrayFilter:
case Builtin::kArrayFilter:
return ReduceArrayFilter(node, shared);
case Builtins::kArrayReduce:
case Builtin::kArrayReduce:
return ReduceArrayReduce(node, shared);
case Builtins::kArrayReduceRight:
case Builtin::kArrayReduceRight:
return ReduceArrayReduceRight(node, shared);
case Builtins::kArrayPrototypeFind:
case Builtin::kArrayPrototypeFind:
return ReduceArrayFind(node, shared);
case Builtins::kArrayPrototypeFindIndex:
case Builtin::kArrayPrototypeFindIndex:
return ReduceArrayFindIndex(node, shared);
case Builtins::kArrayEvery:
case Builtin::kArrayEvery:
return ReduceArrayEvery(node, shared);
case Builtins::kArrayIndexOf:
case Builtin::kArrayIndexOf:
return ReduceArrayIndexOf(node);
case Builtins::kArrayIncludes:
case Builtin::kArrayIncludes:
return ReduceArrayIncludes(node);
case Builtins::kArraySome:
case Builtin::kArraySome:
return ReduceArraySome(node, shared);
case Builtins::kArrayPrototypePush:
case Builtin::kArrayPrototypePush:
return ReduceArrayPrototypePush(node);
case Builtins::kArrayPrototypePop:
case Builtin::kArrayPrototypePop:
return ReduceArrayPrototypePop(node);
case Builtins::kArrayPrototypeShift:
case Builtin::kArrayPrototypeShift:
return ReduceArrayPrototypeShift(node);
case Builtins::kArrayPrototypeSlice:
case Builtin::kArrayPrototypeSlice:
return ReduceArrayPrototypeSlice(node);
case Builtins::kArrayPrototypeEntries:
case Builtin::kArrayPrototypeEntries:
return ReduceArrayIterator(node, ArrayIteratorKind::kArrayLike,
IterationKind::kEntries);
case Builtins::kArrayPrototypeKeys:
case Builtin::kArrayPrototypeKeys:
return ReduceArrayIterator(node, ArrayIteratorKind::kArrayLike,
IterationKind::kKeys);
case Builtins::kArrayPrototypeValues:
case Builtin::kArrayPrototypeValues:
return ReduceArrayIterator(node, ArrayIteratorKind::kArrayLike,
IterationKind::kValues);
case Builtins::kArrayIteratorPrototypeNext:
case Builtin::kArrayIteratorPrototypeNext:
return ReduceArrayIteratorPrototypeNext(node);
case Builtins::kArrayIsArray:
case Builtin::kArrayIsArray:
return ReduceArrayIsArray(node);
case Builtins::kArrayBufferIsView:
case Builtin::kArrayBufferIsView:
return ReduceArrayBufferIsView(node);
case Builtins::kDataViewPrototypeGetByteLength:
case Builtin::kDataViewPrototypeGetByteLength:
return ReduceArrayBufferViewAccessor(
node, JS_DATA_VIEW_TYPE,
AccessBuilder::ForJSArrayBufferViewByteLength());
case Builtins::kDataViewPrototypeGetByteOffset:
case Builtin::kDataViewPrototypeGetByteOffset:
return ReduceArrayBufferViewAccessor(
node, JS_DATA_VIEW_TYPE,
AccessBuilder::ForJSArrayBufferViewByteOffset());
case Builtins::kDataViewPrototypeGetUint8:
case Builtin::kDataViewPrototypeGetUint8:
return ReduceDataViewAccess(node, DataViewAccess::kGet,
ExternalArrayType::kExternalUint8Array);
case Builtins::kDataViewPrototypeGetInt8:
case Builtin::kDataViewPrototypeGetInt8:
return ReduceDataViewAccess(node, DataViewAccess::kGet,
ExternalArrayType::kExternalInt8Array);
case Builtins::kDataViewPrototypeGetUint16:
case Builtin::kDataViewPrototypeGetUint16:
return ReduceDataViewAccess(node, DataViewAccess::kGet,
ExternalArrayType::kExternalUint16Array);
case Builtins::kDataViewPrototypeGetInt16:
case Builtin::kDataViewPrototypeGetInt16:
return ReduceDataViewAccess(node, DataViewAccess::kGet,
ExternalArrayType::kExternalInt16Array);
case Builtins::kDataViewPrototypeGetUint32:
case Builtin::kDataViewPrototypeGetUint32:
return ReduceDataViewAccess(node, DataViewAccess::kGet,
ExternalArrayType::kExternalUint32Array);
case Builtins::kDataViewPrototypeGetInt32:
case Builtin::kDataViewPrototypeGetInt32:
return ReduceDataViewAccess(node, DataViewAccess::kGet,
ExternalArrayType::kExternalInt32Array);
case Builtins::kDataViewPrototypeGetFloat32:
case Builtin::kDataViewPrototypeGetFloat32:
return ReduceDataViewAccess(node, DataViewAccess::kGet,
ExternalArrayType::kExternalFloat32Array);
case Builtins::kDataViewPrototypeGetFloat64:
case Builtin::kDataViewPrototypeGetFloat64:
return ReduceDataViewAccess(node, DataViewAccess::kGet,
ExternalArrayType::kExternalFloat64Array);
case Builtins::kDataViewPrototypeSetUint8:
case Builtin::kDataViewPrototypeSetUint8:
return ReduceDataViewAccess(node, DataViewAccess::kSet,
ExternalArrayType::kExternalUint8Array);
case Builtins::kDataViewPrototypeSetInt8:
case Builtin::kDataViewPrototypeSetInt8:
return ReduceDataViewAccess(node, DataViewAccess::kSet,
ExternalArrayType::kExternalInt8Array);
case Builtins::kDataViewPrototypeSetUint16:
case Builtin::kDataViewPrototypeSetUint16:
return ReduceDataViewAccess(node, DataViewAccess::kSet,
ExternalArrayType::kExternalUint16Array);
case Builtins::kDataViewPrototypeSetInt16:
case Builtin::kDataViewPrototypeSetInt16:
return ReduceDataViewAccess(node, DataViewAccess::kSet,
ExternalArrayType::kExternalInt16Array);
case Builtins::kDataViewPrototypeSetUint32:
case Builtin::kDataViewPrototypeSetUint32:
return ReduceDataViewAccess(node, DataViewAccess::kSet,
ExternalArrayType::kExternalUint32Array);
case Builtins::kDataViewPrototypeSetInt32:
case Builtin::kDataViewPrototypeSetInt32:
return ReduceDataViewAccess(node, DataViewAccess::kSet,
ExternalArrayType::kExternalInt32Array);
case Builtins::kDataViewPrototypeSetFloat32:
case Builtin::kDataViewPrototypeSetFloat32:
return ReduceDataViewAccess(node, DataViewAccess::kSet,
ExternalArrayType::kExternalFloat32Array);
case Builtins::kDataViewPrototypeSetFloat64:
case Builtin::kDataViewPrototypeSetFloat64:
return ReduceDataViewAccess(node, DataViewAccess::kSet,
ExternalArrayType::kExternalFloat64Array);
case Builtins::kTypedArrayPrototypeByteLength:
case Builtin::kTypedArrayPrototypeByteLength:
return ReduceArrayBufferViewAccessor(
node, JS_TYPED_ARRAY_TYPE,
AccessBuilder::ForJSArrayBufferViewByteLength());
case Builtins::kTypedArrayPrototypeByteOffset:
case Builtin::kTypedArrayPrototypeByteOffset:
return ReduceArrayBufferViewAccessor(
node, JS_TYPED_ARRAY_TYPE,
AccessBuilder::ForJSArrayBufferViewByteOffset());
case Builtins::kTypedArrayPrototypeLength:
case Builtin::kTypedArrayPrototypeLength:
return ReduceArrayBufferViewAccessor(
node, JS_TYPED_ARRAY_TYPE, AccessBuilder::ForJSTypedArrayLength());
case Builtins::kTypedArrayPrototypeToStringTag:
case Builtin::kTypedArrayPrototypeToStringTag:
return ReduceTypedArrayPrototypeToStringTag(node);
case Builtins::kMathAbs:
case Builtin::kMathAbs:
return ReduceMathUnary(node, simplified()->NumberAbs());
case Builtins::kMathAcos:
case Builtin::kMathAcos:
return ReduceMathUnary(node, simplified()->NumberAcos());
case Builtins::kMathAcosh:
case Builtin::kMathAcosh:
return ReduceMathUnary(node, simplified()->NumberAcosh());
case Builtins::kMathAsin:
case Builtin::kMathAsin:
return ReduceMathUnary(node, simplified()->NumberAsin());
case Builtins::kMathAsinh:
case Builtin::kMathAsinh:
return ReduceMathUnary(node, simplified()->NumberAsinh());
case Builtins::kMathAtan:
case Builtin::kMathAtan:
return ReduceMathUnary(node, simplified()->NumberAtan());
case Builtins::kMathAtanh:
case Builtin::kMathAtanh:
return ReduceMathUnary(node, simplified()->NumberAtanh());
case Builtins::kMathCbrt:
case Builtin::kMathCbrt:
return ReduceMathUnary(node, simplified()->NumberCbrt());
case Builtins::kMathCeil:
case Builtin::kMathCeil:
return ReduceMathUnary(node, simplified()->NumberCeil());
case Builtins::kMathCos:
case Builtin::kMathCos:
return ReduceMathUnary(node, simplified()->NumberCos());
case Builtins::kMathCosh:
case Builtin::kMathCosh:
return ReduceMathUnary(node, simplified()->NumberCosh());
case Builtins::kMathExp:
case Builtin::kMathExp:
return ReduceMathUnary(node, simplified()->NumberExp());
case Builtins::kMathExpm1:
case Builtin::kMathExpm1:
return ReduceMathUnary(node, simplified()->NumberExpm1());
case Builtins::kMathFloor:
case Builtin::kMathFloor:
return ReduceMathUnary(node, simplified()->NumberFloor());
case Builtins::kMathFround:
case Builtin::kMathFround:
return ReduceMathUnary(node, simplified()->NumberFround());
case Builtins::kMathLog:
case Builtin::kMathLog:
return ReduceMathUnary(node, simplified()->NumberLog());
case Builtins::kMathLog1p:
case Builtin::kMathLog1p:
return ReduceMathUnary(node, simplified()->NumberLog1p());
case Builtins::kMathLog10:
case Builtin::kMathLog10:
return ReduceMathUnary(node, simplified()->NumberLog10());
case Builtins::kMathLog2:
case Builtin::kMathLog2:
return ReduceMathUnary(node, simplified()->NumberLog2());
case Builtins::kMathRound:
case Builtin::kMathRound:
return ReduceMathUnary(node, simplified()->NumberRound());
case Builtins::kMathSign:
case Builtin::kMathSign:
return ReduceMathUnary(node, simplified()->NumberSign());
case Builtins::kMathSin:
case Builtin::kMathSin:
return ReduceMathUnary(node, simplified()->NumberSin());
case Builtins::kMathSinh:
case Builtin::kMathSinh:
return ReduceMathUnary(node, simplified()->NumberSinh());
case Builtins::kMathSqrt:
case Builtin::kMathSqrt:
return ReduceMathUnary(node, simplified()->NumberSqrt());
case Builtins::kMathTan:
case Builtin::kMathTan:
return ReduceMathUnary(node, simplified()->NumberTan());
case Builtins::kMathTanh:
case Builtin::kMathTanh:
return ReduceMathUnary(node, simplified()->NumberTanh());
case Builtins::kMathTrunc:
case Builtin::kMathTrunc:
return ReduceMathUnary(node, simplified()->NumberTrunc());
case Builtins::kMathAtan2:
case Builtin::kMathAtan2:
return ReduceMathBinary(node, simplified()->NumberAtan2());
case Builtins::kMathPow:
case Builtin::kMathPow:
return ReduceMathBinary(node, simplified()->NumberPow());
case Builtins::kMathClz32:
case Builtin::kMathClz32:
return ReduceMathClz32(node);
case Builtins::kMathImul:
case Builtin::kMathImul:
return ReduceMathImul(node);
case Builtins::kMathMax:
case Builtin::kMathMax:
return ReduceMathMinMax(node, simplified()->NumberMax(),
jsgraph()->Constant(-V8_INFINITY));
case Builtins::kMathMin:
case Builtin::kMathMin:
return ReduceMathMinMax(node, simplified()->NumberMin(),
jsgraph()->Constant(V8_INFINITY));
case Builtins::kNumberIsFinite:
case Builtin::kNumberIsFinite:
return ReduceNumberIsFinite(node);
case Builtins::kNumberIsInteger:
case Builtin::kNumberIsInteger:
return ReduceNumberIsInteger(node);
case Builtins::kNumberIsSafeInteger:
case Builtin::kNumberIsSafeInteger:
return ReduceNumberIsSafeInteger(node);
case Builtins::kNumberIsNaN:
case Builtin::kNumberIsNaN:
return ReduceNumberIsNaN(node);
case Builtins::kNumberParseInt:
case Builtin::kNumberParseInt:
return ReduceNumberParseInt(node);
case Builtins::kGlobalIsFinite:
case Builtin::kGlobalIsFinite:
return ReduceGlobalIsFinite(node);
case Builtins::kGlobalIsNaN:
case Builtin::kGlobalIsNaN:
return ReduceGlobalIsNaN(node);
case Builtins::kMapPrototypeGet:
case Builtin::kMapPrototypeGet:
return ReduceMapPrototypeGet(node);
case Builtins::kMapPrototypeHas:
case Builtin::kMapPrototypeHas:
return ReduceMapPrototypeHas(node);
case Builtins::kRegExpPrototypeTest:
case Builtin::kRegExpPrototypeTest:
return ReduceRegExpPrototypeTest(node);
case Builtins::kReturnReceiver:
case Builtin::kReturnReceiver:
return ReduceReturnReceiver(node);
case Builtins::kStringPrototypeIndexOf:
case Builtin::kStringPrototypeIndexOf:
return ReduceStringPrototypeIndexOf(node);
case Builtins::kStringPrototypeCharAt:
case Builtin::kStringPrototypeCharAt:
return ReduceStringPrototypeCharAt(node);
case Builtins::kStringPrototypeCharCodeAt:
case Builtin::kStringPrototypeCharCodeAt:
return ReduceStringPrototypeStringAt(simplified()->StringCharCodeAt(),
node);
case Builtins::kStringPrototypeCodePointAt:
case Builtin::kStringPrototypeCodePointAt:
return ReduceStringPrototypeStringAt(simplified()->StringCodePointAt(),
node);
case Builtins::kStringPrototypeSubstring:
case Builtin::kStringPrototypeSubstring:
return ReduceStringPrototypeSubstring(node);
case Builtins::kStringPrototypeSlice:
case Builtin::kStringPrototypeSlice:
return ReduceStringPrototypeSlice(node);
case Builtins::kStringPrototypeSubstr:
case Builtin::kStringPrototypeSubstr:
return ReduceStringPrototypeSubstr(node);
case Builtins::kStringPrototypeStartsWith:
case Builtin::kStringPrototypeStartsWith:
return ReduceStringPrototypeStartsWith(node);
#ifdef V8_INTL_SUPPORT
case Builtins::kStringPrototypeToLowerCaseIntl:
case Builtin::kStringPrototypeToLowerCaseIntl:
return ReduceStringPrototypeToLowerCaseIntl(node);
case Builtins::kStringPrototypeToUpperCaseIntl:
case Builtin::kStringPrototypeToUpperCaseIntl:
return ReduceStringPrototypeToUpperCaseIntl(node);
#endif // V8_INTL_SUPPORT
case Builtins::kStringFromCharCode:
case Builtin::kStringFromCharCode:
return ReduceStringFromCharCode(node);
case Builtins::kStringFromCodePoint:
case Builtin::kStringFromCodePoint:
return ReduceStringFromCodePoint(node);
case Builtins::kStringPrototypeIterator:
case Builtin::kStringPrototypeIterator:
return ReduceStringPrototypeIterator(node);
case Builtins::kStringIteratorPrototypeNext:
case Builtin::kStringIteratorPrototypeNext:
return ReduceStringIteratorPrototypeNext(node);
case Builtins::kStringPrototypeConcat:
case Builtin::kStringPrototypeConcat:
return ReduceStringPrototypeConcat(node);
case Builtins::kTypedArrayPrototypeEntries:
case Builtin::kTypedArrayPrototypeEntries:
return ReduceArrayIterator(node, ArrayIteratorKind::kTypedArray,
IterationKind::kEntries);
case Builtins::kTypedArrayPrototypeKeys:
case Builtin::kTypedArrayPrototypeKeys:
return ReduceArrayIterator(node, ArrayIteratorKind::kTypedArray,
IterationKind::kKeys);
case Builtins::kTypedArrayPrototypeValues:
case Builtin::kTypedArrayPrototypeValues:
return ReduceArrayIterator(node, ArrayIteratorKind::kTypedArray,
IterationKind::kValues);
case Builtins::kPromisePrototypeCatch:
case Builtin::kPromisePrototypeCatch:
return ReducePromisePrototypeCatch(node);
case Builtins::kPromisePrototypeFinally:
case Builtin::kPromisePrototypeFinally:
return ReducePromisePrototypeFinally(node);
case Builtins::kPromisePrototypeThen:
case Builtin::kPromisePrototypeThen:
return ReducePromisePrototypeThen(node);
case Builtins::kPromiseResolveTrampoline:
case Builtin::kPromiseResolveTrampoline:
return ReducePromiseResolveTrampoline(node);
case Builtins::kMapPrototypeEntries:
case Builtin::kMapPrototypeEntries:
return ReduceCollectionIteration(node, CollectionKind::kMap,
IterationKind::kEntries);
case Builtins::kMapPrototypeKeys:
case Builtin::kMapPrototypeKeys:
return ReduceCollectionIteration(node, CollectionKind::kMap,
IterationKind::kKeys);
case Builtins::kMapPrototypeGetSize:
case Builtin::kMapPrototypeGetSize:
return ReduceCollectionPrototypeSize(node, CollectionKind::kMap);
case Builtins::kMapPrototypeValues:
case Builtin::kMapPrototypeValues:
return ReduceCollectionIteration(node, CollectionKind::kMap,
IterationKind::kValues);
case Builtins::kMapIteratorPrototypeNext:
case Builtin::kMapIteratorPrototypeNext:
return ReduceCollectionIteratorPrototypeNext(
node, OrderedHashMap::kEntrySize, factory()->empty_ordered_hash_map(),
FIRST_JS_MAP_ITERATOR_TYPE, LAST_JS_MAP_ITERATOR_TYPE);
case Builtins::kSetPrototypeEntries:
case Builtin::kSetPrototypeEntries:
return ReduceCollectionIteration(node, CollectionKind::kSet,
IterationKind::kEntries);
case Builtins::kSetPrototypeGetSize:
case Builtin::kSetPrototypeGetSize:
return ReduceCollectionPrototypeSize(node, CollectionKind::kSet);
case Builtins::kSetPrototypeValues:
case Builtin::kSetPrototypeValues:
return ReduceCollectionIteration(node, CollectionKind::kSet,
IterationKind::kValues);
case Builtins::kSetIteratorPrototypeNext:
case Builtin::kSetIteratorPrototypeNext:
return ReduceCollectionIteratorPrototypeNext(
node, OrderedHashSet::kEntrySize, factory()->empty_ordered_hash_set(),
FIRST_JS_SET_ITERATOR_TYPE, LAST_JS_SET_ITERATOR_TYPE);
case Builtins::kDatePrototypeGetTime:
case Builtin::kDatePrototypeGetTime:
return ReduceDatePrototypeGetTime(node);
case Builtins::kDateNow:
case Builtin::kDateNow:
return ReduceDateNow(node);
case Builtins::kNumberConstructor:
case Builtin::kNumberConstructor:
return ReduceNumberConstructor(node);
case Builtins::kBigIntAsUintN:
case Builtin::kBigIntAsUintN:
return ReduceBigIntAsUintN(node);
default:
break;
@ -4972,9 +4966,9 @@ Reduction JSCallReducer::ReduceJSConstruct(Node* node) {
// Check for known builtin functions.
int builtin_id = function.shared().HasBuiltinId()
? function.shared().builtin_id()
: Builtins::kNoBuiltinId;
: Builtin::kNoBuiltinId;
switch (builtin_id) {
case Builtins::kArrayConstructor: {
case Builtin::kArrayConstructor: {
// TODO(bmeurer): Deal with Array subclasses here.
// Turn the {node} into a {JSCreateArray} call.
STATIC_ASSERT(JSConstructNode::NewTargetIndex() == 1);
@ -4984,7 +4978,7 @@ Reduction JSCallReducer::ReduceJSConstruct(Node* node) {
node, javascript()->CreateArray(arity, Handle<AllocationSite>()));
return Changed(node);
}
case Builtins::kObjectConstructor: {
case Builtin::kObjectConstructor: {
// If no value is passed, we can immediately lower to a simple
// JSCreate and don't need to do any massaging of the {node}.
if (arity == 0) {
@ -5009,9 +5003,9 @@ Reduction JSCallReducer::ReduceJSConstruct(Node* node) {
}
break;
}
case Builtins::kPromiseConstructor:
case Builtin::kPromiseConstructor:
return ReducePromiseConstructor(node);
case Builtins::kTypedArrayConstructor:
case Builtin::kTypedArrayConstructor:
return ReduceTypedArrayConstructor(node, function.shared());
default:
break;
@ -5835,7 +5829,7 @@ Reduction JSCallReducer::ReduceArrayPrototypeShift(Node* node) {
Node* vfalse1;
{
// Call the generic C++ implementation.
const int builtin_index = Builtins::kArrayShift;
const int builtin_index = Builtin::kArrayShift;
auto call_descriptor = Linkage::GetCEntryStubCallDescriptor(
graph()->zone(), 1, BuiltinArguments::kNumExtraArgsWithReceiver,
Builtins::name(builtin_index), node->op()->properties(),
@ -5955,13 +5949,13 @@ Reduction JSCallReducer::ReduceArrayPrototypeSlice(Node* node) {
// allocation in here. That way we'd even get escape analysis and scalar
// replacement to help in some cases.
Callable callable =
Builtins::CallableFor(isolate(), Builtins::kCloneFastJSArray);
Builtins::CallableFor(isolate(), Builtin::kCloneFastJSArray);
auto call_descriptor = Linkage::GetStubCallDescriptor(
graph()->zone(), callable.descriptor(),
callable.descriptor().GetStackParameterCount(), CallDescriptor::kNoFlags,
Operator::kNoThrow | Operator::kNoDeopt);
// Calls to Builtins::kCloneFastJSArray produce COW arrays
// Calls to Builtin::kCloneFastJSArray produce COW arrays
// if the original array is COW
Node* clone = effect = graph()->NewNode(
common()->Call(call_descriptor), jsgraph()->HeapConstant(callable.code()),
@ -6763,7 +6757,7 @@ Node* JSCallReducer::CreateClosureFromBuiltinSharedFunctionInfo(
Handle<FeedbackCell> feedback_cell =
isolate()->factory()->many_closures_cell();
Callable const callable = Builtins::CallableFor(
isolate(), static_cast<Builtins::Name>(shared.builtin_id()));
isolate(), static_cast<Builtin>(shared.builtin_id()));
return graph()->NewNode(
javascript()->CreateClosure(shared.object(), callable.code()),
jsgraph()->HeapConstant(feedback_cell), context, effect, control);
@ -7007,7 +7001,7 @@ Reduction JSCallReducer::ReduceTypedArrayConstructor(
Node* const parameters[] = {jsgraph()->TheHoleConstant()};
int const num_parameters = static_cast<int>(arraysize(parameters));
frame_state = CreateJavaScriptBuiltinContinuationFrameState(
jsgraph(), shared, Builtins::kGenericLazyDeoptContinuation, target,
jsgraph(), shared, Builtin::kGenericLazyDeoptContinuation, target,
context, parameters, num_parameters, frame_state,
ContinuationFrameStateMode::LAZY);
@ -7364,7 +7358,7 @@ Reduction JSCallReducer::ReduceCollectionIteratorPrototypeNext(
simplified()->LoadField(AccessBuilder::ForJSCollectionIteratorIndex()),
receiver, effect, control);
Callable const callable =
Builtins::CallableFor(isolate(), Builtins::kOrderedHashTableHealIndex);
Builtins::CallableFor(isolate(), Builtin::kOrderedHashTableHealIndex);
auto call_descriptor = Linkage::GetStubCallDescriptor(
graph()->zone(), callable.descriptor(),
callable.descriptor().GetStackParameterCount(),
@ -8011,7 +8005,7 @@ Reduction JSCallReducer::ReduceNumberConstructor(Node* node) {
int stack_parameter_count = arraysize(stack_parameters);
Node* continuation_frame_state =
CreateJavaScriptBuiltinContinuationFrameState(
jsgraph(), shared_info, Builtins::kGenericLazyDeoptContinuation,
jsgraph(), shared_info, Builtin::kGenericLazyDeoptContinuation,
target, context, stack_parameters, stack_parameter_count, frame_state,
ContinuationFrameStateMode::LAZY);

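Not part of the diff: the JSCallReducer hunks above keep plain int storage for builtin ids (int builtin_id = ... : Builtin::kNoBuiltinId; and const int builtin_index = Builtin::kArrayShift;) while the case labels and assignments use the new Builtin::k... enumerators. The standalone sketch below models why that mix type-checks; the sketch namespace, the Describe helper, and the three enumerators are hypothetical stand-ins, not V8 code. An unscoped enum converts implicitly to int, so an int switch with Builtin::k... case labels is well-formed.

// Standalone sketch, not part of this CL: models the int/enum mix above.
#include <cstdio>

namespace sketch {

// Unscoped enum: enumerators convert implicitly to int.
enum Builtin : int {
  kNoBuiltinId = -1,
  kArrayConstructor,
  kObjectConstructor,
};

// Mirrors the `int builtin_id` dispatch shape of ReduceJSCall/ReduceJSConstruct.
const char* Describe(int builtin_id) {
  switch (builtin_id) {
    case Builtin::kArrayConstructor:  // enum case label in an int switch is OK
      return "reduce as Array constructor";
    case Builtin::kObjectConstructor:
      return "reduce as Object constructor";
    default:
      return "no builtin-specific reduction";
  }
}

}  // namespace sketch

int main() {
  std::printf("%s\n", sketch::Describe(sketch::Builtin::kArrayConstructor));
  std::printf("%s\n", sketch::Describe(sketch::Builtin::kNoBuiltinId));
  return 0;
}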

@ -61,7 +61,7 @@ Reduction JSGenericLowering::Reduce(Node* node) {
#define REPLACE_STUB_CALL(Name) \
void JSGenericLowering::LowerJS##Name(Node* node) { \
ReplaceWithBuiltinCall(node, Builtins::k##Name); \
ReplaceWithBuiltinCall(node, Builtin::k##Name); \
}
REPLACE_STUB_CALL(ToLength)
REPLACE_STUB_CALL(ToNumber)
@ -81,8 +81,7 @@ REPLACE_STUB_CALL(RejectPromise)
REPLACE_STUB_CALL(ResolvePromise)
#undef REPLACE_STUB_CALL
void JSGenericLowering::ReplaceWithBuiltinCall(Node* node,
Builtins::Name builtin) {
void JSGenericLowering::ReplaceWithBuiltinCall(Node* node, Builtin builtin) {
CallDescriptor::Flags flags = FrameStateFlagForCall(node);
Callable callable = Builtins::CallableFor(isolate(), builtin);
ReplaceWithBuiltinCall(node, callable, flags);
@ -123,8 +122,8 @@ void JSGenericLowering::ReplaceWithRuntimeCall(Node* node,
}
void JSGenericLowering::ReplaceUnaryOpWithBuiltinCall(
Node* node, Builtins::Name builtin_without_feedback,
Builtins::Name builtin_with_feedback) {
Node* node, Builtin builtin_without_feedback,
Builtin builtin_with_feedback) {
DCHECK(JSOperator::IsUnaryWithFeedback(node->opcode()));
const FeedbackParameter& p = FeedbackParameterOf(node->op());
if (CollectFeedbackInGenericLowering() && p.feedback().IsValid()) {
@ -148,10 +147,10 @@ void JSGenericLowering::ReplaceUnaryOpWithBuiltinCall(
}
}
#define DEF_UNARY_LOWERING(Name) \
void JSGenericLowering::LowerJS##Name(Node* node) { \
ReplaceUnaryOpWithBuiltinCall(node, Builtins::k##Name, \
Builtins::k##Name##_WithFeedback); \
#define DEF_UNARY_LOWERING(Name) \
void JSGenericLowering::LowerJS##Name(Node* node) { \
ReplaceUnaryOpWithBuiltinCall(node, Builtin::k##Name, \
Builtin::k##Name##_WithFeedback); \
}
DEF_UNARY_LOWERING(BitwiseNot)
DEF_UNARY_LOWERING(Decrement)
@ -160,10 +159,10 @@ DEF_UNARY_LOWERING(Negate)
#undef DEF_UNARY_LOWERING
void JSGenericLowering::ReplaceBinaryOpWithBuiltinCall(
Node* node, Builtins::Name builtin_without_feedback,
Builtins::Name builtin_with_feedback) {
Node* node, Builtin builtin_without_feedback,
Builtin builtin_with_feedback) {
DCHECK(JSOperator::IsBinaryWithFeedback(node->opcode()));
Builtins::Name builtin_id;
Builtin builtin_id;
const FeedbackParameter& p = FeedbackParameterOf(node->op());
if (CollectFeedbackInGenericLowering() && p.feedback().IsValid()) {
Node* slot = jsgraph()->UintPtrConstant(p.feedback().slot.ToInt());
@ -181,10 +180,10 @@ void JSGenericLowering::ReplaceBinaryOpWithBuiltinCall(
ReplaceWithBuiltinCall(node, builtin_id);
}
#define DEF_BINARY_LOWERING(Name) \
void JSGenericLowering::LowerJS##Name(Node* node) { \
ReplaceBinaryOpWithBuiltinCall(node, Builtins::k##Name, \
Builtins::k##Name##_WithFeedback); \
#define DEF_BINARY_LOWERING(Name) \
void JSGenericLowering::LowerJS##Name(Node* node) { \
ReplaceBinaryOpWithBuiltinCall(node, Builtin::k##Name, \
Builtin::k##Name##_WithFeedback); \
}
// Binary ops.
DEF_BINARY_LOWERING(Add)
@ -214,7 +213,7 @@ void JSGenericLowering::LowerJSStrictEqual(Node* node) {
DCHECK_EQ(node->op()->ControlInputCount(), 1);
node->RemoveInput(NodeProperties::FirstControlIndex(node));
Builtins::Name builtin_id;
Builtin builtin_id;
const FeedbackParameter& p = FeedbackParameterOf(node->op());
if (CollectFeedbackInGenericLowering() && p.feedback().IsValid()) {
Node* slot = jsgraph()->UintPtrConstant(p.feedback().slot.ToInt());
@ -223,10 +222,10 @@ void JSGenericLowering::LowerJSStrictEqual(Node* node) {
STATIC_ASSERT(JSStrictEqualNode::FeedbackVectorIndex() == 2);
DCHECK_EQ(node->op()->ValueInputCount(), 3);
node->InsertInput(zone(), 2, slot);
builtin_id = Builtins::kStrictEqual_WithFeedback;
builtin_id = Builtin::kStrictEqual_WithFeedback;
} else {
node->RemoveInput(JSStrictEqualNode::FeedbackVectorIndex());
builtin_id = Builtins::kStrictEqual;
builtin_id = Builtin::kStrictEqual;
}
Callable callable = Builtins::CallableFor(isolate(), builtin_id);
@ -266,12 +265,12 @@ void JSGenericLowering::LowerJSHasProperty(Node* node) {
const PropertyAccess& p = n.Parameters();
if (!p.feedback().IsValid()) {
node->RemoveInput(JSHasPropertyNode::FeedbackVectorIndex());
ReplaceWithBuiltinCall(node, Builtins::kHasProperty);
ReplaceWithBuiltinCall(node, Builtin::kHasProperty);
} else {
STATIC_ASSERT(n.FeedbackVectorIndex() == 2);
n->InsertInput(zone(), 2,
jsgraph()->TaggedIndexConstant(p.feedback().index()));
ReplaceWithBuiltinCall(node, Builtins::kKeyedHasIC);
ReplaceWithBuiltinCall(node, Builtin::kKeyedHasIC);
}
}
@ -287,15 +286,15 @@ void JSGenericLowering::LowerJSLoadProperty(Node* node) {
jsgraph()->TaggedIndexConstant(p.feedback().index()));
ReplaceWithBuiltinCall(
node, ShouldUseMegamorphicLoadBuiltin(p.feedback(), broker())
? Builtins::kKeyedLoadICTrampoline_Megamorphic
: Builtins::kKeyedLoadICTrampoline);
? Builtin::kKeyedLoadICTrampoline_Megamorphic
: Builtin::kKeyedLoadICTrampoline);
} else {
n->InsertInput(zone(), 2,
jsgraph()->TaggedIndexConstant(p.feedback().index()));
ReplaceWithBuiltinCall(
node, ShouldUseMegamorphicLoadBuiltin(p.feedback(), broker())
? Builtins::kKeyedLoadIC_Megamorphic
: Builtins::kKeyedLoadIC);
? Builtin::kKeyedLoadIC_Megamorphic
: Builtin::kKeyedLoadIC);
}
}
@ -308,7 +307,7 @@ void JSGenericLowering::LowerJSLoadNamed(Node* node) {
if (!p.feedback().IsValid()) {
n->RemoveInput(n.FeedbackVectorIndex());
node->InsertInput(zone(), 1, jsgraph()->HeapConstant(p.name()));
ReplaceWithBuiltinCall(node, Builtins::kGetProperty);
ReplaceWithBuiltinCall(node, Builtin::kGetProperty);
} else if (outer_state->opcode() != IrOpcode::kFrameState) {
n->RemoveInput(n.FeedbackVectorIndex());
node->InsertInput(zone(), 1, jsgraph()->HeapConstant(p.name()));
@ -316,16 +315,16 @@ void JSGenericLowering::LowerJSLoadNamed(Node* node) {
jsgraph()->TaggedIndexConstant(p.feedback().index()));
ReplaceWithBuiltinCall(
node, ShouldUseMegamorphicLoadBuiltin(p.feedback(), broker())
? Builtins::kLoadICTrampoline_Megamorphic
: Builtins::kLoadICTrampoline);
? Builtin::kLoadICTrampoline_Megamorphic
: Builtin::kLoadICTrampoline);
} else {
node->InsertInput(zone(), 1, jsgraph()->HeapConstant(p.name()));
node->InsertInput(zone(), 2,
jsgraph()->TaggedIndexConstant(p.feedback().index()));
ReplaceWithBuiltinCall(
node, ShouldUseMegamorphicLoadBuiltin(p.feedback(), broker())
? Builtins::kLoadIC_Megamorphic
: Builtins::kLoadIC);
? Builtin::kLoadIC_Megamorphic
: Builtin::kLoadIC);
}
}
@ -353,7 +352,7 @@ void JSGenericLowering::LowerJSLoadNamedFromSuper(Node* node) {
node->InsertInput(zone(), 2, jsgraph()->HeapConstant(p.name()));
node->InsertInput(zone(), 3,
jsgraph()->TaggedIndexConstant(p.feedback().index()));
ReplaceWithBuiltinCall(node, Builtins::kLoadSuperIC);
ReplaceWithBuiltinCall(node, Builtin::kLoadSuperIC);
}
void JSGenericLowering::LowerJSLoadGlobal(Node* node) {
@ -401,7 +400,7 @@ void JSGenericLowering::LowerJSGetIterator(Node* node) {
node->InsertInput(zone(), 1, load_slot);
node->InsertInput(zone(), 2, call_slot);
ReplaceWithBuiltinCall(node, Builtins::kGetIteratorWithFeedback);
ReplaceWithBuiltinCall(node, Builtin::kGetIteratorWithFeedback);
}
void JSGenericLowering::LowerJSStoreProperty(Node* node) {
@ -414,11 +413,11 @@ void JSGenericLowering::LowerJSStoreProperty(Node* node) {
n->RemoveInput(n.FeedbackVectorIndex());
node->InsertInput(zone(), 3,
jsgraph()->TaggedIndexConstant(p.feedback().index()));
ReplaceWithBuiltinCall(node, Builtins::kKeyedStoreICTrampoline);
ReplaceWithBuiltinCall(node, Builtin::kKeyedStoreICTrampoline);
} else {
node->InsertInput(zone(), 3,
jsgraph()->TaggedIndexConstant(p.feedback().index()));
ReplaceWithBuiltinCall(node, Builtins::kKeyedStoreIC);
ReplaceWithBuiltinCall(node, Builtin::kKeyedStoreIC);
}
}
@ -437,12 +436,12 @@ void JSGenericLowering::LowerJSStoreNamed(Node* node) {
node->InsertInput(zone(), 1, jsgraph()->HeapConstant(p.name()));
node->InsertInput(zone(), 3,
jsgraph()->TaggedIndexConstant(p.feedback().index()));
ReplaceWithBuiltinCall(node, Builtins::kStoreICTrampoline);
ReplaceWithBuiltinCall(node, Builtin::kStoreICTrampoline);
} else {
node->InsertInput(zone(), 1, jsgraph()->HeapConstant(p.name()));
node->InsertInput(zone(), 3,
jsgraph()->TaggedIndexConstant(p.feedback().index()));
ReplaceWithBuiltinCall(node, Builtins::kStoreIC);
ReplaceWithBuiltinCall(node, Builtin::kStoreIC);
}
}
@ -480,12 +479,12 @@ void JSGenericLowering::LowerJSStoreGlobal(Node* node) {
node->InsertInput(zone(), 0, jsgraph()->HeapConstant(p.name()));
node->InsertInput(zone(), 2,
jsgraph()->TaggedIndexConstant(p.feedback().index()));
ReplaceWithBuiltinCall(node, Builtins::kStoreGlobalICTrampoline);
ReplaceWithBuiltinCall(node, Builtin::kStoreGlobalICTrampoline);
} else {
node->InsertInput(zone(), 0, jsgraph()->HeapConstant(p.name()));
node->InsertInput(zone(), 2,
jsgraph()->TaggedIndexConstant(p.feedback().index()));
ReplaceWithBuiltinCall(node, Builtins::kStoreGlobalIC);
ReplaceWithBuiltinCall(node, Builtin::kStoreGlobalIC);
}
}
@ -506,11 +505,11 @@ void JSGenericLowering::LowerJSStoreInArrayLiteral(Node* node) {
RelaxControls(node);
node->InsertInput(zone(), 3,
jsgraph()->TaggedIndexConstant(p.feedback().index()));
ReplaceWithBuiltinCall(node, Builtins::kStoreInArrayLiteralIC);
ReplaceWithBuiltinCall(node, Builtin::kStoreInArrayLiteralIC);
}
void JSGenericLowering::LowerJSDeleteProperty(Node* node) {
ReplaceWithBuiltinCall(node, Builtins::kDeleteProperty);
ReplaceWithBuiltinCall(node, Builtin::kDeleteProperty);
}
void JSGenericLowering::LowerJSGetSuperConstructor(Node* node) {
@ -536,7 +535,7 @@ void JSGenericLowering::LowerJSHasInPrototypeChain(Node* node) {
}
void JSGenericLowering::LowerJSOrdinaryHasInstance(Node* node) {
ReplaceWithBuiltinCall(node, Builtins::kOrdinaryHasInstance);
ReplaceWithBuiltinCall(node, Builtin::kOrdinaryHasInstance);
}
void JSGenericLowering::LowerJSHasContextExtension(Node* node) {
@ -554,7 +553,7 @@ void JSGenericLowering::LowerJSStoreContext(Node* node) {
void JSGenericLowering::LowerJSCreate(Node* node) {
ReplaceWithBuiltinCall(node, Builtins::kFastNewObject);
ReplaceWithBuiltinCall(node, Builtin::kFastNewObject);
}
@ -622,15 +621,15 @@ void JSGenericLowering::LowerJSObjectIsArray(Node* node) {
}
void JSGenericLowering::LowerJSCreateObject(Node* node) {
ReplaceWithBuiltinCall(node, Builtins::kCreateObjectWithoutProperties);
ReplaceWithBuiltinCall(node, Builtin::kCreateObjectWithoutProperties);
}
void JSGenericLowering::LowerJSParseInt(Node* node) {
ReplaceWithBuiltinCall(node, Builtins::kParseInt);
ReplaceWithBuiltinCall(node, Builtin::kParseInt);
}
void JSGenericLowering::LowerJSRegExpTest(Node* node) {
ReplaceWithBuiltinCall(node, Builtins::kRegExpPrototypeTestFast);
ReplaceWithBuiltinCall(node, Builtin::kRegExpPrototypeTestFast);
}
void JSGenericLowering::LowerJSCreateClosure(Node* node) {
@ -643,7 +642,7 @@ void JSGenericLowering::LowerJSCreateClosure(Node* node) {
// Use the FastNewClosure builtin only for functions allocated in new space.
if (p.allocation() == AllocationType::kYoung) {
ReplaceWithBuiltinCall(node, Builtins::kFastNewClosure);
ReplaceWithBuiltinCall(node, Builtin::kFastNewClosure);
} else {
ReplaceWithRuntimeCall(node, Runtime::kNewClosure_Tenured);
}
@ -671,11 +670,11 @@ void JSGenericLowering::LowerJSCreateFunctionContext(Node* node) {
void JSGenericLowering::LowerJSCreateGeneratorObject(Node* node) {
node->RemoveInput(4); // control
ReplaceWithBuiltinCall(node, Builtins::kCreateGeneratorObject);
ReplaceWithBuiltinCall(node, Builtin::kCreateGeneratorObject);
}
void JSGenericLowering::LowerJSCreateIterResultObject(Node* node) {
ReplaceWithBuiltinCall(node, Builtins::kCreateIterResultObject);
ReplaceWithBuiltinCall(node, Builtin::kCreateIterResultObject);
}
void JSGenericLowering::LowerJSCreateStringIterator(Node* node) {
@ -691,7 +690,7 @@ void JSGenericLowering::LowerJSCreatePromise(Node* node) {
}
void JSGenericLowering::LowerJSCreateTypedArray(Node* node) {
ReplaceWithBuiltinCall(node, Builtins::kCreateTypedArray);
ReplaceWithBuiltinCall(node, Builtin::kCreateTypedArray);
}
void JSGenericLowering::LowerJSCreateLiteralArray(Node* node) {
@ -707,7 +706,7 @@ void JSGenericLowering::LowerJSCreateLiteralArray(Node* node) {
// without properties up to the number of elements that the stubs can handle.
if ((p.flags() & AggregateLiteral::kIsShallow) != 0 &&
p.length() < ConstructorBuiltins::kMaximumClonedShallowArrayElements) {
ReplaceWithBuiltinCall(node, Builtins::kCreateShallowArrayLiteral);
ReplaceWithBuiltinCall(node, Builtin::kCreateShallowArrayLiteral);
} else {
ReplaceWithRuntimeCall(node, Runtime::kCreateArrayLiteral);
}
@ -728,7 +727,7 @@ void JSGenericLowering::LowerJSGetTemplateObject(Node* node) {
node->InsertInput(zone(), 2,
jsgraph()->UintPtrConstant(p.feedback().index()));
ReplaceWithBuiltinCall(node, Builtins::kGetTemplateObject);
ReplaceWithBuiltinCall(node, Builtin::kGetTemplateObject);
}
void JSGenericLowering::LowerJSCreateEmptyLiteralArray(Node* node) {
@ -738,11 +737,11 @@ void JSGenericLowering::LowerJSCreateEmptyLiteralArray(Node* node) {
node->InsertInput(zone(), 1,
jsgraph()->TaggedIndexConstant(p.feedback().index()));
node->RemoveInput(4); // control
ReplaceWithBuiltinCall(node, Builtins::kCreateEmptyArrayLiteral);
ReplaceWithBuiltinCall(node, Builtin::kCreateEmptyArrayLiteral);
}
void JSGenericLowering::LowerJSCreateArrayFromIterable(Node* node) {
ReplaceWithBuiltinCall(node, Builtins::kIterableToListWithSymbolLookup);
ReplaceWithBuiltinCall(node, Builtin::kIterableToListWithSymbolLookup);
}
void JSGenericLowering::LowerJSCreateLiteralObject(Node* node) {
@ -759,7 +758,7 @@ void JSGenericLowering::LowerJSCreateLiteralObject(Node* node) {
if ((p.flags() & AggregateLiteral::kIsShallow) != 0 &&
p.length() <=
ConstructorBuiltins::kMaximumClonedShallowObjectProperties) {
ReplaceWithBuiltinCall(node, Builtins::kCreateShallowObjectLiteral);
ReplaceWithBuiltinCall(node, Builtin::kCreateShallowObjectLiteral);
} else {
ReplaceWithRuntimeCall(node, Runtime::kCreateObjectLiteral);
}
@ -772,11 +771,11 @@ void JSGenericLowering::LowerJSCloneObject(Node* node) {
node->InsertInput(zone(), 1, jsgraph()->SmiConstant(p.flags()));
node->InsertInput(zone(), 2,
jsgraph()->TaggedIndexConstant(p.feedback().index()));
ReplaceWithBuiltinCall(node, Builtins::kCloneObjectIC);
ReplaceWithBuiltinCall(node, Builtin::kCloneObjectIC);
}
void JSGenericLowering::LowerJSCreateEmptyLiteralObject(Node* node) {
ReplaceWithBuiltinCall(node, Builtins::kCreateEmptyLiteralObject);
ReplaceWithBuiltinCall(node, Builtin::kCreateEmptyLiteralObject);
}
void JSGenericLowering::LowerJSCreateLiteralRegExp(Node* node) {
@ -787,7 +786,7 @@ void JSGenericLowering::LowerJSCreateLiteralRegExp(Node* node) {
jsgraph()->TaggedIndexConstant(p.feedback().index()));
node->InsertInput(zone(), 2, jsgraph()->HeapConstant(p.constant()));
node->InsertInput(zone(), 3, jsgraph()->SmiConstant(p.flags()));
ReplaceWithBuiltinCall(node, Builtins::kCreateRegExpLiteral);
ReplaceWithBuiltinCall(node, Builtin::kCreateRegExpLiteral);
}
@ -864,7 +863,7 @@ void JSGenericLowering::LowerJSConstruct(Node* node) {
const int stack_argument_count =
arg_count + kReceiver + kMaybeFeedbackVector;
Callable callable =
Builtins::CallableFor(isolate(), Builtins::kConstruct_WithFeedback);
Builtins::CallableFor(isolate(), Builtin::kConstruct_WithFeedback);
// If this fails, we might need to update the parameter reordering code
// to ensure that the additional arguments passed via stack are pushed
// between top of stack and JS arguments.
@ -893,7 +892,7 @@ void JSGenericLowering::LowerJSConstruct(Node* node) {
NodeProperties::ChangeOp(node, common()->Call(call_descriptor));
} else {
const int stack_argument_count = arg_count + kReceiver;
Callable callable = Builtins::CallableFor(isolate(), Builtins::kConstruct);
Callable callable = Builtins::CallableFor(isolate(), Builtin::kConstruct);
auto call_descriptor = Linkage::GetStubCallDescriptor(
zone(), callable.descriptor(), stack_argument_count, flags);
Node* stub_code = jsgraph()->HeapConstant(callable.code());
@ -926,7 +925,7 @@ void JSGenericLowering::LowerJSConstructWithArrayLike(Node* node) {
const int stack_argument_count =
arg_count - kArgumentList + kReceiver + kMaybeFeedbackVector;
Callable callable = Builtins::CallableFor(
isolate(), Builtins::kConstructWithArrayLike_WithFeedback);
isolate(), Builtin::kConstructWithArrayLike_WithFeedback);
// If this fails, we might need to update the parameter reordering code
// to ensure that the additional arguments passed via stack are pushed
// between top of stack and JS arguments.
@ -954,7 +953,7 @@ void JSGenericLowering::LowerJSConstructWithArrayLike(Node* node) {
} else {
const int stack_argument_count = arg_count - kArgumentList + kReceiver;
Callable callable =
Builtins::CallableFor(isolate(), Builtins::kConstructWithArrayLike);
Builtins::CallableFor(isolate(), Builtin::kConstructWithArrayLike);
// If this fails, we might need to update the parameter reordering code
// to ensure that the additional arguments passed via stack are pushed
// between top of stack and JS arguments.
@ -989,7 +988,7 @@ void JSGenericLowering::LowerJSConstructWithSpread(Node* node) {
const int stack_argument_count =
arg_count + kReceiver + kMaybeFeedbackVector;
Callable callable = Builtins::CallableFor(
isolate(), Builtins::kConstructWithSpread_WithFeedback);
isolate(), Builtin::kConstructWithSpread_WithFeedback);
// If this fails, we might need to update the parameter reordering code
// to ensure that the additional arguments passed via stack are pushed
// between top of stack and JS arguments.
@ -1122,7 +1121,7 @@ void JSGenericLowering::LowerJSCallWithArrayLike(Node* node) {
CollectCallAndConstructFeedback(broker()) && p.feedback().IsValid()) {
const int stack_argument_count = arg_count - kArgumentsList + kReceiver;
Callable callable = Builtins::CallableFor(
isolate(), Builtins::kCallWithArrayLike_WithFeedback);
isolate(), Builtin::kCallWithArrayLike_WithFeedback);
auto call_descriptor = Linkage::GetStubCallDescriptor(
zone(), callable.descriptor(), stack_argument_count, flags);
Node* stub_code = jsgraph()->HeapConstant(callable.code());
@ -1184,8 +1183,8 @@ void JSGenericLowering::LowerJSCallWithSpread(Node* node) {
CollectCallAndConstructFeedback(broker()) && p.feedback().IsValid()) {
const int stack_argument_count =
arg_count - kTheSpread + kReceiver + kMaybeFeedbackVector;
Callable callable = Builtins::CallableFor(
isolate(), Builtins::kCallWithSpread_WithFeedback);
Callable callable =
Builtins::CallableFor(isolate(), Builtin::kCallWithSpread_WithFeedback);
// If this fails, we might need to update the parameter reordering code
// to ensure that the additional arguments passed via stack are pushed
// between top of stack and JS arguments.
@ -1321,7 +1320,7 @@ void JSGenericLowering::LowerJSForInPrepare(Node* node) {
// thus must not have any control uses. Any previously existing control
// outputs have been replaced by the graph rewrite above.
node->InsertInput(zone(), n.FeedbackVectorIndex(), slot);
ReplaceWithBuiltinCall(node, Builtins::kForInPrepare);
ReplaceWithBuiltinCall(node, Builtin::kForInPrepare);
}
void JSGenericLowering::LowerJSForInNext(Node* node) {
@ -1329,7 +1328,7 @@ void JSGenericLowering::LowerJSForInNext(Node* node) {
node->InsertInput(
zone(), 0,
jsgraph()->UintPtrConstant(n.Parameters().feedback().slot.ToInt()));
ReplaceWithBuiltinCall(node, Builtins::kForInNext);
ReplaceWithBuiltinCall(node, Builtin::kForInNext);
}
void JSGenericLowering::LowerJSLoadMessage(Node* node) {

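Not part of the diff: the DEF_UNARY_LOWERING and DEF_BINARY_LOWERING hunks above rely on preprocessor token pasting, so Builtin::k##Name and Builtin::k##Name##_WithFeedback expand to the plain and the _WithFeedback enumerator for each lowered operator. Below is a compilable toy version of that expansion; ReplaceUnaryOpWithBuiltinCall here is a printing stand-in and everything in the sketch namespace is hypothetical, not the V8 implementation.

// Standalone sketch, not part of this CL: how the lowering macros paste names.
#include <cstdio>

namespace sketch {

enum Builtin : int {
  kDecrement,
  kDecrement_WithFeedback,
  kNegate,
  kNegate_WithFeedback,
};

// Printing stand-in for the real helper that rewrites the graph node.
void ReplaceUnaryOpWithBuiltinCall(const char* op, Builtin without_feedback,
                                   Builtin with_feedback) {
  std::printf("%s -> %d (no feedback) / %d (with feedback)\n", op,
              static_cast<int>(without_feedback),
              static_cast<int>(with_feedback));
}

// Each instantiation wires one operator to both builtin variants.
#define DEF_UNARY_LOWERING(Name)                                    \
  void LowerJS##Name() {                                            \
    ReplaceUnaryOpWithBuiltinCall(#Name, Builtin::k##Name,          \
                                  Builtin::k##Name##_WithFeedback); \
  }
DEF_UNARY_LOWERING(Decrement)  // defines LowerJSDecrement()
DEF_UNARY_LOWERING(Negate)     // defines LowerJSNegate()
#undef DEF_UNARY_LOWERING

}  // namespace sketch

int main() {
  sketch::LowerJSDecrement();
  sketch::LowerJSNegate();
  return 0;
}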

@ -37,7 +37,7 @@ class JSGenericLowering final : public AdvancedReducer {
#undef DECLARE_LOWER
// Helpers to replace existing nodes with a generic call.
void ReplaceWithBuiltinCall(Node* node, Builtins::Name builtin);
void ReplaceWithBuiltinCall(Node* node, Builtin builtin);
void ReplaceWithBuiltinCall(Node* node, Callable c,
CallDescriptor::Flags flags);
void ReplaceWithBuiltinCall(Node* node, Callable c,
@ -46,11 +46,11 @@ class JSGenericLowering final : public AdvancedReducer {
void ReplaceWithRuntimeCall(Node* node, Runtime::FunctionId f, int args = -1);
void ReplaceUnaryOpWithBuiltinCall(Node* node,
Builtins::Name builtin_without_feedback,
Builtins::Name builtin_with_feedback);
Builtin builtin_without_feedback,
Builtin builtin_with_feedback);
void ReplaceBinaryOpWithBuiltinCall(Node* node,
Builtins::Name builtin_without_feedback,
Builtins::Name builtin_with_feedback);
Builtin builtin_without_feedback,
Builtin builtin_with_feedback);
Zone* zone() const;
Isolate* isolate() const;


@ -98,7 +98,7 @@ Reduction JSIntrinsicLowering::Reduce(Node* node) {
Reduction JSIntrinsicLowering::ReduceCopyDataProperties(Node* node) {
return Change(
node, Builtins::CallableFor(isolate(), Builtins::kCopyDataProperties), 0);
node, Builtins::CallableFor(isolate(), Builtin::kCopyDataProperties), 0);
}
Reduction JSIntrinsicLowering::ReduceCreateIterResultObject(Node* node) {
@ -158,13 +158,13 @@ Reduction JSIntrinsicLowering::ReduceGeneratorClose(Node* node) {
Reduction JSIntrinsicLowering::ReduceAsyncFunctionAwaitCaught(Node* node) {
return Change(
node,
Builtins::CallableFor(isolate(), Builtins::kAsyncFunctionAwaitCaught), 0);
Builtins::CallableFor(isolate(), Builtin::kAsyncFunctionAwaitCaught), 0);
}
Reduction JSIntrinsicLowering::ReduceAsyncFunctionAwaitUncaught(Node* node) {
return Change(
node,
Builtins::CallableFor(isolate(), Builtins::kAsyncFunctionAwaitUncaught),
Builtins::CallableFor(isolate(), Builtin::kAsyncFunctionAwaitUncaught),
0);
}
@ -188,33 +188,31 @@ Reduction JSIntrinsicLowering::ReduceAsyncFunctionResolve(Node* node) {
Reduction JSIntrinsicLowering::ReduceAsyncGeneratorAwaitCaught(Node* node) {
return Change(
node,
Builtins::CallableFor(isolate(), Builtins::kAsyncGeneratorAwaitCaught),
0);
Builtins::CallableFor(isolate(), Builtin::kAsyncGeneratorAwaitCaught), 0);
}
Reduction JSIntrinsicLowering::ReduceAsyncGeneratorAwaitUncaught(Node* node) {
return Change(
node,
Builtins::CallableFor(isolate(), Builtins::kAsyncGeneratorAwaitUncaught),
Builtins::CallableFor(isolate(), Builtin::kAsyncGeneratorAwaitUncaught),
0);
}
Reduction JSIntrinsicLowering::ReduceAsyncGeneratorReject(Node* node) {
return Change(
node, Builtins::CallableFor(isolate(), Builtins::kAsyncGeneratorReject),
node, Builtins::CallableFor(isolate(), Builtin::kAsyncGeneratorReject),
0);
}
Reduction JSIntrinsicLowering::ReduceAsyncGeneratorResolve(Node* node) {
return Change(
node, Builtins::CallableFor(isolate(), Builtins::kAsyncGeneratorResolve),
node, Builtins::CallableFor(isolate(), Builtin::kAsyncGeneratorResolve),
0);
}
Reduction JSIntrinsicLowering::ReduceAsyncGeneratorYield(Node* node) {
return Change(
node, Builtins::CallableFor(isolate(), Builtins::kAsyncGeneratorYield),
0);
node, Builtins::CallableFor(isolate(), Builtin::kAsyncGeneratorYield), 0);
}
Reduction JSIntrinsicLowering::ReduceGeneratorGetResumeMode(Node* node) {
@ -353,7 +351,7 @@ Reduction JSIntrinsicLowering::ReduceIncBlockCounter(Node* node) {
DCHECK(!Linkage::NeedsFrameStateInput(Runtime::kIncBlockCounter));
DCHECK(!Linkage::NeedsFrameStateInput(Runtime::kInlineIncBlockCounter));
return Change(node,
Builtins::CallableFor(isolate(), Builtins::kIncBlockCounter), 0,
Builtins::CallableFor(isolate(), Builtin::kIncBlockCounter), 0,
kDoesNotNeedFrameState);
}

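Not part of the diff: each JSIntrinsicLowering hunk above has the same shape: look up the Callable for a Builtin value and hand it to Change(). The fragment below is a minimal standalone model of that lookup-then-dispatch shape; the toy Callable carries only a name and an entry point, whereas the real one (as the callable.code() and callable.descriptor() calls elsewhere in this diff show) bundles a code object with a call interface descriptor. All names under the sketch namespace are hypothetical.

// Standalone sketch, not part of this CL: the CallableFor-then-Change shape.
#include <cstdio>

namespace sketch {

enum Builtin : int {
  kAsyncGeneratorReject,
  kAsyncGeneratorResolve,
  kAsyncGeneratorYield,
};

// Toy stand-in; the real Callable pairs a code object with a descriptor.
struct Callable {
  const char* name;
  void (*entry)();
};

void RejectEntry() { std::puts("running AsyncGeneratorReject"); }
void ResolveEntry() { std::puts("running AsyncGeneratorResolve"); }
void YieldEntry() { std::puts("running AsyncGeneratorYield"); }

// Maps a Builtin value to a callable entry, modeled on Builtins::CallableFor.
Callable CallableFor(Builtin builtin) {
  switch (builtin) {
    case kAsyncGeneratorReject:
      return {"AsyncGeneratorReject", RejectEntry};
    case kAsyncGeneratorResolve:
      return {"AsyncGeneratorResolve", ResolveEntry};
    case kAsyncGeneratorYield:
      return {"AsyncGeneratorYield", YieldEntry};
  }
  return {"<unknown>", nullptr};
}

// Toy Change(): "lowers the node" by dispatching to the looked-up entry.
void Change(Callable callable) {
  std::printf("lowering via %s: ", callable.name);
  callable.entry();
}

}  // namespace sketch

int main() {
  sketch::Change(sketch::CallableFor(sketch::Builtin::kAsyncGeneratorYield));
  return 0;
}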

@ -269,7 +269,7 @@ Reduction JSNativeContextSpecialization::ReduceJSAsyncFunctionReject(
// JSRejectPromise operation (which yields undefined).
Node* parameters[] = {promise};
frame_state = CreateStubBuiltinContinuationFrameState(
jsgraph(), Builtins::kAsyncFunctionLazyDeoptContinuation, context,
jsgraph(), Builtin::kAsyncFunctionLazyDeoptContinuation, context,
parameters, arraysize(parameters), frame_state,
ContinuationFrameStateMode::LAZY);
@ -305,7 +305,7 @@ Reduction JSNativeContextSpecialization::ReduceJSAsyncFunctionResolve(
// JSResolvePromise operation (which yields undefined).
Node* parameters[] = {promise};
frame_state = CreateStubBuiltinContinuationFrameState(
jsgraph(), Builtins::kAsyncFunctionLazyDeoptContinuation, context,
jsgraph(), Builtin::kAsyncFunctionLazyDeoptContinuation, context,
parameters, arraysize(parameters), frame_state,
ContinuationFrameStateMode::LAZY);
@ -486,7 +486,7 @@ Reduction JSNativeContextSpecialization::ReduceJSInstanceOf(Node* node) {
// ToBoolean stub that finishes the remaining work of instanceof and returns
// to the caller without duplicating side-effects upon a lazy deopt.
Node* continuation_frame_state = CreateStubBuiltinContinuationFrameState(
jsgraph(), Builtins::kToBooleanLazyDeoptContinuation, context, nullptr,
jsgraph(), Builtin::kToBooleanLazyDeoptContinuation, context, nullptr,
0, frame_state, ContinuationFrameStateMode::LAZY);
// Call the @@hasInstance handler.
@ -1530,7 +1530,7 @@ Reduction JSNativeContextSpecialization::ReduceJSGetIterator(Node* node) {
Node* call_feedback = jsgraph()->HeapConstant(p.callFeedback().vector);
Node* lazy_deopt_parameters[] = {receiver, call_slot, call_feedback};
Node* lazy_deopt_frame_state = CreateStubBuiltinContinuationFrameState(
jsgraph(), Builtins::kGetIteratorWithFeedbackLazyDeoptContinuation,
jsgraph(), Builtin::kGetIteratorWithFeedbackLazyDeoptContinuation,
context, lazy_deopt_parameters, arraysize(lazy_deopt_parameters),
frame_state, ContinuationFrameStateMode::LAZY);
Node* load_property =
@ -1570,7 +1570,7 @@ Reduction JSNativeContextSpecialization::ReduceJSGetIterator(Node* node) {
// Eager deopt of call iterator property
Node* parameters[] = {receiver, load_property, call_slot, call_feedback};
Node* eager_deopt_frame_state = CreateStubBuiltinContinuationFrameState(
jsgraph(), Builtins::kCallIteratorWithFeedback, context, parameters,
jsgraph(), Builtin::kCallIteratorWithFeedback, context, parameters,
arraysize(parameters), frame_state, ContinuationFrameStateMode::EAGER);
Node* deopt_checkpoint = graph()->NewNode(
common()->Checkpoint(), eager_deopt_frame_state, effect, control);


@ -1131,7 +1131,7 @@ Reduction JSTypedLowering::ReduceJSToObject(Node* node) {
Node* rfalse;
{
// Convert {receiver} using the ToObjectStub.
Callable callable = Builtins::CallableFor(isolate(), Builtins::kToObject);
Callable callable = Builtins::CallableFor(isolate(), Builtin::kToObject);
auto call_descriptor = Linkage::GetStubCallDescriptor(
graph()->zone(), callable.descriptor(),
callable.descriptor().GetStackParameterCount(),
@ -1788,7 +1788,7 @@ Reduction JSTypedLowering::ReduceJSCall(Node* node) {
DCHECK(Builtins::HasJSLinkage(shared->builtin_id()));
// Patch {node} to a direct code object call.
Callable callable = Builtins::CallableFor(
isolate(), static_cast<Builtins::Name>(shared->builtin_id()));
isolate(), static_cast<Builtin>(shared->builtin_id()));
CallDescriptor::Flags flags = CallDescriptor::kNeedsFrameState;
const CallInterfaceDescriptor& descriptor = callable.descriptor();
@ -1913,7 +1913,7 @@ Reduction JSTypedLowering::ReduceJSForInNext(Node* node) {
// Filter the {key} to check if it's still a valid property of the
// {receiver} (does the ToName conversion implicitly).
Callable const callable =
Builtins::CallableFor(isolate(), Builtins::kForInFilter);
Builtins::CallableFor(isolate(), Builtin::kForInFilter);
auto call_descriptor = Linkage::GetStubCallDescriptor(
graph()->zone(), callable.descriptor(),
callable.descriptor().GetStackParameterCount(),

Some files were not shown because too many files have changed in this diff.