[maglev][arm64] Import baseline push + share more generic nodes
We import the baseline push helpers, adapted to our needs.

Bug: v8:7700
Change-Id: I2a9d8c2453d44d2da38a9bc32247af1bc3d0c22a
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/4080467
Auto-Submit: Victor Gomes <victorgomes@chromium.org>
Commit-Queue: Victor Gomes <victorgomes@chromium.org>
Reviewed-by: Darius Mercadier <dmercadier@chromium.org>
Cr-Commit-Position: refs/heads/main@{#84681}
Parent: 151ef3189d
Commit: e01821f5f2
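In practice, after this change both the arm64 and x64 back ends expose the same variadic MaglevAssembler::Push(T...), which is what lets the node implementations below move out of the per-architecture files into the shared code. A node can then set up a runtime call in one place, as in the CreateObjectLiteral hunk further down:

  __ Move(kContextRegister, masm->native_context().object());
  __ Push(feedback().vector, TaggedIndex::FromIntptr(feedback().index()),
          boilerplate_descriptor().object(), Smi::FromInt(flags()));
  __ CallRuntime(Runtime::kCreateObjectLiteral, 4);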
@@ -17,6 +17,51 @@ namespace maglev {
 constexpr Register kScratchRegister = x16;
 constexpr DoubleRegister kScratchDoubleReg = d30;
 
+namespace detail {
+template <typename Arg>
+inline Register ToRegister(MaglevAssembler* masm, Register reg, Arg arg) {
+  masm->Move(reg, arg);
+  return reg;
+}
+inline Register ToRegister(MaglevAssembler* masm, Register scratch,
+                           Register reg) {
+  return reg;
+}
+template <typename... Args>
+struct PushAllHelper;
+template <typename... Args>
+inline void PushAll(MaglevAssembler* basm, Args... args) {
+  PushAllHelper<Args...>::Push(basm, args...);
+}
+template <>
+struct PushAllHelper<> {
+  static void Push(MaglevAssembler* basm) {}
+};
+template <typename Arg>
+struct PushAllHelper<Arg> {
+  static void Push(MaglevAssembler* basm, Arg) { FATAL("Unaligned push"); }
+};
+template <typename Arg1, typename Arg2, typename... Args>
+struct PushAllHelper<Arg1, Arg2, Args...> {
+  static void Push(MaglevAssembler* masm, Arg1 arg1, Arg2 arg2, Args... args) {
+    {
+      masm->MacroAssembler::Push(ToRegister(masm, ip0, arg1),
+                                 ToRegister(masm, ip1, arg2));
+    }
+    PushAll(masm, args...);
+  }
+};
+}  // namespace detail
+
+template <typename... T>
+void MaglevAssembler::Push(T... vals) {
+  if (sizeof...(vals) % 2 == 0) {
+    detail::PushAll(this, vals...);
+  } else {
+    detail::PushAll(this, padreg, vals...);
+  }
+}
+
 inline MemOperand MaglevAssembler::StackSlotOperand(StackSlot slot) {
   return MemOperand(fp, slot.index);
 }
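For illustration only (this is not part of the CL), the pair-wise recursion above can be modeled outside of V8. The sketch below uses hypothetical FakeMasm and Reg types in place of MaglevAssembler/MacroAssembler and Register, and simply prints which two slots each step would push; the point is that an odd argument count gets padreg prepended, so every push is a 16-byte pair and sp stays aligned.

// Hypothetical stand-ins; the real types are V8's MaglevAssembler and Register.
#include <cstdio>
#include <string>

struct Reg { std::string name; };
const Reg padreg{"padreg"};

struct FakeMasm {
  // Stands in for MacroAssembler::Push(Register, Register): one 16-byte pair.
  void PushPair(const Reg& a, const Reg& b) {
    std::printf("push pair: %s, %s\n", a.name.c_str(), b.name.c_str());
  }
};

template <typename... Args>
struct PushAllHelper;

template <typename... Args>
void PushAll(FakeMasm* masm, Args... args) {
  PushAllHelper<Args...>::Push(masm, args...);
}

template <>
struct PushAllHelper<> {
  static void Push(FakeMasm*) {}
};

template <typename Arg>
struct PushAllHelper<Arg> {
  // The real helper calls FATAL("Unaligned push") here; it is unreachable
  // because Push() below always pads to an even argument count.
  static void Push(FakeMasm*, Arg) {}
};

template <typename Arg1, typename Arg2, typename... Args>
struct PushAllHelper<Arg1, Arg2, Args...> {
  static void Push(FakeMasm* masm, Arg1 a, Arg2 b, Args... rest) {
    masm->PushPair(a, b);   // consume two arguments per step
    PushAll(masm, rest...);
  }
};

template <typename... T>
void Push(FakeMasm* masm, T... vals) {
  if (sizeof...(vals) % 2 == 0) {
    PushAll(masm, vals...);
  } else {
    PushAll(masm, padreg, vals...);  // odd count: pad to keep sp 16-byte aligned
  }
}

int main() {
  FakeMasm masm;
  Push(&masm, Reg{"x0"}, Reg{"x1"}, Reg{"x2"});
  // Prints: "push pair: padreg, x0" then "push pair: x1, x2".
}

Running it for three registers shows the odd count being padded into two aligned pairs, which is exactly the behavior the imported arm64 helper provides.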
@@ -94,8 +139,6 @@ inline void MaglevAssembler::JumpIf(Condition cond, Label* target) {
   b(target, cond);
 }
 
-// TODO(victorgomes): We should avoid dong a single push in arm64!
-inline void MaglevAssembler::Push(Register src) { Push(src, padreg); }
 inline void MaglevAssembler::Pop(Register dst) { Pop(padreg, dst); }
 
 inline void MaglevAssembler::AssertStackSizeCorrect() {
@@ -94,14 +94,10 @@ UNIMPLEMENTED_NODE_WITH_CALL(Construct)
 UNIMPLEMENTED_NODE_WITH_CALL(ConstructWithSpread)
 UNIMPLEMENTED_NODE_WITH_CALL(ConvertReceiver, mode_)
 UNIMPLEMENTED_NODE(ConvertHoleToUndefined)
-UNIMPLEMENTED_NODE_WITH_CALL(CreateArrayLiteral)
-UNIMPLEMENTED_NODE_WITH_CALL(CreateObjectLiteral)
 UNIMPLEMENTED_NODE_WITH_CALL(CreateEmptyObjectLiteral)
 UNIMPLEMENTED_NODE_WITH_CALL(CreateFunctionContext)
-UNIMPLEMENTED_NODE_WITH_CALL(CreateClosure)
 UNIMPLEMENTED_NODE_WITH_CALL(FastCreateClosure)
 UNIMPLEMENTED_NODE_WITH_CALL(CreateRegExpLiteral)
-UNIMPLEMENTED_NODE_WITH_CALL(ForInNext)
 UNIMPLEMENTED_NODE(GeneratorRestoreRegister)
 UNIMPLEMENTED_NODE(GetSecondReturnedValue)
 UNIMPLEMENTED_NODE_WITH_CALL(GetTemplateObject)
@@ -185,7 +181,6 @@ UNIMPLEMENTED_NODE(BranchIfFloat64Compare, operation_)
 UNIMPLEMENTED_NODE(BranchIfUndefinedOrNull)
 UNIMPLEMENTED_NODE(BranchIfJSReceiver)
 UNIMPLEMENTED_NODE(Switch)
-UNIMPLEMENTED_NODE_WITH_CALL(Abort)
 UNIMPLEMENTED_NODE(Deopt)
 
 void Int32AddWithOverflow::SetValueLocationConstraints() {
@@ -151,12 +151,13 @@ class MaglevAssembler : public MacroAssembler {
   inline void Jump(Label* target);
   inline void JumpIf(Condition cond, Label* target);
 
-  // TODO(victorgomes): Import baseline Push(T...) methods.
-  inline void Push(Register src);
-  using MacroAssembler::Push;
+  // TODO(victorgomes): Import baseline Pop(T...) methods.
   inline void Pop(Register dst);
   using MacroAssembler::Pop;
 
+  template <typename... T>
+  inline void Push(T... vals);
+
   void Prologue(Graph* graph);
 
   inline void FinishCode();
@@ -726,6 +726,36 @@ void ForInPrepare::GenerateCode(MaglevAssembler* masm,
   masm->DefineExceptionHandlerAndLazyDeoptPoint(this);
 }
 
+int ForInNext::MaxCallStackArgs() const {
+  using D = CallInterfaceDescriptorFor<Builtin::kForInNext>::type;
+  return D::GetStackParameterCount();
+}
+void ForInNext::SetValueLocationConstraints() {
+  using D = CallInterfaceDescriptorFor<Builtin::kForInNext>::type;
+  UseFixed(context(), kContextRegister);
+  UseFixed(receiver(), D::GetRegisterParameter(D::kReceiver));
+  UseFixed(cache_array(), D::GetRegisterParameter(D::kCacheArray));
+  UseFixed(cache_type(), D::GetRegisterParameter(D::kCacheType));
+  UseFixed(cache_index(), D::GetRegisterParameter(D::kCacheIndex));
+  DefineAsFixed(this, kReturnRegister0);
+}
+void ForInNext::GenerateCode(MaglevAssembler* masm,
+                             const ProcessingState& state) {
+  using D = CallInterfaceDescriptorFor<Builtin::kForInNext>::type;
+  DCHECK_EQ(ToRegister(context()), kContextRegister);
+  DCHECK_EQ(ToRegister(receiver()), D::GetRegisterParameter(D::kReceiver));
+  DCHECK_EQ(ToRegister(cache_array()), D::GetRegisterParameter(D::kCacheArray));
+  DCHECK_EQ(ToRegister(cache_type()), D::GetRegisterParameter(D::kCacheType));
+  DCHECK_EQ(ToRegister(cache_index()), D::GetRegisterParameter(D::kCacheIndex));
+  __ Move(D::GetRegisterParameter(D::kSlot), feedback().index());
+  // Feedback vector is pushed into the stack.
+  static_assert(D::GetStackParameterIndex(D::kFeedbackVector) == 0);
+  static_assert(D::GetStackParameterCount() == 1);
+  __ Push(feedback().vector);
+  __ CallBuiltin(Builtin::kForInNext);
+  masm->DefineExceptionHandlerAndLazyDeoptPoint(this);
+}
+
 int GetIterator::MaxCallStackArgs() const {
   using D = CallInterfaceDescriptorFor<Builtin::kGetIteratorWithFeedback>::type;
   return D::GetStackParameterCount();
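One detail worth noting: __ Push(feedback().vector) above is a single-operand (odd-count) push. With the helpers imported in the first hunk, the arm64 back end prepends padreg so the value still goes out as one 16-byte pair, while on x64 the same call collapses to a plain MacroAssembler::Push. A rough expansion (illustrative only, not generated code):

// arm64: sizeof...(vals) == 1, so padreg is prepended and one pair is pushed
detail::PushAll(this, padreg, feedback().vector);
// x64: the helper peels the single argument and forwards it directly
MacroAssembler::Push(feedback().vector);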
@@ -833,6 +863,22 @@ void CreateEmptyArrayLiteral::GenerateCode(MaglevAssembler* masm,
   masm->DefineExceptionHandlerAndLazyDeoptPoint(this);
 }
 
+int CreateObjectLiteral::MaxCallStackArgs() const {
+  DCHECK_EQ(Runtime::FunctionForId(Runtime::kCreateObjectLiteral)->nargs, 4);
+  return 4;
+}
+void CreateObjectLiteral::SetValueLocationConstraints() {
+  DefineAsFixed(this, kReturnRegister0);
+}
+void CreateObjectLiteral::GenerateCode(MaglevAssembler* masm,
+                                       const ProcessingState& state) {
+  __ Move(kContextRegister, masm->native_context().object());
+  __ Push(feedback().vector, TaggedIndex::FromIntptr(feedback().index()),
+          boilerplate_descriptor().object(), Smi::FromInt(flags()));
+  __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
+  masm->DefineExceptionHandlerAndLazyDeoptPoint(this);
+}
+
 int CreateShallowArrayLiteral::MaxCallStackArgs() const {
   using D = CallInterfaceDescriptorFor<Builtin::kCreateEmptyArrayLiteral>::type;
   return D::GetStackParameterCount();
@@ -854,6 +900,22 @@ void CreateShallowArrayLiteral::GenerateCode(MaglevAssembler* masm,
   masm->DefineExceptionHandlerAndLazyDeoptPoint(this);
 }
 
+int CreateArrayLiteral::MaxCallStackArgs() const {
+  DCHECK_EQ(Runtime::FunctionForId(Runtime::kCreateArrayLiteral)->nargs, 4);
+  return 4;
+}
+void CreateArrayLiteral::SetValueLocationConstraints() {
+  DefineAsFixed(this, kReturnRegister0);
+}
+void CreateArrayLiteral::GenerateCode(MaglevAssembler* masm,
+                                      const ProcessingState& state) {
+  __ Move(kContextRegister, masm->native_context().object());
+  __ Push(feedback().vector, TaggedIndex::FromIntptr(feedback().index()),
+          constant_elements().object(), Smi::FromInt(flags()));
+  __ CallRuntime(Runtime::kCreateArrayLiteral, 4);
+  masm->DefineExceptionHandlerAndLazyDeoptPoint(this);
+}
+
 int CreateShallowObjectLiteral::MaxCallStackArgs() const {
   using D =
       CallInterfaceDescriptorFor<Builtin::kCreateShallowObjectLiteral>::type;
@@ -875,6 +937,36 @@ void CreateShallowObjectLiteral::GenerateCode(MaglevAssembler* masm,
   masm->DefineExceptionHandlerAndLazyDeoptPoint(this);
 }
 
+int CreateClosure::MaxCallStackArgs() const {
+  DCHECK_EQ(Runtime::FunctionForId(pretenured() ? Runtime::kNewClosure_Tenured
+                                                : Runtime::kNewClosure)
+                ->nargs,
+            2);
+  return 2;
+}
+void CreateClosure::SetValueLocationConstraints() {
+  UseFixed(context(), kContextRegister);
+  DefineAsFixed(this, kReturnRegister0);
+}
+void CreateClosure::GenerateCode(MaglevAssembler* masm,
+                                 const ProcessingState& state) {
+  Runtime::FunctionId function_id =
+      pretenured() ? Runtime::kNewClosure_Tenured : Runtime::kNewClosure;
+  __ Push(shared_function_info().object(), feedback_cell().object());
+  __ CallRuntime(function_id);
+}
+
+int Abort::MaxCallStackArgs() const {
+  DCHECK_EQ(Runtime::FunctionForId(Runtime::kAbort)->nargs, 1);
+  return 1;
+}
+void Abort::SetValueLocationConstraints() {}
+void Abort::GenerateCode(MaglevAssembler* masm, const ProcessingState& state) {
+  __ Push(Smi::FromInt(static_cast<int>(reason())));
+  __ CallRuntime(Runtime::kAbort, 1);
+  __ Trap();
+}
+
 int LoadNamedGeneric::MaxCallStackArgs() const {
   return LoadWithVectorDescriptor::GetStackParameterCount();
 }
@@ -19,6 +19,27 @@ namespace v8 {
 namespace internal {
 namespace maglev {
 
+namespace detail {
+template <typename... Args>
+struct PushAllHelper;
+template <>
+struct PushAllHelper<> {
+  static void Push(MaglevAssembler* masm) {}
+};
+template <typename Arg, typename... Args>
+struct PushAllHelper<Arg, Args...> {
+  static void Push(MaglevAssembler* masm, Arg arg, Args... args) {
+    masm->MacroAssembler::Push(arg);
+    PushAllHelper<Args...>::Push(masm, args...);
+  }
+};
+}  // namespace detail
+
+template <typename... T>
+void MaglevAssembler::Push(T... vals) {
+  detail::PushAllHelper<T...>::Push(this, vals...);
+}
+
 void MaglevAssembler::Branch(Condition condition, BasicBlock* if_true,
                              BasicBlock* if_false, BasicBlock* next_block) {
   // We don't have any branch probability information, so try to jump
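By contrast with the arm64 helper, the x64 PushAllHelper above has no alignment constraint and just peels one argument per recursion step. A minimal standalone sketch of that recursion (hypothetical FakeMasm/Reg types, not V8 code):

#include <cstdio>
#include <string>

struct Reg { std::string name; };

struct FakeMasm {
  // Stands in for MacroAssembler::Push(arg): one 8-byte push per operand.
  void PushOne(const Reg& r) { std::printf("push %s\n", r.name.c_str()); }
};

template <typename... Args>
struct PushAllHelper;

template <>
struct PushAllHelper<> {
  static void Push(FakeMasm*) {}
};

template <typename Arg, typename... Args>
struct PushAllHelper<Arg, Args...> {
  static void Push(FakeMasm* masm, Arg arg, Args... args) {
    masm->PushOne(arg);                       // one operand per step, no padding
    PushAllHelper<Args...>::Push(masm, args...);
  }
};

int main() {
  FakeMasm masm;
  // Prints "push rax", "push rbx", "push rcx" in argument order.
  PushAllHelper<Reg, Reg, Reg>::Push(&masm, Reg{"rax"}, Reg{"rbx"}, Reg{"rcx"});
}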
@@ -222,8 +243,6 @@ inline void MaglevAssembler::JumpIf(Condition cond, Label* target) {
   j(cond, target);
 }
 
-inline void MaglevAssembler::Push(Register src) { MacroAssembler::Push(src); }
-
 inline void MaglevAssembler::Pop(Register dst) { MacroAssembler::Pop(dst); }
 
 inline void MaglevAssembler::MaterialiseValueNode(Register dst,
@@ -198,36 +198,6 @@ void GeneratorRestoreRegister::GenerateCode(MaglevAssembler* masm,
   }
 }
 
-int ForInNext::MaxCallStackArgs() const {
-  using D = CallInterfaceDescriptorFor<Builtin::kForInNext>::type;
-  return D::GetStackParameterCount();
-}
-void ForInNext::SetValueLocationConstraints() {
-  using D = CallInterfaceDescriptorFor<Builtin::kForInNext>::type;
-  UseFixed(context(), kContextRegister);
-  UseFixed(receiver(), D::GetRegisterParameter(D::kReceiver));
-  UseFixed(cache_array(), D::GetRegisterParameter(D::kCacheArray));
-  UseFixed(cache_type(), D::GetRegisterParameter(D::kCacheType));
-  UseFixed(cache_index(), D::GetRegisterParameter(D::kCacheIndex));
-  DefineAsFixed(this, kReturnRegister0);
-}
-void ForInNext::GenerateCode(MaglevAssembler* masm,
-                             const ProcessingState& state) {
-  using D = CallInterfaceDescriptorFor<Builtin::kForInNext>::type;
-  DCHECK_EQ(ToRegister(context()), kContextRegister);
-  DCHECK_EQ(ToRegister(receiver()), D::GetRegisterParameter(D::kReceiver));
-  DCHECK_EQ(ToRegister(cache_array()), D::GetRegisterParameter(D::kCacheArray));
-  DCHECK_EQ(ToRegister(cache_type()), D::GetRegisterParameter(D::kCacheType));
-  DCHECK_EQ(ToRegister(cache_index()), D::GetRegisterParameter(D::kCacheIndex));
-  __ Move(D::GetRegisterParameter(D::kSlot), feedback().index());
-  // Feedback vector is pushed into the stack.
-  static_assert(D::GetStackParameterIndex(D::kFeedbackVector) == 0);
-  static_assert(D::GetStackParameterCount() == 1);
-  __ Push(feedback().vector);
-  __ CallBuiltin(Builtin::kForInNext);
-  masm->DefineExceptionHandlerAndLazyDeoptPoint(this);
-}
-
 void GetSecondReturnedValue::SetValueLocationConstraints() {
   DefineAsFixed(this, kReturnRegister1);
 }
@@ -250,42 +220,6 @@ void GetSecondReturnedValue::GenerateCode(MaglevAssembler* masm,
 #endif  // DEBUG
 }
 
-int CreateArrayLiteral::MaxCallStackArgs() const {
-  DCHECK_EQ(Runtime::FunctionForId(Runtime::kCreateArrayLiteral)->nargs, 4);
-  return 4;
-}
-void CreateArrayLiteral::SetValueLocationConstraints() {
-  DefineAsFixed(this, kReturnRegister0);
-}
-void CreateArrayLiteral::GenerateCode(MaglevAssembler* masm,
-                                      const ProcessingState& state) {
-  __ Move(kContextRegister, masm->native_context().object());
-  __ Push(feedback().vector);
-  __ Push(TaggedIndex::FromIntptr(feedback().index()));
-  __ Push(constant_elements().object());
-  __ Push(Smi::FromInt(flags()));
-  __ CallRuntime(Runtime::kCreateArrayLiteral, 4);
-  masm->DefineExceptionHandlerAndLazyDeoptPoint(this);
-}
-
-int CreateObjectLiteral::MaxCallStackArgs() const {
-  DCHECK_EQ(Runtime::FunctionForId(Runtime::kCreateObjectLiteral)->nargs, 4);
-  return 4;
-}
-void CreateObjectLiteral::SetValueLocationConstraints() {
-  DefineAsFixed(this, kReturnRegister0);
-}
-void CreateObjectLiteral::GenerateCode(MaglevAssembler* masm,
-                                       const ProcessingState& state) {
-  __ Move(kContextRegister, masm->native_context().object());
-  __ Push(feedback().vector);
-  __ Push(TaggedIndex::FromIntptr(feedback().index()));
-  __ Push(boilerplate_descriptor().object());
-  __ Push(Smi::FromInt(flags()));
-  __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
-  masm->DefineExceptionHandlerAndLazyDeoptPoint(this);
-}
-
 int CreateEmptyObjectLiteral::MaxCallStackArgs() const {
   return AllocateDescriptor::GetStackParameterCount();
 }
@@ -385,26 +319,6 @@ void FastCreateClosure::GenerateCode(MaglevAssembler* masm,
   masm->DefineExceptionHandlerAndLazyDeoptPoint(this);
 }
 
-int CreateClosure::MaxCallStackArgs() const {
-  DCHECK_EQ(Runtime::FunctionForId(pretenured() ? Runtime::kNewClosure_Tenured
-                                                : Runtime::kNewClosure)
-                ->nargs,
-            2);
-  return 2;
-}
-void CreateClosure::SetValueLocationConstraints() {
-  UseFixed(context(), kContextRegister);
-  DefineAsFixed(this, kReturnRegister0);
-}
-void CreateClosure::GenerateCode(MaglevAssembler* masm,
-                                 const ProcessingState& state) {
-  Runtime::FunctionId function_id =
-      pretenured() ? Runtime::kNewClosure_Tenured : Runtime::kNewClosure;
-  __ Push(shared_function_info().object());
-  __ Push(feedback_cell().object());
-  __ CallRuntime(function_id);
-}
-
 int CreateRegExpLiteral::MaxCallStackArgs() const {
   using D = CallInterfaceDescriptorFor<Builtin::kCreateRegExpLiteral>::type;
   return D::GetStackParameterCount();
@@ -444,17 +358,6 @@ void GetTemplateObject::GenerateCode(MaglevAssembler* masm,
   masm->DefineExceptionHandlerAndLazyDeoptPoint(this);
 }
 
-int Abort::MaxCallStackArgs() const {
-  DCHECK_EQ(Runtime::FunctionForId(Runtime::kAbort)->nargs, 1);
-  return 1;
-}
-void Abort::SetValueLocationConstraints() {}
-void Abort::GenerateCode(MaglevAssembler* masm, const ProcessingState& state) {
-  __ Push(Smi::FromInt(static_cast<int>(reason())));
-  __ CallRuntime(Runtime::kAbort, 1);
-  __ Trap();
-}
-
 namespace {
 Condition ToCondition(AssertCondition cond) {
   switch (cond) {