[compiler] Introduce AbortCSAAssert runtime function

The existing AbortJS runtime function can be disabled via
--disable-abortjs (which the fuzzers use), but we never want to disable
CSA assertions. Hence use a separate runtime function for those.
This will also reduce the size of generated strings, since the
"CSA_ASSERT failed: " prefix is not part of those strings any more.

As a drive-by, this renames all occurrences of "DebugAbort" to "AbortJS"
for consistency in naming.

R=mstarzinger@chromium.org, tebbi@chromium.org

Bug: v8:9453
Change-Id: I52e48032a1d58f296f0364fe8d917e45a2603a2c
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1692921
Commit-Queue: Clemens Hammacher <clemensh@chromium.org>
Reviewed-by: Yang Guo <yangguo@chromium.org>
Reviewed-by: Michael Starzinger <mstarzinger@chromium.org>
Cr-Commit-Position: refs/heads/master@{#62622}
Authored by Clemens Hammacher on 2019-07-11 08:08:32 +02:00, committed by Commit Bot
parent 754afd63fb
commit 63bcc12775
38 changed files with 285 additions and 63 deletions
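To make the rationale above concrete, here is a minimal standalone sketch (plain C++, not V8 code) of the two abort flavours: an AbortJS-style abort that a flag like --disable-abortjs can silence, and a CSA-assert abort that always terminates the process. The function bodies, flag handling and messages are illustrative only.

#include <cstdio>
#include <cstdlib>

namespace {

bool disable_abortjs = false;  // stands in for V8's --disable-abortjs flag

// AbortJS-style abort: fuzzers can turn this into a no-op via the flag.
void AbortJS(const char* message) {
  if (disable_abortjs) {
    std::fprintf(stderr, "[disabled] abort: %s\n", message);
    return;
  }
  std::fprintf(stderr, "abort: %s\n", message);
  std::abort();
}

// CSA-assert abort: no opt-out, a failed assertion always terminates.
void AbortCSAAssert(const char* message) {
  std::fprintf(stderr, "abort: CSA_ASSERT failed: %s\n", message);
  std::abort();
}

}  // namespace

int main() {
  disable_abortjs = true;
  AbortJS("user-triggered abort");         // silenced when the flag is set
  AbortCSAAssert("IsSmi(x) [foo.cc:42]");  // still aborts the process
}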

View File

@ -269,6 +269,7 @@ namespace internal {
/* Abort */ \
TFC(Abort, Abort) \
TFC(AbortJS, Abort) \
TFC(AbortCSAAssert, Abort) \
\
/* Built-in functions for Javascript */ \
/* Special internal builtins */ \

View File

@ -865,6 +865,11 @@ TF_BUILTIN(AbortJS, CodeStubAssembler) {
TailCallRuntime(Runtime::kAbortJS, NoContextConstant(), message);
}
TF_BUILTIN(AbortCSAAssert, CodeStubAssembler) {
TNode<String> message = CAST(Parameter(Descriptor::kMessageOrMessageId));
TailCallRuntime(Runtime::kAbortCSAAssert, NoContextConstant(), message);
}
void Builtins::Generate_CEntry_Return1_DontSaveFPRegs_ArgvOnStack_NoBuiltinExit(
MacroAssembler* masm) {
Generate_CEntry(masm, 1, kDontSaveFPRegs, kArgvOnStack, false);

View File

@ -171,11 +171,10 @@ void CodeStubAssembler::FailAssert(
DCHECK_NOT_NULL(message);
EmbeddedVector<char, 1024> chars;
if (file != nullptr) {
SNPrintF(chars, "CSA_ASSERT failed: %s [%s:%d]\n", message, file, line);
} else {
SNPrintF(chars, "CSA_ASSERT failed: %s\n", message);
SNPrintF(chars, "%s [%s:%d]", message, file, line);
message = chars.begin();
}
Node* message_node = StringConstant(chars.begin());
Node* message_node = StringConstant(message);
#ifdef DEBUG
// Only print the extra nodes in debug builds.
@ -186,7 +185,7 @@ void CodeStubAssembler::FailAssert(
MaybePrintNodeWithName(this, extra_node5, extra_node5_name);
#endif
DebugAbort(message_node);
AbortCSAAssert(message_node);
Unreachable();
}
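The hunk above moves the "CSA_ASSERT failed: " prefix (and the trailing newline) out of the embedded string constant. A standalone sketch of the size difference, using plain snprintf in place of V8's SNPrintF and made-up values for the assertion text, file and line:

#include <cstdio>
#include <cstring>

int main() {
  const char* message = "IsSmi(x)";  // example assertion text
  const char* file = "foo.cc";       // example file
  int line = 42;                     // example line

  char before[1024], after[1024];
  // Old FailAssert: prefix and trailing newline baked into every constant.
  std::snprintf(before, sizeof(before), "CSA_ASSERT failed: %s [%s:%d]\n",
                message, file, line);
  // New FailAssert: only the condition and location are embedded; the prefix
  // is printed by Runtime_AbortCSAAssert when the abort actually happens.
  std::snprintf(after, sizeof(after), "%s [%s:%d]", message, file, line);

  std::printf("before: %zu bytes, after: %zu bytes\n",
              std::strlen(before) + 1, std::strlen(after) + 1);
}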

View File

@ -881,7 +881,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
AssembleArchTableSwitch(instr);
DCHECK_EQ(LeaveCC, i.OutputSBit());
break;
case kArchDebugAbort:
case kArchAbortJS:
DCHECK(i.InputRegister(0) == r1);
if (!frame_access_state()->has_frame()) {
// We don't actually want to generate a pile of code for this, so just
@ -893,7 +893,24 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
__ Call(isolate()->builtins()->builtin_handle(Builtins::kAbortJS),
RelocInfo::CODE_TARGET);
}
__ stop("kArchDebugAbort");
__ stop("kArchAbortJS");
unwinding_info_writer_.MarkBlockWillExit();
break;
case kArchAbortCSAAssert:
DCHECK(i.InputRegister(0) == r1);
if (!frame_access_state()->has_frame()) {
// We don't actually want to generate a pile of code for this, so just
// claim there is a stack frame, without generating one.
FrameScope scope(tasm(), StackFrame::NONE);
__ Call(
isolate()->builtins()->builtin_handle(Builtins::kAbortCSAAssert),
RelocInfo::CODE_TARGET);
} else {
__ Call(
isolate()->builtins()->builtin_handle(Builtins::kAbortCSAAssert),
RelocInfo::CODE_TARGET);
}
__ stop("kArchAbortCSAAssert");
unwinding_info_writer_.MarkBlockWillExit();
break;
case kArchDebugBreak:

View File

@ -441,9 +441,14 @@ void InstructionSelector::VisitStackSlot(Node* node) {
sequence()->AddImmediate(Constant(slot)), 0, nullptr);
}
void InstructionSelector::VisitDebugAbort(Node* node) {
void InstructionSelector::VisitAbortJS(Node* node) {
ArmOperandGenerator g(this);
Emit(kArchDebugAbort, g.NoOutput(), g.UseFixed(node->InputAt(0), r1));
Emit(kArchAbortJS, g.NoOutput(), g.UseFixed(node->InputAt(0), r1));
}
void InstructionSelector::VisitAbortCSAAssert(Node* node) {
ArmOperandGenerator g(this);
Emit(kArchAbortCSAAssert, g.NoOutput(), g.UseFixed(node->InputAt(0), r1));
}
void InstructionSelector::VisitLoad(Node* node) {

View File

@ -794,7 +794,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
case kArchLookupSwitch:
AssembleArchLookupSwitch(instr);
break;
case kArchDebugAbort:
case kArchAbortJS:
DCHECK(i.InputRegister(0).is(x1));
if (!frame_access_state()->has_frame()) {
// We don't actually want to generate a pile of code for this, so just
@ -806,7 +806,24 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
__ Call(isolate()->builtins()->builtin_handle(Builtins::kAbortJS),
RelocInfo::CODE_TARGET);
}
__ Debug("kArchDebugAbort", 0, BREAK);
__ Debug("kArchAbortJS", 0, BREAK);
unwinding_info_writer_.MarkBlockWillExit();
break;
case kArchAbortCSAAssert:
DCHECK(i.InputRegister(0).is(x1));
if (!frame_access_state()->has_frame()) {
// We don't actually want to generate a pile of code for this, so just
// claim there is a stack frame, without generating one.
FrameScope scope(tasm(), StackFrame::NONE);
__ Call(
isolate()->builtins()->builtin_handle(Builtins::kAbortCSAAssert),
RelocInfo::CODE_TARGET);
} else {
__ Call(
isolate()->builtins()->builtin_handle(Builtins::kAbortCSAAssert),
RelocInfo::CODE_TARGET);
}
__ Debug("kArchAbortCSAAssert", 0, BREAK);
unwinding_info_writer_.MarkBlockWillExit();
break;
case kArchDebugBreak:

View File

@ -535,9 +535,14 @@ void InstructionSelector::VisitStackSlot(Node* node) {
sequence()->AddImmediate(Constant(slot)), 0, nullptr);
}
void InstructionSelector::VisitDebugAbort(Node* node) {
void InstructionSelector::VisitAbortJS(Node* node) {
Arm64OperandGenerator g(this);
Emit(kArchDebugAbort, g.NoOutput(), g.UseFixed(node->InputAt(0), x1));
Emit(kArchAbortJS, g.NoOutput(), g.UseFixed(node->InputAt(0), x1));
}
void InstructionSelector::VisitAbortCSAAssert(Node* node) {
Arm64OperandGenerator g(this);
Emit(kArchAbortCSAAssert, g.NoOutput(), g.UseFixed(node->InputAt(0), x1));
}
void EmitLoad(InstructionSelector* selector, Node* node, InstructionCode opcode,

View File

@ -885,7 +885,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
case kArchComment:
__ RecordComment(reinterpret_cast<const char*>(i.InputInt32(0)));
break;
case kArchDebugAbort:
case kArchAbortJS:
DCHECK(i.InputRegister(0) == edx);
if (!frame_access_state()->has_frame()) {
// We don't actually want to generate a pile of code for this, so just
@ -899,6 +899,22 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
}
__ int3();
break;
case kArchAbortCSAAssert:
DCHECK(i.InputRegister(0) == edx);
if (!frame_access_state()->has_frame()) {
// We don't actually want to generate a pile of code for this, so just
// claim there is a stack frame, without generating one.
FrameScope scope(tasm(), StackFrame::NONE);
__ Call(
isolate()->builtins()->builtin_handle(Builtins::kAbortCSAAssert),
RelocInfo::CODE_TARGET);
} else {
__ Call(
isolate()->builtins()->builtin_handle(Builtins::kAbortCSAAssert),
RelocInfo::CODE_TARGET);
}
__ int3();
break;
case kArchDebugBreak:
__ int3();
break;

View File

@ -272,9 +272,14 @@ void InstructionSelector::VisitStackSlot(Node* node) {
sequence()->AddImmediate(Constant(slot)), 0, nullptr);
}
void InstructionSelector::VisitDebugAbort(Node* node) {
void InstructionSelector::VisitAbortJS(Node* node) {
IA32OperandGenerator g(this);
Emit(kArchDebugAbort, g.NoOutput(), g.UseFixed(node->InputAt(0), edx));
Emit(kArchAbortJS, g.NoOutput(), g.UseFixed(node->InputAt(0), edx));
}
void InstructionSelector::VisitAbortCSAAssert(Node* node) {
IA32OperandGenerator g(this);
Emit(kArchAbortCSAAssert, g.NoOutput(), g.UseFixed(node->InputAt(0), edx));
}
void InstructionSelector::VisitLoad(Node* node) {

View File

@ -82,7 +82,8 @@ inline RecordWriteMode WriteBarrierKindToRecordWriteMode(
V(ArchLookupSwitch) \
V(ArchTableSwitch) \
V(ArchNop) \
V(ArchDebugAbort) \
V(ArchAbortJS) \
V(ArchAbortCSAAssert) \
V(ArchDebugBreak) \
V(ArchComment) \
V(ArchThrowTerminator) \

View File

@ -298,7 +298,8 @@ int InstructionScheduler::GetInstructionFlags(const Instruction* instr) const {
case kArchTailCallCodeObject:
case kArchTailCallAddress:
case kArchTailCallWasm:
case kArchDebugAbort:
case kArchAbortJS:
case kArchAbortCSAAssert:
case kArchDebugBreak:
return kHasSideEffect;

View File

@ -1326,8 +1326,11 @@ void InstructionSelector::VisitNode(Node* node) {
case IrOpcode::kStateValues:
case IrOpcode::kObjectState:
return;
case IrOpcode::kDebugAbort:
VisitDebugAbort(node);
case IrOpcode::kAbortJS:
VisitAbortJS(node);
return;
case IrOpcode::kAbortCSAAssert:
VisitAbortCSAAssert(node);
return;
case IrOpcode::kDebugBreak:
VisitDebugBreak(node);

View File

@ -827,7 +827,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
case kArchTableSwitch:
AssembleArchTableSwitch(instr);
break;
case kArchDebugAbort:
case kArchAbortJS:
DCHECK(i.InputRegister(0) == a0);
if (!frame_access_state()->has_frame()) {
// We don't actually want to generate a pile of code for this, so just
@ -839,7 +839,23 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
__ Call(isolate()->builtins()->builtin_handle(Builtins::kAbortJS),
RelocInfo::CODE_TARGET);
}
__ stop("kArchDebugAbort");
__ stop("kArchAbortJS");
break;
case kArchAbortCSAAssert:
DCHECK(i.InputRegister(0) == a0);
if (!frame_access_state()->has_frame()) {
// We don't actually want to generate a pile of code for this, so just
// claim there is a stack frame, without generating one.
FrameScope scope(tasm(), StackFrame::NONE);
__ Call(
isolate()->builtins()->builtin_handle(Builtins::kAbortCSAAssert),
RelocInfo::CODE_TARGET);
} else {
__ Call(
isolate()->builtins()->builtin_handle(Builtins::kAbortCSAAssert),
RelocInfo::CODE_TARGET);
}
__ stop("kArchAbortCSAAssert");
break;
case kArchDebugBreak:
__ stop("kArchDebugBreak");

View File

@ -1352,7 +1352,8 @@ int InstructionScheduler::GetInstructionLatency(const Instruction* instr) {
return AssembleArchLookupSwitchLatency((instr->InputCount() - 2) / 2);
case kArchTableSwitch:
return AssembleArchTableSwitchLatency();
case kArchDebugAbort:
case kArchAbortJS:
case kArchAbortCSAAssert:
return CallLatency() + 1;
case kArchComment:
case kArchDeoptimize:

View File

@ -274,9 +274,14 @@ void InstructionSelector::VisitStackSlot(Node* node) {
sequence()->AddImmediate(Constant(alignment)), 0, nullptr);
}
void InstructionSelector::VisitDebugAbort(Node* node) {
void InstructionSelector::VisitAbortJS(Node* node) {
MipsOperandGenerator g(this);
Emit(kArchDebugAbort, g.NoOutput(), g.UseFixed(node->InputAt(0), a0));
Emit(kArchAbortJS, g.NoOutput(), g.UseFixed(node->InputAt(0), a0));
}
void InstructionSelector::VisitAbortCSAAssert(Node* node) {
MipsOperandGenerator g(this);
Emit(kArchAbortCSAAssert, g.NoOutput(), g.UseFixed(node->InputAt(0), a0));
}
void InstructionSelector::VisitLoad(Node* node) {

View File

@ -805,7 +805,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
case kArchTableSwitch:
AssembleArchTableSwitch(instr);
break;
case kArchDebugAbort:
case kArchAbortJS:
DCHECK(i.InputRegister(0) == a0);
if (!frame_access_state()->has_frame()) {
// We don't actually want to generate a pile of code for this, so just
@ -817,7 +817,23 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
__ Call(isolate()->builtins()->builtin_handle(Builtins::kAbortJS),
RelocInfo::CODE_TARGET);
}
__ stop("kArchDebugAbort");
__ stop("kArchAbortJS");
break;
case kArchAbortCSAAssert:
DCHECK(i.InputRegister(0) == a0);
if (!frame_access_state()->has_frame()) {
// We don't actually want to generate a pile of code for this, so just
// claim there is a stack frame, without generating one.
FrameScope scope(tasm(), StackFrame::NONE);
__ Call(
isolate()->builtins()->builtin_handle(Builtins::kAbortCSAAssert),
RelocInfo::CODE_TARGET);
} else {
__ Call(
isolate()->builtins()->builtin_handle(Builtins::kAbortCSAAssert),
RelocInfo::CODE_TARGET);
}
__ stop("kArchAbortCSAAssert");
break;
case kArchDebugBreak:
__ stop("kArchDebugBreak");

View File

@ -1263,7 +1263,8 @@ int InstructionScheduler::GetInstructionLatency(const Instruction* instr) {
return AssembleArchLookupSwitchLatency(instr);
case kArchTableSwitch:
return AssembleArchTableSwitchLatency();
case kArchDebugAbort:
case kArchAbortJS:
case kArchAbortCSAAssert:
return CallLatency() + 1;
case kArchDebugBreak:
return 1;

View File

@ -334,9 +334,14 @@ void InstructionSelector::VisitStackSlot(Node* node) {
sequence()->AddImmediate(Constant(alignment)), 0, nullptr);
}
void InstructionSelector::VisitDebugAbort(Node* node) {
void InstructionSelector::VisitAbortJS(Node* node) {
Mips64OperandGenerator g(this);
Emit(kArchDebugAbort, g.NoOutput(), g.UseFixed(node->InputAt(0), a0));
Emit(kArchAbortJS, g.NoOutput(), g.UseFixed(node->InputAt(0), a0));
}
void InstructionSelector::VisitAbortCSAAssert(Node* node) {
Mips64OperandGenerator g(this);
Emit(kArchAbortCSAAssert, g.NoOutput(), g.UseFixed(node->InputAt(0), a0));
}
void EmitLoad(InstructionSelector* selector, Node* node, InstructionCode opcode,

View File

@ -1083,7 +1083,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
AssembleArchTableSwitch(instr);
DCHECK_EQ(LeaveRC, i.OutputRCBit());
break;
case kArchDebugAbort:
case kArchAbortJS:
DCHECK(i.InputRegister(0) == r4);
if (!frame_access_state()->has_frame()) {
// We don't actually want to generate a pile of code for this, so just
@ -1095,7 +1095,23 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
__ Call(isolate()->builtins()->builtin_handle(Builtins::kAbortJS),
RelocInfo::CODE_TARGET);
}
__ stop("kArchDebugAbort");
__ stop("kArchAbortJS");
break;
case kArchAbortCSAAssert:
DCHECK(i.InputRegister(0) == r4);
if (!frame_access_state()->has_frame()) {
// We don't actually want to generate a pile of code for this, so just
// claim there is a stack frame, without generating one.
FrameScope scope(tasm(), StackFrame::NONE);
__ Call(
isolate()->builtins()->builtin_handle(Builtins::kAbortCSAAssert),
RelocInfo::CODE_TARGET);
} else {
__ Call(
isolate()->builtins()->builtin_handle(Builtins::kAbortCSAAssert),
RelocInfo::CODE_TARGET);
}
__ stop("kArchAbortCSAAssert");
break;
case kArchDebugBreak:
__ stop("kArchDebugBreak");

View File

@ -173,9 +173,14 @@ void InstructionSelector::VisitStackSlot(Node* node) {
sequence()->AddImmediate(Constant(slot)), 0, nullptr);
}
void InstructionSelector::VisitDebugAbort(Node* node) {
void InstructionSelector::VisitAbortJS(Node* node) {
PPCOperandGenerator g(this);
Emit(kArchDebugAbort, g.NoOutput(), g.UseFixed(node->InputAt(0), r4));
Emit(kArchAbortJS, g.NoOutput(), g.UseFixed(node->InputAt(0), r4));
}
void InstructionSelector::VisitAbortCSAAssert(Node* node) {
PPCOperandGenerator g(this);
Emit(kArchAbortCSAAssert, g.NoOutput(), g.UseFixed(node->InputAt(0), r4));
}
void InstructionSelector::VisitLoad(Node* node) {

View File

@ -1558,7 +1558,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
case kArchTableSwitch:
AssembleArchTableSwitch(instr);
break;
case kArchDebugAbort:
case kArchAbortJS:
DCHECK(i.InputRegister(0) == r3);
if (!frame_access_state()->has_frame()) {
// We don't actually want to generate a pile of code for this, so just
@ -1570,7 +1570,23 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
__ Call(isolate()->builtins()->builtin_handle(Builtins::kAbortJS),
RelocInfo::CODE_TARGET);
}
__ stop("kArchDebugAbort");
__ stop("kArchAbortJS");
break;
case kArchAbortCSAAssert:
DCHECK(i.InputRegister(0) == r3);
if (!frame_access_state()->has_frame()) {
// We don't actually want to generate a pile of code for this, so just
// claim there is a stack frame, without generating one.
FrameScope scope(tasm(), StackFrame::NONE);
__ Call(
isolate()->builtins()->builtin_handle(Builtins::kAbortCSAAssert),
RelocInfo::CODE_TARGET);
} else {
__ Call(
isolate()->builtins()->builtin_handle(Builtins::kAbortCSAAssert),
RelocInfo::CODE_TARGET);
}
__ stop("kArchAbortCSAAssert");
break;
case kArchDebugBreak:
__ stop("kArchDebugBreak");

View File

@ -690,9 +690,14 @@ void InstructionSelector::VisitStackSlot(Node* node) {
sequence()->AddImmediate(Constant(slot)), 0, nullptr);
}
void InstructionSelector::VisitDebugAbort(Node* node) {
void InstructionSelector::VisitAbortJS(Node* node) {
S390OperandGenerator g(this);
Emit(kArchDebugAbort, g.NoOutput(), g.UseFixed(node->InputAt(0), r3));
Emit(kArchAbortJS, g.NoOutput(), g.UseFixed(node->InputAt(0), r3));
}
void InstructionSelector::VisitAbortCSAAssert(Node* node) {
S390OperandGenerator g(this);
Emit(kArchAbortCSAAssert, g.NoOutput(), g.UseFixed(node->InputAt(0), r3));
}
void InstructionSelector::VisitLoad(Node* node) {

View File

@ -966,7 +966,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
case kArchComment:
__ RecordComment(reinterpret_cast<const char*>(i.InputInt64(0)));
break;
case kArchDebugAbort:
case kArchAbortJS:
DCHECK(i.InputRegister(0) == rdx);
if (!frame_access_state()->has_frame()) {
// We don't actually want to generate a pile of code for this, so just
@ -981,6 +981,23 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
__ int3();
unwinding_info_writer_.MarkBlockWillExit();
break;
case kArchAbortCSAAssert:
DCHECK(i.InputRegister(0) == rdx);
if (!frame_access_state()->has_frame()) {
// We don't actually want to generate a pile of code for this, so just
// claim there is a stack frame, without generating one.
FrameScope scope(tasm(), StackFrame::NONE);
__ Call(
isolate()->builtins()->builtin_handle(Builtins::kAbortCSAAssert),
RelocInfo::CODE_TARGET);
} else {
__ Call(
isolate()->builtins()->builtin_handle(Builtins::kAbortCSAAssert),
RelocInfo::CODE_TARGET);
}
__ int3();
unwinding_info_writer_.MarkBlockWillExit();
break;
case kArchDebugBreak:
__ int3();
break;

View File

@ -309,9 +309,14 @@ void InstructionSelector::VisitStackSlot(Node* node) {
sequence()->AddImmediate(Constant(slot)), 0, nullptr);
}
void InstructionSelector::VisitDebugAbort(Node* node) {
void InstructionSelector::VisitAbortJS(Node* node) {
X64OperandGenerator g(this);
Emit(kArchDebugAbort, g.NoOutput(), g.UseFixed(node->InputAt(0), rdx));
Emit(kArchAbortJS, g.NoOutput(), g.UseFixed(node->InputAt(0), rdx));
}
void InstructionSelector::VisitAbortCSAAssert(Node* node) {
X64OperandGenerator g(this);
Emit(kArchAbortCSAAssert, g.NoOutput(), g.UseFixed(node->InputAt(0), rdx));
}
void InstructionSelector::VisitLoad(Node* node) {

View File

@ -227,7 +227,10 @@ void CodeAssembler::GenerateCheckMaybeObjectIsObject(Node* node,
IntPtrConstant(kWeakHeapObjectTag)),
&ok);
Node* message_node = StringConstant(location);
DebugAbort(message_node);
// TODO(clemensh): Avoid {AbortJS} here, as it will be disabled by fuzzers
// (via --disable-abortjs). Remove the {AbortJS} opcode and builtin
// afterwards.
AbortJS(message_node);
Unreachable();
Bind(&ok);
}
@ -409,8 +412,12 @@ void CodeAssembler::ReturnRaw(Node* value) {
return raw_assembler()->Return(value);
}
void CodeAssembler::DebugAbort(Node* message) {
raw_assembler()->DebugAbort(message);
void CodeAssembler::AbortJS(Node* message) {
raw_assembler()->AbortJS(message);
}
void CodeAssembler::AbortCSAAssert(Node* message) {
raw_assembler()->AbortCSAAssert(message);
}
void CodeAssembler::DebugBreak() { raw_assembler()->DebugBreak(); }

View File

@ -893,7 +893,8 @@ class V8_EXPORT_PRIVATE CodeAssembler {
void ReturnRaw(Node* value);
void DebugAbort(Node* message);
void AbortJS(Node* message);
void AbortCSAAssert(Node* message);
void DebugBreak();
void Unreachable();
void Comment(const char* msg) {

View File

@ -46,8 +46,9 @@ Reduction CsaLoadElimination::Reduce(Node* node) {
case IrOpcode::kStoreToObject:
return ReduceStoreToObject(node, ObjectAccessOf(node->op()));
case IrOpcode::kDebugBreak:
case IrOpcode::kDebugAbort:
// Avoid changing optimizations in the presence of debug instructions
case IrOpcode::kAbortJS:
case IrOpcode::kAbortCSAAssert:
// Avoid changing optimizations in the presence of debug instructions.
return PropagateInputState(node);
case IrOpcode::kCall:
return ReduceCall(node);

View File

@ -558,7 +558,8 @@ class MachineRepresentationChecker {
case IrOpcode::kParameter:
case IrOpcode::kProjection:
break;
case IrOpcode::kDebugAbort:
case IrOpcode::kAbortJS:
case IrOpcode::kAbortCSAAssert:
CheckValueInputIsTagged(node, 0);
break;
case IrOpcode::kLoad:

View File

@ -821,12 +821,19 @@ struct MachineOperatorGlobalCache {
};
Word64PoisonOnSpeculation kWord64PoisonOnSpeculation;
struct DebugAbortOperator : public Operator {
DebugAbortOperator()
: Operator(IrOpcode::kDebugAbort, Operator::kNoThrow, "DebugAbort", 1,
1, 1, 0, 1, 0) {}
struct AbortJSOperator : public Operator {
AbortJSOperator()
: Operator(IrOpcode::kAbortJS, Operator::kNoThrow, "AbortJS", 1, 1, 1,
0, 1, 0) {}
};
DebugAbortOperator kDebugAbort;
AbortJSOperator kAbortJS;
struct AbortCSAAssertOperator : public Operator {
AbortCSAAssertOperator()
: Operator(IrOpcode::kAbortCSAAssert, Operator::kNoThrow,
"AbortCSAAssert", 1, 1, 1, 0, 1, 0) {}
};
AbortCSAAssertOperator kAbortCSAAssert;
struct DebugBreakOperator : public Operator {
DebugBreakOperator()
@ -1019,8 +1026,10 @@ const Operator* MachineOperatorBuilder::BitcastMaybeObjectToWord() {
return &cache_.kBitcastMaybeObjectToWord;
}
const Operator* MachineOperatorBuilder::DebugAbort() {
return &cache_.kDebugAbort;
const Operator* MachineOperatorBuilder::AbortJS() { return &cache_.kAbortJS; }
const Operator* MachineOperatorBuilder::AbortCSAAssert() {
return &cache_.kAbortCSAAssert;
}
const Operator* MachineOperatorBuilder::DebugBreak() {
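For readers unfamiliar with the Operator constructor used above, the six trailing integers are positional counts: value, effect and control inputs, then value, effect and control outputs (a hedged paraphrase of the constructor in src/compiler/operator.h). A standalone sketch with those counts spelled out:

#include <cstdio>

// Plain mirror of the positional counts passed to V8's Operator constructor;
// this is an illustration only, not the real class.
struct OperatorShape {
  const char* mnemonic;
  int value_in, effect_in, control_in;
  int value_out, effect_out, control_out;
};

int main() {
  // Same counts as Operator(IrOpcode::kAbortCSAAssert, Operator::kNoThrow,
  //                         "AbortCSAAssert", 1, 1, 1, 0, 1, 0) above: one value
  // input (the message string), effect and control inputs, no value output,
  // one effect output, no control output.
  OperatorShape abort_csa_assert{"AbortCSAAssert", 1, 1, 1, 0, 1, 0};
  std::printf("%s: %d value input(s), %d value output(s)\n",
              abort_csa_assert.mnemonic, abort_csa_assert.value_in,
              abort_csa_assert.value_out);
}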

View File

@ -219,7 +219,8 @@ class V8_EXPORT_PRIVATE MachineOperatorBuilder final
AlignmentRequirements::FullUnalignedAccessSupport());
const Operator* Comment(const char* msg);
const Operator* DebugAbort();
const Operator* AbortJS();
const Operator* AbortCSAAssert();
const Operator* DebugBreak();
const Operator* UnsafePointerAdd();

View File

@ -99,7 +99,8 @@ bool CanAllocate(const Node* node) {
case IrOpcode::kBitcastTaggedToWord:
case IrOpcode::kBitcastWordToTagged:
case IrOpcode::kComment:
case IrOpcode::kDebugAbort:
case IrOpcode::kAbortJS:
case IrOpcode::kAbortCSAAssert:
case IrOpcode::kDebugBreak:
case IrOpcode::kDeoptimizeIf:
case IrOpcode::kDeoptimizeUnless:

View File

@ -629,7 +629,8 @@
MACHINE_FLOAT64_BINOP_LIST(V) \
MACHINE_FLOAT64_UNOP_LIST(V) \
MACHINE_WORD64_ATOMIC_OP_LIST(V) \
V(DebugAbort) \
V(AbortJS) \
V(AbortCSAAssert) \
V(DebugBreak) \
V(Comment) \
V(Load) \

View File

@ -556,8 +556,12 @@ void RawMachineAssembler::PopAndReturn(Node* pop, Node* v1, Node* v2, Node* v3,
current_block_ = nullptr;
}
void RawMachineAssembler::DebugAbort(Node* message) {
AddNode(machine()->DebugAbort(), message);
void RawMachineAssembler::AbortJS(Node* message) {
AddNode(machine()->AbortJS(), message);
}
void RawMachineAssembler::AbortCSAAssert(Node* message) {
AddNode(machine()->AbortCSAAssert(), message);
}
void RawMachineAssembler::DebugBreak() { AddNode(machine()->DebugBreak()); }

View File

@ -1019,7 +1019,8 @@ class V8_EXPORT_PRIVATE RawMachineAssembler {
void PopAndReturn(Node* pop, Node* v1, Node* v2, Node* v3, Node* v4);
void Bind(RawMachineLabel* label);
void Deoptimize(Node* state);
void DebugAbort(Node* message);
void AbortJS(Node* message);
void AbortCSAAssert(Node* message);
void DebugBreak();
void Unreachable();
void Comment(const std::string& msg);

View File

@ -934,7 +934,8 @@ void Verifier::Visitor::Check(Node* node, const AllNodes& all) {
break;
case IrOpcode::kComment:
case IrOpcode::kDebugAbort:
case IrOpcode::kAbortJS:
case IrOpcode::kAbortCSAAssert:
case IrOpcode::kDebugBreak:
case IrOpcode::kRetain:
case IrOpcode::kUnsafePointerAdd:

View File

@ -908,6 +908,7 @@ static bool TransitivelyCalledBuiltinHasNoSideEffect(Builtins::Name caller,
// Transitively called Builtins:
case Builtins::kAbort:
case Builtins::kAbortJS:
case Builtins::kAbortCSAAssert:
case Builtins::kAdaptorWithBuiltinExitFrame:
case Builtins::kArrayConstructorImpl:
case Builtins::kArrayEveryLoopContinuation:

View File

@ -839,7 +839,6 @@ RUNTIME_FUNCTION(Runtime_Abort) {
UNREACHABLE();
}
RUNTIME_FUNCTION(Runtime_AbortJS) {
HandleScope scope(isolate);
DCHECK_EQ(1, args.length());
@ -854,6 +853,16 @@ RUNTIME_FUNCTION(Runtime_AbortJS) {
UNREACHABLE();
}
RUNTIME_FUNCTION(Runtime_AbortCSAAssert) {
HandleScope scope(isolate);
DCHECK_EQ(1, args.length());
CONVERT_ARG_HANDLE_CHECKED(String, message, 0);
base::OS::PrintError("abort: CSA_ASSERT failed: %s\n",
message->ToCString().get());
isolate->PrintStack(stderr);
base::OS::Abort();
UNREACHABLE();
}
RUNTIME_FUNCTION(Runtime_DisassembleFunction) {
HandleScope scope(isolate);

View File

@ -434,6 +434,7 @@ namespace internal {
#define FOR_EACH_INTRINSIC_TEST(F, I) \
F(Abort, 1, 1) \
F(AbortJS, 1, 1) \
F(AbortCSAAssert, 1, 1) \
F(ArraySpeciesProtector, 0, 1) \
F(ClearFunctionFeedback, 1, 1) \
F(ClearMegamorphicStubCache, 0, 1) \