[compiler] Add kRetpoline CallDescriptor flag and codegen
This CL adds support for the "retpoline" construction on x64
(https://support.google.com/faqs/answer/7625886), which protects against
speculative execution of indirect calls.

R=mstarzinger@chromium.org,jarin@chromium.org
CC=eholk@chromium.org

Bug: chromium:798964
Change-Id: I2aa5ab9a62dac53c67061378a0bc9cd2026ca7a2
Reviewed-on: https://chromium-review.googlesource.com/867063
Commit-Queue: Ben Titzer <titzer@chromium.org>
Reviewed-by: Jaroslav Sevcik <jarin@chromium.org>
Cr-Commit-Position: refs/heads/master@{#50608}
This commit is contained in:
parent 0c28bfb054
commit 19ce4fc96d
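Within TurboFan, the new flag is just one more bit in the CallDescriptor flag set; the x64 code generator then checks it per call instruction (see HasCallDescriptorFlag below) and routes indirect calls and jumps through the retpoline helpers. A minimal standalone sketch of that bit-flag pattern, not V8 code, with values copied from the enum in this CL:

  #include <cassert>
  #include <cstdint>

  // Hypothetical stand-in for CallDescriptor::Flag; values mirror the enum in the CL.
  enum Flag : uint32_t {
    kNoFlags = 0u,
    kNoAllocate = 1u << 4,
    kPushArgumentCount = 1u << 5,
    kRetpoline = 1u << 6,  // request the retpoline construction for indirect calls
  };

  int main() {
    uint32_t flags = kPushArgumentCount | kRetpoline;
    // Codegen-side test, analogous to HasCallDescriptorFlag() in this CL.
    bool use_retpoline = (flags & kRetpoline) != 0;
    assert(use_retpoline);
    return 0;
  }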
@@ -182,7 +182,9 @@ class V8_EXPORT_PRIVATE CallDescriptor final
     // Does not ever try to allocate space on our heap.
     kNoAllocate = 1u << 4,
     // Push argument count as part of function prologue.
-    kPushArgumentCount = 1u << 5
+    kPushArgumentCount = 1u << 5,
+    // Use retpoline for this call if indirect.
+    kRetpoline = 1u << 6
   };
   typedef base::Flags<Flag> Flags;
 
@@ -593,6 +593,11 @@ void CodeGenerator::BailoutIfDeoptimized() {
   __ j(not_zero, code, RelocInfo::CODE_TARGET);
 }
 
+inline bool HasCallDescriptorFlag(Instruction* instr,
+                                  CallDescriptor::Flag flag) {
+  return MiscField::decode(instr->opcode()) & flag;
+}
+
 // Assembles an instruction after register allocation, producing machine code.
 CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
     Instruction* instr) {
@@ -607,7 +612,11 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
     } else {
       Register reg = i.InputRegister(0);
       __ addp(reg, Immediate(Code::kHeaderSize - kHeapObjectTag));
-      __ call(reg);
+      if (HasCallDescriptorFlag(instr, CallDescriptor::kRetpoline)) {
+        __ RetpolineCall(reg);
+      } else {
+        __ call(reg);
+      }
     }
     RecordCallPosition(instr);
     frame_access_state()->ClearSPDelta();
@@ -620,11 +629,19 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
       if (info()->IsWasm()) {
         __ near_call(wasm_code, RelocInfo::WASM_CALL);
       } else {
-        __ Call(wasm_code, RelocInfo::JS_TO_WASM_CALL);
+        if (HasCallDescriptorFlag(instr, CallDescriptor::kRetpoline)) {
+          __ RetpolineCall(wasm_code, RelocInfo::JS_TO_WASM_CALL);
+        } else {
+          __ Call(wasm_code, RelocInfo::JS_TO_WASM_CALL);
+        }
       }
     } else {
       Register reg = i.InputRegister(0);
-      __ call(reg);
+      if (HasCallDescriptorFlag(instr, CallDescriptor::kRetpoline)) {
+        __ RetpolineCall(reg);
+      } else {
+        __ call(reg);
+      }
     }
     RecordCallPosition(instr);
     frame_access_state()->ClearSPDelta();
@@ -643,7 +660,11 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
     } else {
       Register reg = i.InputRegister(0);
       __ addp(reg, Immediate(Code::kHeaderSize - kHeapObjectTag));
-      __ jmp(reg);
+      if (HasCallDescriptorFlag(instr, CallDescriptor::kRetpoline)) {
+        __ RetpolineJump(reg);
+      } else {
+        __ jmp(reg);
+      }
     }
     unwinding_info_writer_.MarkBlockWillExit();
     frame_access_state()->ClearSPDelta();
@@ -662,7 +683,11 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
       }
     } else {
       Register reg = i.InputRegister(0);
-      __ jmp(reg);
+      if (HasCallDescriptorFlag(instr, CallDescriptor::kRetpoline)) {
+        __ RetpolineJump(reg);
+      } else {
+        __ jmp(reg);
+      }
     }
     unwinding_info_writer_.MarkBlockWillExit();
     frame_access_state()->ClearSPDelta();
@@ -672,7 +697,11 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
     case kArchTailCallAddress: {
      CHECK(!HasImmediateInput(instr, 0));
      Register reg = i.InputRegister(0);
-      __ jmp(reg);
+      if (HasCallDescriptorFlag(instr, CallDescriptor::kRetpoline)) {
+        __ RetpolineJump(reg);
+      } else {
+        __ jmp(reg);
+      }
      unwinding_info_writer_.MarkBlockWillExit();
      frame_access_state()->ClearSPDelta();
      frame_access_state()->SetFrameAccessToDefault();
@@ -4571,6 +4571,11 @@ void Assembler::rorxl(Register dst, const Operand& src, byte imm8) {
   emit(imm8);
 }
 
+void Assembler::pause() {
+  emit(0xF3);
+  emit(0x90);
+}
+
 void Assembler::minps(XMMRegister dst, XMMRegister src) {
   EnsureSpace ensure_space(this);
   emit_optional_rex_32(dst, src);
@@ -1910,6 +1910,7 @@ class Assembler : public AssemblerBase {
   void rorxl(Register dst, const Operand& src, byte imm8);
 
   void lfence();
+  void pause();
 
   // Check the code size generated from label to here.
   int SizeOfCodeGeneratedSince(Label* label) {
@@ -1672,6 +1672,51 @@ void TurboAssembler::Call(Handle<Code> code_object, RelocInfo::Mode rmode) {
   DCHECK_EQ(end_position, pc_offset());
 }
 
+void TurboAssembler::RetpolineCall(Register reg) {
+  Label setup_return, setup_target, inner_indirect_branch, capture_spec;
+
+  jmp(&setup_return);  // Jump past the entire retpoline below.
+
+  bind(&inner_indirect_branch);
+  call(&setup_target);
+
+  bind(&capture_spec);
+  pause();
+  jmp(&capture_spec);
+
+  bind(&setup_target);
+  movq(Operand(rsp, 0), reg);
+  ret(0);
+
+  bind(&setup_return);
+  call(&inner_indirect_branch);  // Callee will return after this instruction.
+}
+
+void TurboAssembler::RetpolineCall(Address destination, RelocInfo::Mode rmode) {
+#ifdef DEBUG
+  // TODO(titzer): CallSize() is wrong for RetpolineCalls
+  // int end_position = pc_offset() + CallSize(destination);
+#endif
+  Move(kScratchRegister, destination, rmode);
+  RetpolineCall(kScratchRegister);
+  // TODO(titzer): CallSize() is wrong for RetpolineCalls
+  // DCHECK_EQ(pc_offset(), end_position);
+}
+
+void TurboAssembler::RetpolineJump(Register reg) {
+  Label setup_target, capture_spec;
+
+  call(&setup_target);
+
+  bind(&capture_spec);
+  pause();
+  jmp(&capture_spec);
+
+  bind(&setup_target);
+  movq(Operand(rsp, 0), reg);
+  ret(0);
+}
+
 void TurboAssembler::Pextrd(Register dst, XMMRegister src, int8_t imm8) {
   if (imm8 == 0) {
     Movd(dst, src);
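The jump variant above makes the trick easy to see: a call pushes a return address, the code overwrites that stack slot with the real target, and the ret then transfers control there. Architecturally this is an indirect jump, but the CPU's return-address prediction for the ret speculates into the pause/jmp capture loop instead of an attacker-trained indirect-branch target. Below is a rough, self-contained illustration of the same RetpolineJump sequence, not V8 code, assuming a Linux x86-64 toolchain with GNU assembler syntax (retpoline_jump and hello are made-up names for the example):

  #include <cstdio>

  // Hypothetical free-standing retpoline jump thunk mirroring the
  // TurboAssembler::RetpolineJump sequence: reach `target` without
  // executing an indirect jmp or call instruction.
  extern "C" void retpoline_jump(void (*target)());
  asm(R"(
    .text
    .globl retpoline_jump
  retpoline_jump:
    call 1f              # pushes the address of the capture loop below
  0:
    pause                # speculation of the ret spins here harmlessly
    jmp 0b
  1:
    mov %rdi, (%rsp)     # overwrite the pushed return address with target
    ret                  # architecturally transfers control to target
  )");

  static void hello() { std::puts("reached via retpoline"); }

  int main() {
    retpoline_jump(hello);  // control reaches hello; hello's ret returns here
    return 0;
  }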
@@ -345,6 +345,11 @@ class TurboAssembler : public Assembler {
   void Call(ExternalReference ext);
   void Call(Label* target) { call(target); }
 
+  void RetpolineCall(Register reg);
+  void RetpolineCall(Address destination, RelocInfo::Mode rmode);
+
+  void RetpolineJump(Register reg);
+
   void CallForDeoptimization(Address target, RelocInfo::Mode rmode) {
     call(target, rmode);
   }
@@ -82,6 +82,7 @@ v8_source_set("cctest_sources") {
     "compiler/test-run-load-store.cc",
     "compiler/test-run-machops.cc",
     "compiler/test-run-native-calls.cc",
+    "compiler/test-run-retpoline.cc",
     "compiler/test-run-stackcheck.cc",
     "compiler/test-run-stubs.cc",
     "compiler/test-run-tail-calls.cc",
@@ -71,6 +71,7 @@
       'compiler/test-run-load-store.cc',
       'compiler/test-run-machops.cc',
       'compiler/test-run-native-calls.cc',
+      'compiler/test-run-retpoline.cc',
       'compiler/test-run-stackcheck.cc',
       'compiler/test-run-stubs.cc',
       'compiler/test-run-tail-calls.cc',
test/cctest/compiler/test-run-retpoline.cc (new file, 206 lines)
@@ -0,0 +1,206 @@
// Copyright 2018 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/assembler-inl.h"
#include "src/code-stub-assembler.h"

#include "test/cctest/cctest.h"
#include "test/cctest/compiler/code-assembler-tester.h"
#include "test/cctest/compiler/function-tester.h"

namespace v8 {
namespace internal {
namespace compiler {

#define __ assembler.

namespace {

// Function that takes a number of pointer-sized integer arguments, calculates
// a weighted sum of them and returns it.
Handle<Code> BuildCallee(Isolate* isolate, CallDescriptor* descriptor) {
  CodeAssemblerTester tester(isolate, descriptor, "callee");
  CodeStubAssembler assembler(tester.state());
  int param_count = static_cast<int>(descriptor->StackParameterCount());
  Node* sum = __ IntPtrConstant(0);
  for (int i = 0; i < param_count; ++i) {
    Node* product = __ IntPtrMul(__ Parameter(i), __ IntPtrConstant(i + 1));
    sum = __ IntPtrAdd(sum, product);
  }
  __ Return(sum);
  return tester.GenerateCodeCloseAndEscape();
}

// Function that calls or tail-calls another function with a number of
// pointer-sized integer arguments.
Handle<Code> BuildCaller(Isolate* isolate, CallDescriptor* descriptor,
                         CallDescriptor* callee_descriptor, bool tail) {
  CodeAssemblerTester tester(isolate, descriptor, "caller");
  CodeStubAssembler assembler(tester.state());
  std::vector<Node*> params;
  // The first parameter is always the callee.
  Handle<Code> callee = BuildCallee(isolate, callee_descriptor);
  // Branch on a constant condition and bind the same target in both arms to
  // defeat the instruction selector, which would otherwise see a constant
  // call target and not emit the indirect (retpoline) call path.
  CodeStubAssembler::Variable target_var(&assembler,
                                         MachineRepresentation::kTagged);
  CodeStubAssembler::Label t(&assembler), f(&assembler),
      end(&assembler, &target_var);
  __ Branch(__ Int32Constant(0), &t, &f);
  __ BIND(&t);
  target_var.Bind(__ HeapConstant(callee));
  __ Goto(&end);
  __ BIND(&f);
  target_var.Bind(__ HeapConstant(callee));
  __ Goto(&end);
  __ BIND(&end);
  params.push_back(target_var.value());

  int param_count = static_cast<int>(callee_descriptor->StackParameterCount());
  for (int i = 0; i < param_count; ++i) {
    params.push_back(__ IntPtrConstant(i));
  }
  DCHECK_EQ(param_count + 1, params.size());
  if (tail) {
    tester.raw_assembler_for_testing()->TailCallN(
        callee_descriptor, param_count + 1, params.data());
  } else {
    Node* result = tester.raw_assembler_for_testing()->CallN(
        callee_descriptor, param_count + 1, params.data());
    __ Return(result);
  }
  return tester.GenerateCodeCloseAndEscape();
}

// Setup function, which calls "caller".
Handle<Code> BuildSetupFunction(Isolate* isolate,
                                CallDescriptor* caller_descriptor,
                                CallDescriptor* callee_descriptor, bool tail) {
  CodeAssemblerTester tester(isolate, 0);
  CodeStubAssembler assembler(tester.state());
  std::vector<Node*> params;
  // The first parameter is always the callee.
  params.push_back(__ HeapConstant(
      BuildCaller(isolate, caller_descriptor, callee_descriptor, tail)));
  // Set up arguments for "Caller".
  int param_count = static_cast<int>(caller_descriptor->StackParameterCount());
  for (int i = 0; i < param_count; ++i) {
    // Use values that are different from the ones we will pass to this
    // function's callee later.
    params.push_back(__ IntPtrConstant(i + 42));
  }
  DCHECK_EQ(param_count + 1, params.size());
  Node* raw_result = tester.raw_assembler_for_testing()->CallN(
      caller_descriptor, param_count + 1, params.data());
  __ Return(__ SmiTag(raw_result));
  return tester.GenerateCodeCloseAndEscape();
}

CallDescriptor* CreateDescriptorForStackArguments(Zone* zone,
                                                  int stack_param_count) {
  LocationSignature::Builder locations(zone, 1,
                                       static_cast<size_t>(stack_param_count));

  locations.AddReturn(LinkageLocation::ForRegister(kReturnRegister0.code(),
                                                   MachineType::IntPtr()));

  for (int i = 0; i < stack_param_count; ++i) {
    locations.AddParam(LinkageLocation::ForCallerFrameSlot(
        i - stack_param_count, MachineType::IntPtr()));
  }

  return new (zone)
      CallDescriptor(CallDescriptor::kCallCodeObject,  // kind
                     MachineType::AnyTagged(),         // target MachineType
                     LinkageLocation::ForAnyRegister(
                         MachineType::AnyTagged()),    // target location
                     locations.Build(),                // location_sig
                     stack_param_count,                // stack_parameter_count
                     Operator::kNoProperties,          // properties
                     kNoCalleeSaved,                   // callee-saved registers
                     kNoCalleeSaved,                   // callee-saved fp
                     CallDescriptor::kRetpoline);      // flags
}

// Test a call or tail call from a caller with n parameters to a callee with m
// parameters. All parameters are pointer-sized.
void TestHelper(int n, int m, bool tail) {
  HandleAndZoneScope scope;
  Isolate* isolate = scope.main_isolate();
  Zone* zone = scope.main_zone();
  CallDescriptor* caller_descriptor =
      CreateDescriptorForStackArguments(zone, n);
  CallDescriptor* callee_descriptor =
      CreateDescriptorForStackArguments(zone, m);
  Handle<Code> setup =
      BuildSetupFunction(isolate, caller_descriptor, callee_descriptor, tail);
  FunctionTester ft(setup, 0);
  Handle<Object> result = ft.Call().ToHandleChecked();
  int expected = 0;
  for (int i = 0; i < m; ++i) expected += (i + 1) * i;
  CHECK_EQ(expected, Handle<Smi>::cast(result)->value());
}

}  // namespace

#undef __

TEST(RetpolineOddEven) {
  TestHelper(1, 0, false);
  TestHelper(1, 2, false);
  TestHelper(3, 2, false);
  TestHelper(3, 4, false);
}

TEST(RetpolineOddEvenTail) {
  TestHelper(1, 0, true);
  TestHelper(1, 2, true);
  TestHelper(3, 2, true);
  TestHelper(3, 4, true);
}

TEST(RetpolineOddOdd) {
  TestHelper(1, 1, false);
  TestHelper(1, 3, false);
  TestHelper(3, 1, false);
  TestHelper(3, 3, false);
}

TEST(RetpolineOddOddTail) {
  TestHelper(1, 1, true);
  TestHelper(1, 3, true);
  TestHelper(3, 1, true);
  TestHelper(3, 3, true);
}

TEST(RetpolineEvenEven) {
  TestHelper(0, 0, false);
  TestHelper(0, 2, false);
  TestHelper(2, 0, false);
  TestHelper(2, 2, false);
}

TEST(RetpolineEvenEvenTail) {
  TestHelper(0, 0, true);
  TestHelper(0, 2, true);
  TestHelper(2, 0, true);
  TestHelper(2, 2, true);
}

TEST(RetpolineEvenOdd) {
  TestHelper(0, 1, false);
  TestHelper(0, 3, false);
  TestHelper(2, 1, false);
  TestHelper(2, 3, false);
}

TEST(RetpolineEvenOddTail) {
  TestHelper(0, 1, true);
  TestHelper(0, 3, true);
  TestHelper(2, 1, true);
  TestHelper(2, 3, true);
}

}  // namespace compiler
}  // namespace internal
}  // namespace v8
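As a quick sanity check of the expected value computed in TestHelper: the callee weights its i-th argument by (i + 1) and the caller passes the constants 0 through m-1, so with m = 3 the expected sum is 1*0 + 2*1 + 3*2 = 8. A tiny standalone check of that arithmetic (illustration only, not part of the CL):

  #include <cassert>

  int main() {
    const int m = 3;  // number of callee parameters in this example
    int expected = 0;
    for (int i = 0; i < m; ++i) expected += (i + 1) * i;  // same formula as TestHelper
    assert(expected == 8);  // 1*0 + 2*1 + 3*2
    return 0;
  }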