2016-05-25 08:32:37 +00:00
|
|
|
// Copyright 2016 the V8 project authors. All rights reserved.
|
|
|
|
// Use of this source code is governed by a BSD-style license that can be
|
|
|
|
// found in the LICENSE file.
|
|
|
|
|
2017-11-04 01:03:03 +00:00
|
|
|
#include <atomic>
|
2017-02-03 09:51:04 +00:00
|
|
|
#include <type_traits>
|
|
|
|
|
2016-05-25 08:32:37 +00:00
|
|
|
#include "src/wasm/wasm-interpreter.h"
|
2016-08-22 13:50:23 +00:00
|
|
|
|
2017-03-23 09:46:16 +00:00
|
|
|
#include "src/assembler-inl.h"
|
2017-10-23 09:10:10 +00:00
|
|
|
#include "src/boxed-float.h"
|
2017-08-07 11:40:21 +00:00
|
|
|
#include "src/compiler/wasm-compiler.h"
|
2017-02-13 09:52:26 +00:00
|
|
|
#include "src/conversions.h"
|
2017-03-23 09:46:16 +00:00
|
|
|
#include "src/identity-map.h"
|
2017-02-13 09:52:26 +00:00
|
|
|
#include "src/objects-inl.h"
|
2017-11-21 20:00:23 +00:00
|
|
|
#include "src/trap-handler/trap-handler.h"
|
2016-08-22 13:50:23 +00:00
|
|
|
#include "src/utils.h"
|
2016-05-25 08:32:37 +00:00
|
|
|
#include "src/wasm/decoder.h"
|
2017-02-10 01:16:37 +00:00
|
|
|
#include "src/wasm/function-body-decoder-impl.h"
|
2016-12-21 12:42:06 +00:00
|
|
|
#include "src/wasm/function-body-decoder.h"
|
2017-10-02 07:39:30 +00:00
|
|
|
#include "src/wasm/memory-tracing.h"
|
2017-12-05 00:28:35 +00:00
|
|
|
#include "src/wasm/wasm-engine.h"
|
2016-05-25 08:32:37 +00:00
|
|
|
#include "src/wasm/wasm-external-refs.h"
|
2016-12-05 10:02:26 +00:00
|
|
|
#include "src/wasm/wasm-limits.h"
|
2016-05-25 08:32:37 +00:00
|
|
|
#include "src/wasm/wasm-module.h"
|
2017-08-30 13:53:15 +00:00
|
|
|
#include "src/wasm/wasm-objects-inl.h"
|
2016-05-25 08:32:37 +00:00
|
|
|
|
2016-09-20 16:07:25 +00:00
|
|
|
#include "src/zone/accounting-allocator.h"
|
|
|
|
#include "src/zone/zone-containers.h"
|
2016-05-25 08:32:37 +00:00
|
|
|
|
|
|
|
namespace v8 {
|
|
|
|
namespace internal {
|
|
|
|
namespace wasm {
|
|
|
|
|
|
|
|
#define TRACE(...) \
|
|
|
|
do { \
|
|
|
|
if (FLAG_trace_wasm_interpreter) PrintF(__VA_ARGS__); \
|
|
|
|
} while (false)
|
|
|
|
|
|
|
|
#define FOREACH_INTERNAL_OPCODE(V) V(Breakpoint, 0xFF)
|
|
|
|
|
2017-03-15 15:57:02 +00:00
|
|
|
#define WASM_CTYPES(V) \
|
2018-05-18 21:47:59 +00:00
|
|
|
V(I32, int32_t) V(I64, int64_t) V(F32, float) V(F64, double) V(S128, Simd128)
|
2017-03-15 15:57:02 +00:00
|
|
|
|
2016-05-25 08:32:37 +00:00
|
|
|
#define FOREACH_SIMPLE_BINOP(V) \
|
|
|
|
V(I32Add, uint32_t, +) \
|
|
|
|
V(I32Sub, uint32_t, -) \
|
|
|
|
V(I32Mul, uint32_t, *) \
|
|
|
|
V(I32And, uint32_t, &) \
|
|
|
|
V(I32Ior, uint32_t, |) \
|
|
|
|
V(I32Xor, uint32_t, ^) \
|
|
|
|
V(I32Eq, uint32_t, ==) \
|
|
|
|
V(I32Ne, uint32_t, !=) \
|
|
|
|
V(I32LtU, uint32_t, <) \
|
|
|
|
V(I32LeU, uint32_t, <=) \
|
|
|
|
V(I32GtU, uint32_t, >) \
|
|
|
|
V(I32GeU, uint32_t, >=) \
|
|
|
|
V(I32LtS, int32_t, <) \
|
|
|
|
V(I32LeS, int32_t, <=) \
|
|
|
|
V(I32GtS, int32_t, >) \
|
|
|
|
V(I32GeS, int32_t, >=) \
|
|
|
|
V(I64Add, uint64_t, +) \
|
|
|
|
V(I64Sub, uint64_t, -) \
|
|
|
|
V(I64Mul, uint64_t, *) \
|
|
|
|
V(I64And, uint64_t, &) \
|
|
|
|
V(I64Ior, uint64_t, |) \
|
|
|
|
V(I64Xor, uint64_t, ^) \
|
|
|
|
V(I64Eq, uint64_t, ==) \
|
|
|
|
V(I64Ne, uint64_t, !=) \
|
|
|
|
V(I64LtU, uint64_t, <) \
|
|
|
|
V(I64LeU, uint64_t, <=) \
|
|
|
|
V(I64GtU, uint64_t, >) \
|
|
|
|
V(I64GeU, uint64_t, >=) \
|
|
|
|
V(I64LtS, int64_t, <) \
|
|
|
|
V(I64LeS, int64_t, <=) \
|
|
|
|
V(I64GtS, int64_t, >) \
|
|
|
|
V(I64GeS, int64_t, >=) \
|
|
|
|
V(F32Add, float, +) \
|
2017-01-16 10:43:03 +00:00
|
|
|
V(F32Sub, float, -) \
|
2016-05-25 08:32:37 +00:00
|
|
|
V(F32Eq, float, ==) \
|
|
|
|
V(F32Ne, float, !=) \
|
|
|
|
V(F32Lt, float, <) \
|
|
|
|
V(F32Le, float, <=) \
|
|
|
|
V(F32Gt, float, >) \
|
|
|
|
V(F32Ge, float, >=) \
|
|
|
|
V(F64Add, double, +) \
|
2017-01-16 10:43:03 +00:00
|
|
|
V(F64Sub, double, -) \
|
2016-05-25 08:32:37 +00:00
|
|
|
V(F64Eq, double, ==) \
|
|
|
|
V(F64Ne, double, !=) \
|
|
|
|
V(F64Lt, double, <) \
|
|
|
|
V(F64Le, double, <=) \
|
|
|
|
V(F64Gt, double, >) \
|
2017-02-03 09:51:04 +00:00
|
|
|
V(F64Ge, double, >=) \
|
|
|
|
V(F32Mul, float, *) \
|
|
|
|
V(F64Mul, double, *) \
|
|
|
|
V(F32Div, float, /) \
|
2016-10-20 14:27:23 +00:00
|
|
|
V(F64Div, double, /)
|
|
|
|
|
2016-05-25 08:32:37 +00:00
|
|
|
#define FOREACH_OTHER_BINOP(V) \
|
|
|
|
V(I32DivS, int32_t) \
|
|
|
|
V(I32DivU, uint32_t) \
|
|
|
|
V(I32RemS, int32_t) \
|
|
|
|
V(I32RemU, uint32_t) \
|
|
|
|
V(I32Shl, uint32_t) \
|
|
|
|
V(I32ShrU, uint32_t) \
|
|
|
|
V(I32ShrS, int32_t) \
|
|
|
|
V(I64DivS, int64_t) \
|
|
|
|
V(I64DivU, uint64_t) \
|
|
|
|
V(I64RemS, int64_t) \
|
|
|
|
V(I64RemU, uint64_t) \
|
|
|
|
V(I64Shl, uint64_t) \
|
|
|
|
V(I64ShrU, uint64_t) \
|
|
|
|
V(I64ShrS, int64_t) \
|
|
|
|
V(I32Ror, int32_t) \
|
|
|
|
V(I32Rol, int32_t) \
|
|
|
|
V(I64Ror, int64_t) \
|
|
|
|
V(I64Rol, int64_t) \
|
|
|
|
V(F32Min, float) \
|
|
|
|
V(F32Max, float) \
|
|
|
|
V(F64Min, double) \
|
|
|
|
V(F64Max, double) \
|
|
|
|
V(I32AsmjsDivS, int32_t) \
|
|
|
|
V(I32AsmjsDivU, uint32_t) \
|
|
|
|
V(I32AsmjsRemS, int32_t) \
|
2017-10-26 07:45:12 +00:00
|
|
|
V(I32AsmjsRemU, uint32_t) \
|
|
|
|
V(F32CopySign, Float32) \
|
|
|
|
V(F64CopySign, Float64)
|
2016-05-25 08:32:37 +00:00
|
|
|
|
2018-01-16 19:32:52 +00:00
|
|
|
#define FOREACH_I32CONV_FLOATOP(V) \
|
|
|
|
V(I32SConvertF32, int32_t, float) \
|
|
|
|
V(I32SConvertF64, int32_t, double) \
|
|
|
|
V(I32UConvertF32, uint32_t, float) \
|
|
|
|
V(I32UConvertF64, uint32_t, double)
|
|
|
|
|
2016-05-25 08:32:37 +00:00
|
|
|
#define FOREACH_OTHER_UNOP(V) \
|
|
|
|
V(I32Clz, uint32_t) \
|
|
|
|
V(I32Ctz, uint32_t) \
|
|
|
|
V(I32Popcnt, uint32_t) \
|
|
|
|
V(I32Eqz, uint32_t) \
|
|
|
|
V(I64Clz, uint64_t) \
|
|
|
|
V(I64Ctz, uint64_t) \
|
|
|
|
V(I64Popcnt, uint64_t) \
|
|
|
|
V(I64Eqz, uint64_t) \
|
2017-10-23 09:10:10 +00:00
|
|
|
V(F32Abs, Float32) \
|
|
|
|
V(F32Neg, Float32) \
|
2016-05-25 08:32:37 +00:00
|
|
|
V(F32Ceil, float) \
|
|
|
|
V(F32Floor, float) \
|
|
|
|
V(F32Trunc, float) \
|
|
|
|
V(F32NearestInt, float) \
|
2017-10-23 09:10:10 +00:00
|
|
|
V(F64Abs, Float64) \
|
|
|
|
V(F64Neg, Float64) \
|
2016-05-25 08:32:37 +00:00
|
|
|
V(F64Ceil, double) \
|
|
|
|
V(F64Floor, double) \
|
|
|
|
V(F64Trunc, double) \
|
|
|
|
V(F64NearestInt, double) \
|
|
|
|
V(I32ConvertI64, int64_t) \
|
|
|
|
V(I64SConvertF32, float) \
|
|
|
|
V(I64SConvertF64, double) \
|
|
|
|
V(I64UConvertF32, float) \
|
|
|
|
V(I64UConvertF64, double) \
|
|
|
|
V(I64SConvertI32, int32_t) \
|
|
|
|
V(I64UConvertI32, uint32_t) \
|
|
|
|
V(F32SConvertI32, int32_t) \
|
|
|
|
V(F32UConvertI32, uint32_t) \
|
|
|
|
V(F32SConvertI64, int64_t) \
|
|
|
|
V(F32UConvertI64, uint64_t) \
|
|
|
|
V(F32ConvertF64, double) \
|
|
|
|
V(F32ReinterpretI32, int32_t) \
|
|
|
|
V(F64SConvertI32, int32_t) \
|
|
|
|
V(F64UConvertI32, uint32_t) \
|
|
|
|
V(F64SConvertI64, int64_t) \
|
|
|
|
V(F64UConvertI64, uint64_t) \
|
|
|
|
V(F64ConvertF32, float) \
|
|
|
|
V(F64ReinterpretI64, int64_t) \
|
|
|
|
V(I32AsmjsSConvertF32, float) \
|
|
|
|
V(I32AsmjsUConvertF32, float) \
|
|
|
|
V(I32AsmjsSConvertF64, double) \
|
2017-02-03 09:51:04 +00:00
|
|
|
V(I32AsmjsUConvertF64, double) \
|
|
|
|
V(F32Sqrt, float) \
|
2016-10-20 14:27:23 +00:00
|
|
|
V(F64Sqrt, double)
|
|
|
|
|
2017-03-23 09:46:16 +00:00
|
|
|
namespace {
|
|
|
|
|
2017-10-26 07:45:12 +00:00
|
|
|
// Masks selecting the sign bit of an IEEE-754 binary32 / binary64 value,
// used by the bitwise abs/neg/copysign implementations below.
constexpr uint32_t kFloat32SignBitMask = uint32_t{1} << 31;
constexpr uint64_t kFloat64SignBitMask = uint64_t{1} << 63;
|
|
|
|
|
2017-03-23 09:46:16 +00:00
|
|
|
inline int32_t ExecuteI32DivS(int32_t a, int32_t b, TrapReason* trap) {
|
2016-05-25 08:32:37 +00:00
|
|
|
if (b == 0) {
|
|
|
|
*trap = kTrapDivByZero;
|
|
|
|
return 0;
|
|
|
|
}
|
|
|
|
if (b == -1 && a == std::numeric_limits<int32_t>::min()) {
|
|
|
|
*trap = kTrapDivUnrepresentable;
|
|
|
|
return 0;
|
|
|
|
}
|
|
|
|
return a / b;
|
|
|
|
}
|
|
|
|
|
2017-03-23 09:46:16 +00:00
|
|
|
inline uint32_t ExecuteI32DivU(uint32_t a, uint32_t b, TrapReason* trap) {
|
2016-05-25 08:32:37 +00:00
|
|
|
if (b == 0) {
|
|
|
|
*trap = kTrapDivByZero;
|
|
|
|
return 0;
|
|
|
|
}
|
|
|
|
return a / b;
|
|
|
|
}
|
|
|
|
|
2017-03-23 09:46:16 +00:00
|
|
|
inline int32_t ExecuteI32RemS(int32_t a, int32_t b, TrapReason* trap) {
|
2016-05-25 08:32:37 +00:00
|
|
|
if (b == 0) {
|
|
|
|
*trap = kTrapRemByZero;
|
|
|
|
return 0;
|
|
|
|
}
|
|
|
|
if (b == -1) return 0;
|
|
|
|
return a % b;
|
|
|
|
}
|
|
|
|
|
2017-03-23 09:46:16 +00:00
|
|
|
inline uint32_t ExecuteI32RemU(uint32_t a, uint32_t b, TrapReason* trap) {
|
2016-05-25 08:32:37 +00:00
|
|
|
if (b == 0) {
|
|
|
|
*trap = kTrapRemByZero;
|
|
|
|
return 0;
|
|
|
|
}
|
|
|
|
return a % b;
|
|
|
|
}
|
|
|
|
|
2017-03-23 09:46:16 +00:00
|
|
|
// 32-bit shifts. Per wasm semantics only the low 5 bits of the shift count
// are significant, which also keeps the C++ shift well-defined.
inline uint32_t ExecuteI32Shl(uint32_t a, uint32_t b, TrapReason* trap) {
  const uint32_t count = b & 0x1F;
  return a << count;
}

// Logical (zero-filling) right shift.
inline uint32_t ExecuteI32ShrU(uint32_t a, uint32_t b, TrapReason* trap) {
  const uint32_t count = b & 0x1F;
  return a >> count;
}

// Arithmetic (sign-extending) right shift.
inline int32_t ExecuteI32ShrS(int32_t a, int32_t b, TrapReason* trap) {
  const int32_t count = b & 0x1F;
  return a >> count;
}
|
|
|
|
|
2017-03-23 09:46:16 +00:00
|
|
|
inline int64_t ExecuteI64DivS(int64_t a, int64_t b, TrapReason* trap) {
|
2016-05-25 08:32:37 +00:00
|
|
|
if (b == 0) {
|
|
|
|
*trap = kTrapDivByZero;
|
|
|
|
return 0;
|
|
|
|
}
|
|
|
|
if (b == -1 && a == std::numeric_limits<int64_t>::min()) {
|
|
|
|
*trap = kTrapDivUnrepresentable;
|
|
|
|
return 0;
|
|
|
|
}
|
|
|
|
return a / b;
|
|
|
|
}
|
|
|
|
|
2017-03-23 09:46:16 +00:00
|
|
|
inline uint64_t ExecuteI64DivU(uint64_t a, uint64_t b, TrapReason* trap) {
|
2016-05-25 08:32:37 +00:00
|
|
|
if (b == 0) {
|
|
|
|
*trap = kTrapDivByZero;
|
|
|
|
return 0;
|
|
|
|
}
|
|
|
|
return a / b;
|
|
|
|
}
|
|
|
|
|
2017-03-23 09:46:16 +00:00
|
|
|
inline int64_t ExecuteI64RemS(int64_t a, int64_t b, TrapReason* trap) {
|
2016-05-25 08:32:37 +00:00
|
|
|
if (b == 0) {
|
|
|
|
*trap = kTrapRemByZero;
|
|
|
|
return 0;
|
|
|
|
}
|
|
|
|
if (b == -1) return 0;
|
|
|
|
return a % b;
|
|
|
|
}
|
|
|
|
|
2017-03-23 09:46:16 +00:00
|
|
|
inline uint64_t ExecuteI64RemU(uint64_t a, uint64_t b, TrapReason* trap) {
|
2016-05-25 08:32:37 +00:00
|
|
|
if (b == 0) {
|
|
|
|
*trap = kTrapRemByZero;
|
|
|
|
return 0;
|
|
|
|
}
|
|
|
|
return a % b;
|
|
|
|
}
|
|
|
|
|
2017-03-23 09:46:16 +00:00
|
|
|
// 64-bit shifts. Only the low 6 bits of the shift count are significant,
// keeping the C++ shift well-defined.
inline uint64_t ExecuteI64Shl(uint64_t a, uint64_t b, TrapReason* trap) {
  const uint64_t count = b & 0x3F;
  return a << count;
}

// Logical (zero-filling) right shift.
inline uint64_t ExecuteI64ShrU(uint64_t a, uint64_t b, TrapReason* trap) {
  const uint64_t count = b & 0x3F;
  return a >> count;
}

// Arithmetic (sign-extending) right shift.
inline int64_t ExecuteI64ShrS(int64_t a, int64_t b, TrapReason* trap) {
  const int64_t count = b & 0x3F;
  return a >> count;
}
|
|
|
|
|
2017-03-23 09:46:16 +00:00
|
|
|
// 32-bit rotate right. Masking the rotate count to [0, 31] is not enough on
// its own: for count == 0 the complementary shift (32 - count) is 32, which
// is undefined behavior for a 32-bit operand in C++. Masking the
// complementary shift as well makes count 0 a well-defined no-op.
inline uint32_t ExecuteI32Ror(uint32_t a, uint32_t b, TrapReason* trap) {
  uint32_t shift = (b & 0x1F);
  return (a >> shift) | (a << ((32 - shift) & 0x1F));
}

// 32-bit rotate left; see ExecuteI32Ror for why both shifts are masked.
inline uint32_t ExecuteI32Rol(uint32_t a, uint32_t b, TrapReason* trap) {
  uint32_t shift = (b & 0x1F);
  return (a << shift) | (a >> ((32 - shift) & 0x1F));
}
|
|
|
|
|
2017-03-23 09:46:16 +00:00
|
|
|
// 64-bit rotate right. As in the 32-bit case, the complementary shift must
// also be masked: for count == 0 it would be 64, which is undefined
// behavior for a 64-bit operand in C++.
inline uint64_t ExecuteI64Ror(uint64_t a, uint64_t b, TrapReason* trap) {
  uint32_t shift = (b & 0x3F);
  return (a >> shift) | (a << ((64 - shift) & 0x3F));
}

// 64-bit rotate left; see ExecuteI64Ror for why both shifts are masked.
inline uint64_t ExecuteI64Rol(uint64_t a, uint64_t b, TrapReason* trap) {
  uint32_t shift = (b & 0x3F);
  return (a << shift) | (a >> ((64 - shift) & 0x3F));
}
|
|
|
|
|
2017-03-23 09:46:16 +00:00
|
|
|
// Float min/max delegate to the JSMin/JSMax helpers (JavaScript-style
// semantics; presumably NaN-propagating and -0/+0 aware — confirm in the
// helper's definition in src/utils.h).
inline float ExecuteF32Min(float a, float b, TrapReason* trap) {
  return JSMin(a, b);
}

inline float ExecuteF32Max(float a, float b, TrapReason* trap) {
  return JSMax(a, b);
}

// copysign implemented bitwise: magnitude bits of {a} combined with the
// sign bit of {b}. Operating on raw bits keeps NaN payloads intact.
inline Float32 ExecuteF32CopySign(Float32 a, Float32 b, TrapReason* trap) {
  return Float32::FromBits((a.get_bits() & ~kFloat32SignBitMask) |
                           (b.get_bits() & kFloat32SignBitMask));
}

inline double ExecuteF64Min(double a, double b, TrapReason* trap) {
  return JSMin(a, b);
}

inline double ExecuteF64Max(double a, double b, TrapReason* trap) {
  return JSMax(a, b);
}

// See ExecuteF32CopySign; same bitwise construction for doubles.
inline Float64 ExecuteF64CopySign(Float64 a, Float64 b, TrapReason* trap) {
  return Float64::FromBits((a.get_bits() & ~kFloat64SignBitMask) |
                           (b.get_bits() & kFloat64SignBitMask));
}
|
|
|
|
|
2017-03-23 09:46:16 +00:00
|
|
|
// asm.js signed division: never traps. Division by zero yields 0, and the
// single overflowing case INT32_MIN / -1 wraps to INT32_MIN.
inline int32_t ExecuteI32AsmjsDivS(int32_t a, int32_t b, TrapReason* trap) {
  if (b == 0) return 0;
  const int32_t kMin = std::numeric_limits<int32_t>::min();
  if (a == kMin && b == -1) return kMin;
  return a / b;
}

// asm.js unsigned division: division by zero yields 0 instead of trapping.
inline uint32_t ExecuteI32AsmjsDivU(uint32_t a, uint32_t b, TrapReason* trap) {
  return b == 0 ? 0 : a / b;
}

// asm.js signed remainder: zero divisor yields 0; divisor -1 is special
// cased (result 0) to avoid the INT32_MIN % -1 overflow in C++.
inline int32_t ExecuteI32AsmjsRemS(int32_t a, int32_t b, TrapReason* trap) {
  if (b == 0 || b == -1) return 0;
  return a % b;
}

// asm.js unsigned remainder: zero divisor yields 0 instead of trapping.
inline uint32_t ExecuteI32AsmjsRemU(uint32_t a, uint32_t b, TrapReason* trap) {
  return b == 0 ? 0 : a % b;
}
|
|
|
|
|
2017-03-23 09:46:16 +00:00
|
|
|
// asm.js float->int conversions never trap: they use the JavaScript
// ToInt32/ToUint32 truncations (DoubleToInt32/DoubleToUint32), under which
// NaN and out-of-range values wrap instead of raising an error.
inline int32_t ExecuteI32AsmjsSConvertF32(float a, TrapReason* trap) {
  return DoubleToInt32(a);
}

inline uint32_t ExecuteI32AsmjsUConvertF32(float a, TrapReason* trap) {
  return DoubleToUint32(a);
}

inline int32_t ExecuteI32AsmjsSConvertF64(double a, TrapReason* trap) {
  return DoubleToInt32(a);
}

inline uint32_t ExecuteI32AsmjsUConvertF64(double a, TrapReason* trap) {
  return DoubleToUint32(a);
}
|
|
|
|
|
2017-03-23 09:46:16 +00:00
|
|
|
// Bit-counting and zero-test unops. All delegate to base::bits helpers,
// which are defined for the all-zero input as well (clz/ctz of 0 yield the
// full bit width per the wasm spec).
int32_t ExecuteI32Clz(uint32_t val, TrapReason* trap) {
  return base::bits::CountLeadingZeros(val);
}

uint32_t ExecuteI32Ctz(uint32_t val, TrapReason* trap) {
  return base::bits::CountTrailingZeros(val);
}

uint32_t ExecuteI32Popcnt(uint32_t val, TrapReason* trap) {
  return base::bits::CountPopulation(val);
}

// i32.eqz: 1 if the value is zero, else 0.
inline uint32_t ExecuteI32Eqz(uint32_t val, TrapReason* trap) {
  return val == 0 ? 1 : 0;
}

int64_t ExecuteI64Clz(uint64_t val, TrapReason* trap) {
  return base::bits::CountLeadingZeros(val);
}

inline uint64_t ExecuteI64Ctz(uint64_t val, TrapReason* trap) {
  return base::bits::CountTrailingZeros(val);
}

inline int64_t ExecuteI64Popcnt(uint64_t val, TrapReason* trap) {
  return base::bits::CountPopulation(val);
}

// i64.eqz produces an i32 result, hence the int32_t return type.
inline int32_t ExecuteI64Eqz(uint64_t val, TrapReason* trap) {
  return val == 0 ? 1 : 0;
}
|
|
|
|
|
2017-10-23 09:10:10 +00:00
|
|
|
// f32 unary operations. abs/neg are implemented as raw bit manipulation on
// the boxed Float32, so sign handling is exact for NaNs and -0 as well.
inline Float32 ExecuteF32Abs(Float32 a, TrapReason* trap) {
  return Float32::FromBits(a.get_bits() & ~kFloat32SignBitMask);
}

inline Float32 ExecuteF32Neg(Float32 a, TrapReason* trap) {
  return Float32::FromBits(a.get_bits() ^ kFloat32SignBitMask);
}

inline float ExecuteF32Ceil(float a, TrapReason* trap) { return ceilf(a); }

inline float ExecuteF32Floor(float a, TrapReason* trap) { return floorf(a); }

inline float ExecuteF32Trunc(float a, TrapReason* trap) { return truncf(a); }

// nearbyintf rounds to nearest, ties to even (in the default rounding
// mode), matching wasm's f32.nearest.
inline float ExecuteF32NearestInt(float a, TrapReason* trap) {
  return nearbyintf(a);
}

inline float ExecuteF32Sqrt(float a, TrapReason* trap) {
  float result = sqrtf(a);
  return result;
}
|
|
|
|
|
2017-10-23 09:10:10 +00:00
|
|
|
// f64 unary operations; bitwise abs/neg mirror the f32 versions above.
inline Float64 ExecuteF64Abs(Float64 a, TrapReason* trap) {
  return Float64::FromBits(a.get_bits() & ~kFloat64SignBitMask);
}

inline Float64 ExecuteF64Neg(Float64 a, TrapReason* trap) {
  return Float64::FromBits(a.get_bits() ^ kFloat64SignBitMask);
}

inline double ExecuteF64Ceil(double a, TrapReason* trap) { return ceil(a); }

inline double ExecuteF64Floor(double a, TrapReason* trap) { return floor(a); }

inline double ExecuteF64Trunc(double a, TrapReason* trap) { return trunc(a); }

// Round to nearest, ties to even (default rounding mode), matching
// wasm's f64.nearest.
inline double ExecuteF64NearestInt(double a, TrapReason* trap) {
  return nearbyint(a);
}

inline double ExecuteF64Sqrt(double a, TrapReason* trap) { return sqrt(a); }
|
2016-05-25 08:32:37 +00:00
|
|
|
|
2018-01-16 19:32:52 +00:00
|
|
|
// Checked float -> int truncation. If {a} is representable in int_type
// (checked via is_inbounds), truncate toward zero; otherwise set the trap
// reason and return 0.
template <typename int_type, typename float_type>
int_type ExecuteConvert(float_type a, TrapReason* trap) {
  if (is_inbounds<int_type>(a)) {
    return static_cast<int_type>(a);
  }
  *trap = kTrapFloatUnrepresentable;
  return 0;
}
|
|
|
|
|
2018-01-16 19:32:52 +00:00
|
|
|
template <typename int_type, typename float_type>
|
|
|
|
int_type ExecuteConvertSaturate(float_type a) {
|
2017-12-27 18:54:38 +00:00
|
|
|
TrapReason base_trap = kTrapCount;
|
2018-01-16 19:32:52 +00:00
|
|
|
int32_t val = ExecuteConvert<int_type>(a, &base_trap);
|
2017-12-27 18:54:38 +00:00
|
|
|
if (base_trap == kTrapCount) {
|
|
|
|
return val;
|
|
|
|
}
|
|
|
|
return std::isnan(a) ? 0
|
2018-01-16 19:32:52 +00:00
|
|
|
: (a < static_cast<float_type>(0.0)
|
|
|
|
? std::numeric_limits<int_type>::min()
|
|
|
|
: std::numeric_limits<int_type>::max());
|
2016-05-25 08:32:37 +00:00
|
|
|
}
|
|
|
|
|
2018-04-20 07:25:19 +00:00
|
|
|
// Calls an out-of-line int -> float conversion wrapper. The wrapper {fn}
// communicates through memory: the input is written (unaligned-safe) into a
// scratch buffer sized for the larger of the two types, {fn} converts in
// place, and the result is read back.
template <typename dst_type, typename src_type, void (*fn)(Address)>
inline dst_type CallExternalIntToFloatFunction(src_type input) {
  uint8_t data[std::max(sizeof(dst_type), sizeof(src_type))];
  Address data_addr = reinterpret_cast<Address>(data);
  WriteUnalignedValue<src_type>(data_addr, input);
  fn(data_addr);
  return ReadUnalignedValue<dst_type>(data_addr);
}

// Same scratch-buffer protocol for float -> int conversions, which can
// fail: {fn} returns non-zero on success; on failure the trap reason is set
// and the value read back is ignored by callers that observe the trap.
template <typename dst_type, typename src_type, int32_t (*fn)(Address)>
inline dst_type CallExternalFloatToIntFunction(src_type input,
                                               TrapReason* trap) {
  uint8_t data[std::max(sizeof(dst_type), sizeof(src_type))];
  Address data_addr = reinterpret_cast<Address>(data);
  WriteUnalignedValue<src_type>(data_addr, input);
  if (!fn(data_addr)) *trap = kTrapFloatUnrepresentable;
  return ReadUnalignedValue<dst_type>(data_addr);
}
|
|
|
|
|
2017-03-23 09:46:16 +00:00
|
|
|
// i32.wrap_i64: keep the low 32 bits.
inline uint32_t ExecuteI32ConvertI64(int64_t a, TrapReason* trap) {
  return static_cast<uint32_t>(a & 0xFFFFFFFF);
}

// The 64-bit float->int conversions go through external wrapper functions
// (see CallExternalFloatToIntFunction); they trap via {trap} when the input
// is not representable.
int64_t ExecuteI64SConvertF32(float a, TrapReason* trap) {
  return CallExternalFloatToIntFunction<int64_t, float,
                                        float32_to_int64_wrapper>(a, trap);
}

// Saturating variant: never traps; NaN -> 0, out-of-range clamps to the
// int64_t bounds.
int64_t ExecuteI64SConvertSatF32(float a) {
  TrapReason base_trap = kTrapCount;
  int64_t val = ExecuteI64SConvertF32(a, &base_trap);
  if (base_trap == kTrapCount) {
    return val;
  }
  return std::isnan(a) ? 0
                       : (a < 0.0 ? std::numeric_limits<int64_t>::min()
                                  : std::numeric_limits<int64_t>::max());
}

int64_t ExecuteI64SConvertF64(double a, TrapReason* trap) {
  return CallExternalFloatToIntFunction<int64_t, double,
                                        float64_to_int64_wrapper>(a, trap);
}

// Saturating variant of the double -> int64 conversion.
int64_t ExecuteI64SConvertSatF64(double a) {
  TrapReason base_trap = kTrapCount;
  int64_t val = ExecuteI64SConvertF64(a, &base_trap);
  if (base_trap == kTrapCount) {
    return val;
  }
  return std::isnan(a) ? 0
                       : (a < 0.0 ? std::numeric_limits<int64_t>::min()
                                  : std::numeric_limits<int64_t>::max());
}

uint64_t ExecuteI64UConvertF32(float a, TrapReason* trap) {
  return CallExternalFloatToIntFunction<uint64_t, float,
                                        float32_to_uint64_wrapper>(a, trap);
}

// Saturating variant: negative inputs clamp to 0, too-large inputs to
// UINT64_MAX.
uint64_t ExecuteI64UConvertSatF32(float a) {
  TrapReason base_trap = kTrapCount;
  uint64_t val = ExecuteI64UConvertF32(a, &base_trap);
  if (base_trap == kTrapCount) {
    return val;
  }
  return std::isnan(a) ? 0
                       : (a < 0.0 ? std::numeric_limits<uint64_t>::min()
                                  : std::numeric_limits<uint64_t>::max());
}

uint64_t ExecuteI64UConvertF64(double a, TrapReason* trap) {
  return CallExternalFloatToIntFunction<uint64_t, double,
                                        float64_to_uint64_wrapper>(a, trap);
}
|
|
|
|
|
2018-02-07 19:10:16 +00:00
|
|
|
uint64_t ExecuteI64UConvertSatF64(double a) {
|
|
|
|
TrapReason base_trap = kTrapCount;
|
|
|
|
int64_t val = ExecuteI64UConvertF64(a, &base_trap);
|
|
|
|
if (base_trap == kTrapCount) {
|
|
|
|
return val;
|
|
|
|
}
|
|
|
|
return std::isnan(a) ? 0
|
|
|
|
: (a < 0.0 ? std::numeric_limits<uint64_t>::min()
|
|
|
|
: std::numeric_limits<uint64_t>::max());
|
|
|
|
}
|
|
|
|
|
2017-03-23 09:46:16 +00:00
|
|
|
// Simple widening / narrowing / reinterpreting conversions. None of these
// can trap; the {trap} parameter exists only for signature uniformity with
// the trapping conversions.

// i64.extend_i32_s: sign-extend.
inline int64_t ExecuteI64SConvertI32(int32_t a, TrapReason* trap) {
  return static_cast<int64_t>(a);
}

// i64.extend_i32_u: zero-extend (the uint64_t cast avoids sign extension;
// the implicit conversion to the int64_t return value preserves the bits).
inline int64_t ExecuteI64UConvertI32(uint32_t a, TrapReason* trap) {
  return static_cast<uint64_t>(a);
}

inline float ExecuteF32SConvertI32(int32_t a, TrapReason* trap) {
  return static_cast<float>(a);
}

inline float ExecuteF32UConvertI32(uint32_t a, TrapReason* trap) {
  return static_cast<float>(a);
}

inline float ExecuteF32SConvertI64(int64_t a, TrapReason* trap) {
  return static_cast<float>(a);
}

// uint64 -> f32 goes through an external wrapper (not all targets convert
// unsigned 64-bit values natively).
inline float ExecuteF32UConvertI64(uint64_t a, TrapReason* trap) {
  return CallExternalIntToFloatFunction<float, uint64_t,
                                        uint64_to_float32_wrapper>(a);
}

inline float ExecuteF32ConvertF64(double a, TrapReason* trap) {
  return static_cast<float>(a);
}

// f32.reinterpret_i32: same bits, different type.
inline Float32 ExecuteF32ReinterpretI32(int32_t a, TrapReason* trap) {
  return Float32::FromBits(a);
}

inline double ExecuteF64SConvertI32(int32_t a, TrapReason* trap) {
  return static_cast<double>(a);
}

inline double ExecuteF64UConvertI32(uint32_t a, TrapReason* trap) {
  return static_cast<double>(a);
}

inline double ExecuteF64SConvertI64(int64_t a, TrapReason* trap) {
  return static_cast<double>(a);
}

// uint64 -> f64 also goes through an external wrapper.
inline double ExecuteF64UConvertI64(uint64_t a, TrapReason* trap) {
  return CallExternalIntToFloatFunction<double, uint64_t,
                                        uint64_to_float64_wrapper>(a);
}

inline double ExecuteF64ConvertF32(float a, TrapReason* trap) {
  return static_cast<double>(a);
}

// f64.reinterpret_i64: same bits, different type.
inline Float64 ExecuteF64ReinterpretI64(int64_t a, TrapReason* trap) {
  return Float64::FromBits(a);
}
|
|
|
|
|
2017-07-14 13:49:01 +00:00
|
|
|
// i32.reinterpret_f32: extract the raw IEEE-754 bits. Goes through the
// boxed float representation so that the exact bit pattern (including NaN
// payload bits) is preserved.
inline int32_t ExecuteI32ReinterpretF32(WasmValue a) {
  return a.to_f32_boxed().get_bits();
}

// i64.reinterpret_f64: extract the raw IEEE-754 bits of the boxed double.
inline int64_t ExecuteI64ReinterpretF64(WasmValue a) {
  return a.to_f64_boxed().get_bits();
}
|
|
|
|
|
|
|
|
// Opcodes used internally by the interpreter (expanded from
// FOREACH_INTERNAL_OPCODE). They occupy fixed byte values so they can appear
// in the (maybe altered) copy of a function's code — see
// {InterpreterCode::start} vs {InterpreterCode::orig_start}.
enum InternalOpcode {
#define DECL_INTERNAL_ENUM(name, value) kInternal##name = value,
  FOREACH_INTERNAL_OPCODE(DECL_INTERNAL_ENUM)
#undef DECL_INTERNAL_ENUM
};
|
|
|
|
|
2017-03-23 09:46:16 +00:00
|
|
|
// Returns a printable name for {val}: interpreter-internal opcodes (see
// {InternalOpcode} above) get an "Internal"-prefixed name; everything else is
// delegated to the regular wasm opcode name table.
const char* OpcodeName(uint32_t val) {
  switch (val) {
#define DECL_INTERNAL_CASE(name, value) \
  case kInternal##name:                 \
    return "Internal" #name;
    FOREACH_INTERNAL_OPCODE(DECL_INTERNAL_CASE)
#undef DECL_INTERNAL_CASE
  }
  return WasmOpcodes::OpcodeName(static_cast<WasmOpcode>(val));
}
|
|
|
|
|
2017-04-26 17:41:26 +00:00
|
|
|
class SideTable;
|
2017-04-25 09:43:39 +00:00
|
|
|
|
|
|
|
// Code and metadata needed to execute a function.
// NOTE: member order matters — CodeMap::AddFunction initializes this struct
// with a braced aggregate initializer in this exact order.
struct InterpreterCode {
  const WasmFunction* function;  // wasm function
  BodyLocalDecls locals;         // local declarations
  const byte* orig_start;        // start of original code
  const byte* orig_end;          // end of original code
  byte* start;                   // start of (maybe altered) code
  byte* end;                     // end of (maybe altered) code
  SideTable* side_table;         // precomputed side table for control flow.

  // Returns a pointer into the (maybe altered) code at offset {pc}.
  const byte* at(pc_t pc) { return start + pc; }
};
|
|
|
|
|
2016-05-25 08:32:37 +00:00
|
|
|
// A helper class to compute the control transfers for each bytecode offset.
// Control transfers allow Br, BrIf, BrTable, If, Else, and End bytecodes to
// be directly executed without the need to dynamically track blocks.
class SideTable : public ZoneObject {
 public:
  // Maps the offset of each branching bytecode to its precomputed transfer
  // (pc delta, stack-pointer delta, target arity).
  ControlTransferMap map_;
  // Maximum operand stack height reached anywhere in the function body; used
  // to pre-size the interpreter stack (see ThreadImpl::PushFrame).
  uint32_t max_stack_height_ = 0;

  // Performs a single forward pass over the function body, simulating the
  // stack height and resolving every branch target into {map_}.
  SideTable(Zone* zone, const WasmModule* module, InterpreterCode* code)
      : map_(zone) {
    // Create a zone for all temporary objects.
    Zone control_transfer_zone(zone->allocator(), ZONE_NAME);

    // Represents a control flow label.
    class CLabel : public ZoneObject {
      explicit CLabel(Zone* zone, uint32_t target_stack_height, uint32_t arity)
          : target_stack_height(target_stack_height),
            arity(arity),
            refs(zone) {}

     public:
      // One forward reference (a branch) to this label.
      struct Ref {
        const byte* from_pc;
        const uint32_t stack_height;
      };
      // Bound position of the label; nullptr until Bind() is called.
      const byte* target = nullptr;
      // Stack height at the label's position.
      uint32_t target_stack_height;
      // Arity when branching to this label.
      const uint32_t arity;
      // All branches referencing this label, resolved in Finish().
      ZoneVector<Ref> refs;

      static CLabel* New(Zone* zone, uint32_t stack_height, uint32_t arity) {
        return new (zone) CLabel(zone, stack_height, arity);
      }

      // Bind this label to the given PC.
      void Bind(const byte* pc) {
        DCHECK_NULL(target);
        target = pc;
      }

      // Reference this label from the given location.
      void Ref(const byte* from_pc, uint32_t stack_height) {
        // Target being bound before a reference means this is a loop.
        DCHECK_IMPLIES(target, *target == kExprLoop);
        refs.push_back({from_pc, stack_height});
      }

      // Emits one ControlTransferEntry per recorded reference, now that
      // {target} is known.
      void Finish(ControlTransferMap* map, const byte* start) {
        DCHECK_NOT_NULL(target);
        for (auto ref : refs) {
          size_t offset = static_cast<size_t>(ref.from_pc - start);
          auto pcdiff = static_cast<pcdiff_t>(target - ref.from_pc);
          DCHECK_GE(ref.stack_height, target_stack_height);
          spdiff_t spdiff =
              static_cast<spdiff_t>(ref.stack_height - target_stack_height);
          TRACE("control transfer @%zu: Δpc %d, stack %u->%u = -%u\n", offset,
                pcdiff, ref.stack_height, target_stack_height, spdiff);
          ControlTransferEntry& entry = (*map)[offset];
          entry.pc_diff = pcdiff;
          entry.sp_diff = spdiff;
          entry.target_arity = arity;
        }
      }
    };

    // An entry in the control stack.
    struct Control {
      const byte* pc;
      CLabel* end_label;
      // Only set for if-blocks that have not seen their else yet.
      CLabel* else_label;
      // Arity (number of values on the stack) when exiting this control
      // structure via |end|.
      uint32_t exit_arity;
      // Track whether this block was already left, i.e. all further
      // instructions are unreachable.
      bool unreachable = false;

      Control(const byte* pc, CLabel* end_label, CLabel* else_label,
              uint32_t exit_arity)
          : pc(pc),
            end_label(end_label),
            else_label(else_label),
            exit_arity(exit_arity) {}
      Control(const byte* pc, CLabel* end_label, uint32_t exit_arity)
          : Control(pc, end_label, nullptr, exit_arity) {}

      // Resolves all references to this control construct's labels.
      void Finish(ControlTransferMap* map, const byte* start) {
        end_label->Finish(map, start);
        if (else_label) else_label->Finish(map, start);
      }
    };

    // Compute the ControlTransfer map.
    // This algorithm maintains a stack of control constructs similar to the
    // AST decoder. The {control_stack} allows matching {br,br_if,br_table}
    // bytecodes with their target, as well as determining whether the current
    // bytecodes are within the true or false block of an else.
    ZoneVector<Control> control_stack(&control_transfer_zone);
    uint32_t stack_height = 0;
    uint32_t func_arity =
        static_cast<uint32_t>(code->function->sig->return_count());
    // The function body acts as the implicit outermost block; its label's
    // arity is the function's return count.
    CLabel* func_label =
        CLabel::New(&control_transfer_zone, stack_height, func_arity);
    control_stack.emplace_back(code->orig_start, func_label, func_arity);
    auto control_parent = [&]() -> Control& {
      DCHECK_LE(2, control_stack.size());
      return control_stack[control_stack.size() - 2];
    };
    auto copy_unreachable = [&] {
      control_stack.back().unreachable = control_parent().unreachable;
    };
    for (BytecodeIterator i(code->orig_start, code->orig_end, &code->locals);
         i.has_next(); i.next()) {
      WasmOpcode opcode = i.current();
      if (WasmOpcodes::IsPrefixOpcode(opcode)) opcode = i.prefixed_opcode();
      bool unreachable = control_stack.back().unreachable;
      if (unreachable) {
        // Inside unreachable code, skip the stack-height simulation — the
        // height is not meaningful there.
        TRACE("@%u: %s (is unreachable)\n", i.pc_offset(),
              WasmOpcodes::OpcodeName(opcode));
      } else {
        auto stack_effect =
            StackEffect(module, code->function->sig, i.pc(), i.end());
        TRACE("@%u: %s (sp %d - %d + %d)\n", i.pc_offset(),
              WasmOpcodes::OpcodeName(opcode), stack_height, stack_effect.first,
              stack_effect.second);
        DCHECK_GE(stack_height, stack_effect.first);
        DCHECK_GE(kMaxUInt32, static_cast<uint64_t>(stack_height) -
                                  stack_effect.first + stack_effect.second);
        stack_height = stack_height - stack_effect.first + stack_effect.second;
        if (stack_height > max_stack_height_) max_stack_height_ = stack_height;
      }
      switch (opcode) {
        case kExprBlock:
        case kExprLoop: {
          bool is_loop = opcode == kExprLoop;
          BlockTypeImmediate<Decoder::kNoValidate> imm(&i, i.pc());
          if (imm.type == kWasmVar) {
            imm.sig = module->signatures[imm.sig_index];
          }
          TRACE("control @%u: %s, arity %d->%d\n", i.pc_offset(),
                is_loop ? "Loop" : "Block", imm.in_arity(), imm.out_arity());
          // A loop's label is bound at its start (branch = back edge,
          // carrying in_arity values); a block's label is bound at its end
          // (branch = forward edge, carrying out_arity values).
          CLabel* label =
              CLabel::New(&control_transfer_zone, stack_height,
                          is_loop ? imm.in_arity() : imm.out_arity());
          control_stack.emplace_back(i.pc(), label, imm.out_arity());
          copy_unreachable();
          if (is_loop) label->Bind(i.pc());
          break;
        }
        case kExprIf: {
          BlockTypeImmediate<Decoder::kNoValidate> imm(&i, i.pc());
          if (imm.type == kWasmVar) {
            imm.sig = module->signatures[imm.sig_index];
          }
          TRACE("control @%u: If, arity %d->%d\n", i.pc_offset(),
                imm.in_arity(), imm.out_arity());
          CLabel* end_label = CLabel::New(&control_transfer_zone, stack_height,
                                          imm.out_arity());
          CLabel* else_label =
              CLabel::New(&control_transfer_zone, stack_height, 0);
          control_stack.emplace_back(i.pc(), end_label, else_label,
                                     imm.out_arity());
          copy_unreachable();
          // The conditional branch at the if itself jumps to the else (or
          // end, if no else follows).
          if (!unreachable) else_label->Ref(i.pc(), stack_height);
          break;
        }
        case kExprElse: {
          Control* c = &control_stack.back();
          copy_unreachable();
          TRACE("control @%u: Else\n", i.pc_offset());
          // Falling off the true-branch jumps over the else to the end.
          if (!control_parent().unreachable) {
            c->end_label->Ref(i.pc(), stack_height);
          }
          DCHECK_NOT_NULL(c->else_label);
          c->else_label->Bind(i.pc() + 1);
          c->else_label->Finish(&map_, code->orig_start);
          c->else_label = nullptr;
          DCHECK_GE(stack_height, c->end_label->target_stack_height);
          stack_height = c->end_label->target_stack_height;
          break;
        }
        case kExprEnd: {
          Control* c = &control_stack.back();
          TRACE("control @%u: End\n", i.pc_offset());
          // Only loops have bound labels.
          DCHECK_IMPLIES(c->end_label->target, *c->pc == kExprLoop);
          if (!c->end_label->target) {
            // If an if did not have an else, bind the else label to the end.
            if (c->else_label) c->else_label->Bind(i.pc());
            c->end_label->Bind(i.pc() + 1);
          }
          c->Finish(&map_, code->orig_start);
          DCHECK_GE(stack_height, c->end_label->target_stack_height);
          stack_height = c->end_label->target_stack_height + c->exit_arity;
          control_stack.pop_back();
          break;
        }
        case kExprBr: {
          BreakDepthImmediate<Decoder::kNoValidate> imm(&i, i.pc());
          TRACE("control @%u: Br[depth=%u]\n", i.pc_offset(), imm.depth);
          Control* c = &control_stack[control_stack.size() - imm.depth - 1];
          if (!unreachable) c->end_label->Ref(i.pc(), stack_height);
          break;
        }
        case kExprBrIf: {
          BreakDepthImmediate<Decoder::kNoValidate> imm(&i, i.pc());
          TRACE("control @%u: BrIf[depth=%u]\n", i.pc_offset(), imm.depth);
          Control* c = &control_stack[control_stack.size() - imm.depth - 1];
          if (!unreachable) c->end_label->Ref(i.pc(), stack_height);
          break;
        }
        case kExprBrTable: {
          BranchTableImmediate<Decoder::kNoValidate> imm(&i, i.pc());
          BranchTableIterator<Decoder::kNoValidate> iterator(&i, imm);
          TRACE("control @%u: BrTable[count=%u]\n", i.pc_offset(),
                imm.table_count);
          if (!unreachable) {
            // Each table entry gets its own transfer, keyed by pc + index.
            while (iterator.has_next()) {
              uint32_t j = iterator.cur_index();
              uint32_t target = iterator.next();
              Control* c = &control_stack[control_stack.size() - target - 1];
              c->end_label->Ref(i.pc() + j, stack_height);
            }
          }
          break;
        }
        default:
          break;
      }
      if (WasmOpcodes::IsUnconditionalJump(opcode)) {
        control_stack.back().unreachable = true;
      }
    }
    DCHECK_EQ(0, control_stack.size());
    DCHECK_EQ(func_arity, stack_height);
  }

  // Returns the precomputed transfer for the branching bytecode at offset
  // {from}. The entry must have been created during construction.
  ControlTransferEntry& Lookup(pc_t from) {
    auto result = map_.find(from);
    DCHECK(result != map_.end());
    return result->second;
  }
};
|
|
|
|
|
2017-03-23 09:46:16 +00:00
|
|
|
// Outcome of dispatching a call that may leave the interpreter (imported or
// indirect calls). {interpreter_code} is only meaningful when {type} is
// INTERNAL — the constructors' DCHECKs enforce this pairing.
struct ExternalCallResult {
  enum Type {
    // The function should be executed inside this interpreter.
    INTERNAL,
    // For indirect calls: Table or function does not exist.
    INVALID_FUNC,
    // For indirect calls: Signature does not match expected signature.
    SIGNATURE_MISMATCH,
    // The function was executed and returned normally.
    EXTERNAL_RETURNED,
    // The function was executed, threw an exception, and the stack was unwound.
    EXTERNAL_UNWOUND
  };
  Type type;
  // If type is INTERNAL, this field holds the function to call internally.
  InterpreterCode* interpreter_code;

  // Implicit conversion from a plain Type is intended for the non-INTERNAL
  // results, hence the NOLINT on the non-explicit constructor.
  ExternalCallResult(Type type) : type(type) {  // NOLINT
    DCHECK_NE(INTERNAL, type);
  }
  ExternalCallResult(Type type, InterpreterCode* code)
      : type(type), interpreter_code(code) {
    DCHECK_EQ(INTERNAL, type);
  }
};
|
|
|
|
|
2016-05-25 08:32:37 +00:00
|
|
|
// The main storage for interpreter code. It maps {WasmFunction} to the
// metadata needed to execute each function.
class CodeMap {
  Zone* zone_;
  const WasmModule* module_;
  // One entry per function, indexed by func_index.
  ZoneVector<InterpreterCode> interpreter_code_;
  // TODO(wasm): Remove this testing wart. It is needed because interpreter
  // entry stubs are not generated in testing the interpreter in cctests.
  bool call_indirect_through_module_ = false;

 public:
  // Registers every function of {module}; imported functions get null code
  // pointers. NOTE(review): {isolate} is not used in this body.
  CodeMap(Isolate* isolate, const WasmModule* module,
          const uint8_t* module_start, Zone* zone)
      : zone_(zone), module_(module), interpreter_code_(zone) {
    if (module == nullptr) return;
    interpreter_code_.reserve(module->functions.size());
    for (const WasmFunction& function : module->functions) {
      if (function.imported) {
        DCHECK(!function.code.is_set());
        AddFunction(&function, nullptr, nullptr);
      } else {
        AddFunction(&function, module_start + function.code.offset(),
                    module_start + function.code.end_offset());
      }
    }
  }

  bool call_indirect_through_module() { return call_indirect_through_module_; }

  void set_call_indirect_through_module(bool val) {
    call_indirect_through_module_ = val;
  }

  const WasmModule* module() const { return module_; }

  // Looks up the (preprocessed) code for {function} by its index.
  InterpreterCode* GetCode(const WasmFunction* function) {
    InterpreterCode* code = GetCode(function->func_index);
    DCHECK_EQ(function, code->function);
    return code;
  }

  // Returns the code at {function_index}, lazily building its side table.
  InterpreterCode* GetCode(uint32_t function_index) {
    DCHECK_LT(function_index, interpreter_code_.size());
    return Preprocess(&interpreter_code_[function_index]);
  }

  // Resolves an indirect call: returns the code stored in table
  // {table_index} at {entry_index}, or nullptr if either index is out of
  // bounds. Each bounds-checked index is additionally masked to zero on
  // out-of-bounds speculation (SSCA mitigation); the DCHECKs verify the mask
  // is a no-op on the architecturally-taken path.
  InterpreterCode* GetIndirectCode(uint32_t table_index, uint32_t entry_index) {
    uint32_t saved_index;
    USE(saved_index);
    if (table_index >= module_->tables.size()) return nullptr;
    // Mask table index for SSCA mitigation.
    saved_index = table_index;
    table_index &= static_cast<int32_t>((table_index - module_->tables.size()) &
                                        ~static_cast<int32_t>(table_index)) >>
                   31;
    DCHECK_EQ(table_index, saved_index);
    const WasmTable* table = &module_->tables[table_index];
    if (entry_index >= table->values.size()) return nullptr;
    // Mask entry_index for SSCA mitigation.
    saved_index = entry_index;
    entry_index &= static_cast<int32_t>((entry_index - table->values.size()) &
                                        ~static_cast<int32_t>(entry_index)) >>
                   31;
    DCHECK_EQ(entry_index, saved_index);
    uint32_t index = table->values[entry_index];
    if (index >= interpreter_code_.size()) return nullptr;
    // Mask index for SSCA mitigation.
    saved_index = index;
    index &= static_cast<int32_t>((index - interpreter_code_.size()) &
                                  ~static_cast<int32_t>(index)) >>
             31;
    DCHECK_EQ(index, saved_index);

    return GetCode(index);
  }

  // Lazily computes the side table (control transfers, max stack height) for
  // non-imported code. Idempotent: a second call is a no-op.
  InterpreterCode* Preprocess(InterpreterCode* code) {
    DCHECK_EQ(code->function->imported, code->start == nullptr);
    if (!code->side_table && code->start) {
      // Compute the control targets map and the local declarations.
      code->side_table = new (zone_) SideTable(zone_, module_, code);
    }
    return code;
  }

  // Appends {function}'s entry; entries must be added in func_index order
  // (enforced by the DCHECK below).
  void AddFunction(const WasmFunction* function, const byte* code_start,
                   const byte* code_end) {
    InterpreterCode code = {
        function, BodyLocalDecls(zone_),         code_start,
        code_end, const_cast<byte*>(code_start), const_cast<byte*>(code_end),
        nullptr};

    DCHECK_EQ(interpreter_code_.size(), function->func_index);
    interpreter_code_.push_back(code);
  }

  // Replaces the code of an already-registered function and invalidates its
  // side table, then rebuilds it via Preprocess.
  void SetFunctionCode(const WasmFunction* function, const byte* start,
                       const byte* end) {
    DCHECK_LT(function->func_index, interpreter_code_.size());
    InterpreterCode* code = &interpreter_code_[function->func_index];
    DCHECK_EQ(function, code->function);
    code->orig_start = start;
    code->orig_end = end;
    code->start = const_cast<byte*>(start);
    code->end = const_cast<byte*>(end);
    code->side_table = nullptr;
    Preprocess(code);
  }
};
|
2017-03-15 15:57:02 +00:00
|
|
|
|
2017-10-24 12:28:30 +00:00
|
|
|
// Like a static_cast from src to dst, but specialized for boxed floats.
// The generic case is a plain numeric cast.
template <typename dst, typename src>
struct converter {
  dst operator()(src val) const { return static_cast<dst>(val); }
};
// Boxed floats hold raw IEEE-754 bits; converting to/from the integer type of
// the same width is a bit transfer, never a numeric conversion (this keeps
// NaN payloads intact).
template <>
struct converter<Float64, uint64_t> {
  Float64 operator()(uint64_t val) const { return Float64::FromBits(val); }
};
template <>
struct converter<Float32, uint32_t> {
  Float32 operator()(uint32_t val) const { return Float32::FromBits(val); }
};
template <>
struct converter<uint64_t, Float64> {
  uint64_t operator()(Float64 val) const { return val.get_bits(); }
};
template <>
struct converter<uint32_t, Float32> {
  uint32_t operator()(Float32 val) const { return val.get_bits(); }
};
|
|
|
|
|
2017-10-26 07:45:12 +00:00
|
|
|
template <typename T>
|
|
|
|
V8_INLINE bool has_nondeterminism(T val) {
|
|
|
|
static_assert(!std::is_floating_point<T>::value, "missing specialization");
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
template <>
|
|
|
|
V8_INLINE bool has_nondeterminism<float>(float val) {
|
|
|
|
return std::isnan(val);
|
|
|
|
}
|
|
|
|
template <>
|
|
|
|
V8_INLINE bool has_nondeterminism<double>(double val) {
|
|
|
|
return std::isnan(val);
|
|
|
|
}
|
|
|
|
|
2016-05-25 08:32:37 +00:00
|
|
|
// Responsible for executing code directly.
|
2017-01-18 11:40:29 +00:00
|
|
|
class ThreadImpl {
|
2017-03-21 10:54:14 +00:00
|
|
|
struct Activation {
|
|
|
|
uint32_t fp;
|
2017-04-26 17:41:26 +00:00
|
|
|
sp_t sp;
|
|
|
|
Activation(uint32_t fp, sp_t sp) : fp(fp), sp(sp) {}
|
2017-03-21 10:54:14 +00:00
|
|
|
};
|
|
|
|
|
2016-05-25 08:32:37 +00:00
|
|
|
public:
|
2018-04-06 10:18:18 +00:00
|
|
|
ThreadImpl(Zone* zone, CodeMap* codemap,
|
|
|
|
Handle<WasmInstanceObject> instance_object)
|
2016-05-25 08:32:37 +00:00
|
|
|
: codemap_(codemap),
|
2018-04-06 10:18:18 +00:00
|
|
|
instance_object_(instance_object),
|
2016-05-25 08:32:37 +00:00
|
|
|
frames_(zone),
|
2017-03-21 10:54:14 +00:00
|
|
|
activations_(zone) {}
|
2016-05-25 08:32:37 +00:00
|
|
|
|
|
|
|
//==========================================================================
|
|
|
|
// Implementation of public interface for WasmInterpreter::Thread.
|
|
|
|
//==========================================================================
|
|
|
|
|
2017-01-18 10:23:20 +00:00
|
|
|
WasmInterpreter::State state() { return state_; }
|
2016-05-25 08:32:37 +00:00
|
|
|
|
2017-07-14 13:49:01 +00:00
|
|
|
void InitFrame(const WasmFunction* function, WasmValue* args) {
|
2017-03-21 10:54:14 +00:00
|
|
|
DCHECK_EQ(current_activation().fp, frames_.size());
|
2017-03-14 15:54:43 +00:00
|
|
|
InterpreterCode* code = codemap()->GetCode(function);
|
2017-04-26 17:41:26 +00:00
|
|
|
size_t num_params = function->sig->parameter_count();
|
|
|
|
EnsureStackSpace(num_params);
|
|
|
|
Push(args, num_params);
|
2017-03-14 15:54:43 +00:00
|
|
|
PushFrame(code);
|
2016-05-25 08:32:37 +00:00
|
|
|
}
|
|
|
|
|
2017-04-06 13:32:36 +00:00
|
|
|
WasmInterpreter::State Run(int num_steps = -1) {
|
2017-03-14 15:54:43 +00:00
|
|
|
DCHECK(state_ == WasmInterpreter::STOPPED ||
|
|
|
|
state_ == WasmInterpreter::PAUSED);
|
2017-04-06 13:32:36 +00:00
|
|
|
DCHECK(num_steps == -1 || num_steps > 0);
|
|
|
|
if (num_steps == -1) {
|
2016-05-30 10:02:34 +00:00
|
|
|
TRACE(" => Run()\n");
|
2017-04-06 13:32:36 +00:00
|
|
|
} else if (num_steps == 1) {
|
|
|
|
TRACE(" => Step()\n");
|
|
|
|
} else {
|
|
|
|
TRACE(" => Run(%d)\n", num_steps);
|
|
|
|
}
|
|
|
|
state_ = WasmInterpreter::RUNNING;
|
|
|
|
Execute(frames_.back().code, frames_.back().pc, num_steps);
|
2017-03-31 09:23:22 +00:00
|
|
|
// If state_ is STOPPED, the current activation must be fully unwound.
|
|
|
|
DCHECK_IMPLIES(state_ == WasmInterpreter::STOPPED,
|
|
|
|
current_activation().fp == frames_.size());
|
2016-05-25 08:32:37 +00:00
|
|
|
return state_;
|
|
|
|
}
|
|
|
|
|
2017-01-18 10:23:20 +00:00
|
|
|
void Pause() { UNIMPLEMENTED(); }
|
2016-05-25 08:32:37 +00:00
|
|
|
|
2017-01-18 10:23:20 +00:00
|
|
|
void Reset() {
|
2016-05-25 08:32:37 +00:00
|
|
|
TRACE("----- RESET -----\n");
|
2018-07-05 09:00:20 +00:00
|
|
|
sp_ = stack_.get();
|
2016-05-25 08:32:37 +00:00
|
|
|
frames_.clear();
|
|
|
|
state_ = WasmInterpreter::STOPPED;
|
|
|
|
trap_reason_ = kTrapCount;
|
2016-10-20 14:27:23 +00:00
|
|
|
possible_nondeterminism_ = false;
|
2016-05-25 08:32:37 +00:00
|
|
|
}
|
|
|
|
|
2017-01-20 12:58:14 +00:00
|
|
|
int GetFrameCount() {
|
|
|
|
DCHECK_GE(kMaxInt, frames_.size());
|
|
|
|
return static_cast<int>(frames_.size());
|
2016-05-25 08:32:37 +00:00
|
|
|
}
|
|
|
|
|
2017-07-14 13:49:01 +00:00
|
|
|
WasmValue GetReturnValue(uint32_t index) {
|
2017-12-02 00:30:37 +00:00
|
|
|
if (state_ == WasmInterpreter::TRAPPED) return WasmValue(0xDEADBEEF);
|
2017-03-21 10:54:14 +00:00
|
|
|
DCHECK_EQ(WasmInterpreter::FINISHED, state_);
|
|
|
|
Activation act = current_activation();
|
|
|
|
// Current activation must be finished.
|
|
|
|
DCHECK_EQ(act.fp, frames_.size());
|
2017-04-26 17:41:26 +00:00
|
|
|
return GetStackValue(act.sp + index);
|
|
|
|
}
|
|
|
|
|
2017-07-14 13:49:01 +00:00
|
|
|
WasmValue GetStackValue(sp_t index) {
|
2017-04-26 17:41:26 +00:00
|
|
|
DCHECK_GT(StackHeight(), index);
|
2018-07-05 09:00:20 +00:00
|
|
|
return stack_[index];
|
2016-05-25 08:32:37 +00:00
|
|
|
}
|
|
|
|
|
2017-07-14 13:49:01 +00:00
|
|
|
void SetStackValue(sp_t index, WasmValue value) {
|
2017-04-26 17:41:26 +00:00
|
|
|
DCHECK_GT(StackHeight(), index);
|
2018-07-05 09:00:20 +00:00
|
|
|
stack_[index] = value;
|
2017-04-11 13:04:13 +00:00
|
|
|
}
|
|
|
|
|
2017-03-16 11:54:31 +00:00
|
|
|
TrapReason GetTrapReason() { return trap_reason_; }
|
|
|
|
|
2017-01-18 10:23:20 +00:00
|
|
|
pc_t GetBreakpointPc() { return break_pc_; }
|
2016-05-30 10:02:34 +00:00
|
|
|
|
2017-01-18 10:23:20 +00:00
|
|
|
bool PossibleNondeterminism() { return possible_nondeterminism_; }
|
2016-05-25 08:32:37 +00:00
|
|
|
|
2017-02-21 18:21:31 +00:00
|
|
|
uint64_t NumInterpretedCalls() { return num_interpreted_calls_; }
|
|
|
|
|
2017-01-24 10:13:33 +00:00
|
|
|
void AddBreakFlags(uint8_t flags) { break_flags_ |= flags; }
|
|
|
|
|
|
|
|
void ClearBreakFlags() { break_flags_ = WasmInterpreter::BreakFlag::None; }
|
|
|
|
|
2017-03-21 10:54:14 +00:00
|
|
|
uint32_t NumActivations() {
|
|
|
|
return static_cast<uint32_t>(activations_.size());
|
|
|
|
}
|
|
|
|
|
|
|
|
uint32_t StartActivation() {
|
|
|
|
TRACE("----- START ACTIVATION %zu -----\n", activations_.size());
|
|
|
|
// If you use activations, use them consistently:
|
|
|
|
DCHECK_IMPLIES(activations_.empty(), frames_.empty());
|
2017-04-26 17:41:26 +00:00
|
|
|
DCHECK_IMPLIES(activations_.empty(), StackHeight() == 0);
|
2017-03-21 10:54:14 +00:00
|
|
|
uint32_t activation_id = static_cast<uint32_t>(activations_.size());
|
|
|
|
activations_.emplace_back(static_cast<uint32_t>(frames_.size()),
|
2017-04-26 17:41:26 +00:00
|
|
|
StackHeight());
|
2017-03-21 10:54:14 +00:00
|
|
|
state_ = WasmInterpreter::STOPPED;
|
|
|
|
return activation_id;
|
|
|
|
}
|
|
|
|
|
|
|
|
void FinishActivation(uint32_t id) {
|
|
|
|
TRACE("----- FINISH ACTIVATION %zu -----\n", activations_.size() - 1);
|
|
|
|
DCHECK_LT(0, activations_.size());
|
|
|
|
DCHECK_EQ(activations_.size() - 1, id);
|
|
|
|
// Stack height must match the start of this activation (otherwise unwind
|
|
|
|
// first).
|
|
|
|
DCHECK_EQ(activations_.back().fp, frames_.size());
|
2017-04-26 17:41:26 +00:00
|
|
|
DCHECK_LE(activations_.back().sp, StackHeight());
|
2018-07-05 09:00:20 +00:00
|
|
|
sp_ = stack_.get() + activations_.back().sp;
|
2017-03-21 10:54:14 +00:00
|
|
|
activations_.pop_back();
|
|
|
|
}
|
|
|
|
|
|
|
|
uint32_t ActivationFrameBase(uint32_t id) {
|
|
|
|
DCHECK_GT(activations_.size(), id);
|
|
|
|
return activations_[id].fp;
|
|
|
|
}
|
|
|
|
|
2017-03-20 12:53:01 +00:00
|
|
|
// Handle a thrown exception. Returns whether the exception was handled inside
|
2017-03-21 10:54:14 +00:00
|
|
|
// the current activation. Unwinds the interpreted stack accordingly.
|
2017-03-20 12:53:01 +00:00
|
|
|
WasmInterpreter::Thread::ExceptionHandlingResult HandleException(
|
|
|
|
Isolate* isolate) {
|
|
|
|
DCHECK(isolate->has_pending_exception());
|
2017-10-25 09:29:26 +00:00
|
|
|
// TODO(wasm): Add wasm exception handling (would return HANDLED).
|
2017-03-20 12:53:01 +00:00
|
|
|
USE(isolate->pending_exception());
|
|
|
|
TRACE("----- UNWIND -----\n");
|
2017-03-21 10:54:14 +00:00
|
|
|
DCHECK_LT(0, activations_.size());
|
|
|
|
Activation& act = activations_.back();
|
|
|
|
DCHECK_LE(act.fp, frames_.size());
|
|
|
|
frames_.resize(act.fp);
|
2017-04-26 17:41:26 +00:00
|
|
|
DCHECK_LE(act.sp, StackHeight());
|
2018-07-05 09:00:20 +00:00
|
|
|
sp_ = stack_.get() + act.sp;
|
2017-03-20 12:53:01 +00:00
|
|
|
state_ = WasmInterpreter::STOPPED;
|
|
|
|
return WasmInterpreter::Thread::UNWOUND;
|
|
|
|
}
|
|
|
|
|
2016-05-25 08:32:37 +00:00
|
|
|
private:
|
|
|
|
// Entries on the stack of functions being evaluated.
|
|
|
|
struct Frame {
|
|
|
|
InterpreterCode* code;
|
2017-03-14 10:46:18 +00:00
|
|
|
pc_t pc;
|
2016-05-25 08:32:37 +00:00
|
|
|
sp_t sp;
|
|
|
|
|
|
|
|
// Limit of parameters.
|
|
|
|
sp_t plimit() { return sp + code->function->sig->parameter_count(); }
|
|
|
|
// Limit of locals.
|
2017-01-06 22:24:56 +00:00
|
|
|
sp_t llimit() { return plimit() + code->locals.type_list.size(); }
|
2016-05-25 08:32:37 +00:00
|
|
|
};
|
|
|
|
|
2016-09-27 20:46:10 +00:00
|
|
|
struct Block {
|
|
|
|
pc_t pc;
|
|
|
|
sp_t sp;
|
|
|
|
size_t fp;
|
|
|
|
unsigned arity;
|
|
|
|
};
|
|
|
|
|
2017-04-11 13:04:13 +00:00
|
|
|
friend class InterpretedFrameImpl;
|
|
|
|
|
2016-05-25 08:32:37 +00:00
|
|
|
CodeMap* codemap_;
|
2018-04-06 10:18:18 +00:00
|
|
|
Handle<WasmInstanceObject> instance_object_;
|
2018-07-05 09:00:20 +00:00
|
|
|
std::unique_ptr<WasmValue[]> stack_;
|
2017-07-14 13:49:01 +00:00
|
|
|
WasmValue* stack_limit_ = nullptr; // End of allocated stack space.
|
|
|
|
WasmValue* sp_ = nullptr; // Current stack pointer.
|
2016-05-25 08:32:37 +00:00
|
|
|
ZoneVector<Frame> frames_;
|
2017-01-24 10:13:33 +00:00
|
|
|
WasmInterpreter::State state_ = WasmInterpreter::STOPPED;
|
|
|
|
pc_t break_pc_ = kInvalidPc;
|
|
|
|
TrapReason trap_reason_ = kTrapCount;
|
|
|
|
bool possible_nondeterminism_ = false;
|
|
|
|
uint8_t break_flags_ = 0; // a combination of WasmInterpreter::BreakFlag
|
2017-02-21 18:21:31 +00:00
|
|
|
uint64_t num_interpreted_calls_ = 0;
|
2017-03-21 10:54:14 +00:00
|
|
|
// Store the stack height of each activation (for unwind and frame
|
|
|
|
// inspection).
|
|
|
|
ZoneVector<Activation> activations_;
|
2016-05-25 08:32:37 +00:00
|
|
|
|
2017-08-07 17:17:06 +00:00
|
|
|
CodeMap* codemap() const { return codemap_; }
|
|
|
|
const WasmModule* module() const { return codemap_->module(); }
|
2016-05-25 08:32:37 +00:00
|
|
|
|
|
|
|
void DoTrap(TrapReason trap, pc_t pc) {
|
|
|
|
state_ = WasmInterpreter::TRAPPED;
|
|
|
|
trap_reason_ = trap;
|
|
|
|
CommitPc(pc);
|
|
|
|
}
|
|
|
|
|
|
|
|
// Push a frame with arguments already on the stack.
void PushFrame(InterpreterCode* code) {
  DCHECK_NOT_NULL(code);
  DCHECK_NOT_NULL(code->side_table);
  // Reserve space for the worst-case operand stack height of this
  // function plus all of its declared locals.
  EnsureStackSpace(code->side_table->max_stack_height_ +
                   code->locals.type_list.size());

  ++num_interpreted_calls_;
  size_t arity = code->function->sig->parameter_count();
  // The parameters will overlap the arguments already on the stack.
  DCHECK_GE(StackHeight(), arity);
  // The new frame's stack base points below the arguments, so the
  // parameters become the first slots of the frame.
  frames_.push_back({code, 0, StackHeight() - arity});
  // InitLocals pushes default values for the locals and returns the pc
  // of the first instruction after the local declarations.
  frames_.back().pc = InitLocals(code);
  TRACE(" => PushFrame #%zu (#%u @%zu)\n", frames_.size() - 1,
        code->function->func_index, frames_.back().pc);
}
|
|
|
|
|
|
|
|
// Pushes a default (zero-initialized) value for every declared local of
// {code}, then returns the pc of the first instruction following the
// local declarations.
pc_t InitLocals(InterpreterCode* code) {
  for (auto p : code->locals.type_list) {
    WasmValue val;
    switch (p) {
// One case per wasm value type: default-construct the corresponding
// C type (zero value).
#define CASE_TYPE(wasm, ctype) \
  case kWasm##wasm:            \
    val = WasmValue(ctype{});  \
    break;
      WASM_CTYPES(CASE_TYPE)
#undef CASE_TYPE
      default:
        UNREACHABLE();
        break;
    }
    Push(val);
  }
  // Execution starts right after the encoded local declarations.
  return code->locals.encoded_size;
}
|
|
|
|
|
|
|
|
// Records {position} as the current pc of the topmost frame.
void CommitPc(pc_t position) {
  DCHECK(!frames_.empty());
  Frame& top = frames_.back();
  top.pc = position;
}
|
|
|
|
|
|
|
|
// Returns true exactly once after resuming at a breakpoint, so the
// interpreter does not immediately re-trigger the breakpoint it just
// stopped at.
bool SkipBreakpoint(InterpreterCode* code, pc_t pc) {
  if (pc != break_pc_) return false;
  // Skip the previously hit breakpoint when resuming; it must only be
  // skipped a single time.
  break_pc_ = kInvalidPc;
  return true;
}
|
|
|
|
|
2017-04-25 09:43:39 +00:00
|
|
|
// Looks up the pc delta of the control transfer recorded for the
// instruction at {pc} in the side table.
int LookupTargetDelta(InterpreterCode* code, pc_t pc) {
  const ControlTransferEntry& entry = code->side_table->Lookup(pc);
  return static_cast<int>(entry.pc_diff);
}
|
|
|
|
|
|
|
|
// Performs a branch at {pc}: transfers the branch arity's worth of
// values down the stack as recorded in the side table, and returns the
// pc delta to the branch target.
int DoBreak(InterpreterCode* code, pc_t pc, size_t depth) {
  ControlTransferEntry& entry = code->side_table->Lookup(pc);
  WasmValue* transfer_dest = sp_ - entry.sp_diff;
  DoStackTransfer(transfer_dest, entry.target_arity);
  return entry.pc_diff;
}
|
|
|
|
|
2017-03-14 10:46:18 +00:00
|
|
|
// Computes the pc at which execution resumes in the caller: the call
// instruction at {pc} plus its opcode byte and immediate operands.
pc_t ReturnPc(Decoder* decoder, InterpreterCode* code, pc_t pc) {
  auto opcode = code->orig_start[pc];
  if (opcode == kExprCallFunction) {
    CallFunctionImmediate<Decoder::kNoValidate> imm(decoder, code->at(pc));
    return pc + 1 + imm.length;
  }
  if (opcode == kExprCallIndirect) {
    CallIndirectImmediate<Decoder::kNoValidate> imm(decoder, code->at(pc));
    return pc + 1 + imm.length;
  }
  // Only the two call opcodes can be return sites.
  UNREACHABLE();
}
|
|
|
|
|
|
|
|
// Returns from the current frame: pops it, moves {arity} return values
// down to the frame's stack base, and restores the caller's decoder,
// pc, and limit. Returns false if this was the last frame of the
// current activation (execution finished), true otherwise.
bool DoReturn(Decoder* decoder, InterpreterCode** code, pc_t* pc, pc_t* limit,
              size_t arity) {
  DCHECK_GT(frames_.size(), 0);
  // Destination for the return values: the stack base of the frame
  // being popped.
  WasmValue* sp_dest = stack_.get() + frames_.back().sp;
  frames_.pop_back();
  if (frames_.size() == current_activation().fp) {
    // A return from the last frame terminates the execution.
    state_ = WasmInterpreter::FINISHED;
    DoStackTransfer(sp_dest, arity);
    TRACE(" => finish\n");
    return false;
  } else {
    // Return to caller frame.
    Frame* top = &frames_.back();
    *code = top->code;
    decoder->Reset((*code)->start, (*code)->end);
    // Resume after the call instruction the caller was stopped at.
    *pc = ReturnPc(decoder, *code, top->pc);
    *limit = top->code->end - top->code->start;
    TRACE(" => Return to #%zu (#%u @%zu)\n", frames_.size() - 1,
          (*code)->function->func_index, *pc);
    DoStackTransfer(sp_dest, arity);
    return true;
  }
}
|
|
|
|
|
2017-03-31 08:29:02 +00:00
|
|
|
// Returns true if the call was successful, false if the stack check failed
// and the current activation was fully unwound.
bool DoCall(Decoder* decoder, InterpreterCode* target, pc_t* pc,
            pc_t* limit) V8_WARN_UNUSED_RESULT {
  // Remember where to continue in the caller before switching frames.
  frames_.back().pc = *pc;
  PushFrame(target);
  // The stack check must run after the frame is pushed so that a failed
  // check can unwind it.
  if (!DoStackCheck()) return false;
  // PushFrame set the new frame's pc past the callee's local
  // declarations; continue there.
  *pc = frames_.back().pc;
  *limit = target->end - target->start;
  decoder->Reset(target->start, target->end);
  return true;
}
|
|
|
|
|
2016-09-27 20:46:10 +00:00
|
|
|
// Copies {arity} values on the top of the stack down the stack to {dest},
|
|
|
|
// dropping the values in-between.
|
2017-07-14 13:49:01 +00:00
|
|
|
void DoStackTransfer(WasmValue* dest, size_t arity) {
|
2016-09-27 20:46:10 +00:00
|
|
|
// before: |---------------| pop_count | arity |
|
2017-04-26 17:41:26 +00:00
|
|
|
// ^ 0 ^ dest ^ sp_
|
2016-09-27 20:46:10 +00:00
|
|
|
//
|
|
|
|
// after: |---------------| arity |
|
2017-04-26 17:41:26 +00:00
|
|
|
// ^ 0 ^ sp_
|
|
|
|
DCHECK_LE(dest, sp_);
|
|
|
|
DCHECK_LE(dest + arity, sp_);
|
2017-10-27 10:39:27 +00:00
|
|
|
if (arity) memmove(dest, sp_ - arity, arity * sizeof(*sp_));
|
2017-04-26 17:41:26 +00:00
|
|
|
sp_ = dest + arity;
|
2016-05-25 08:32:37 +00:00
|
|
|
}
|
|
|
|
|
2017-04-03 07:44:47 +00:00
|
|
|
// Computes the effective address of an {mtype}-sized access at
// {offset} + {index}, or kNullAddress if the access would be out of
// bounds of the instance's memory.
template <typename mtype>
inline Address BoundsCheckMem(uint32_t offset, uint32_t index) {
  size_t mem_size = instance_object_->memory_size();
  // The checks are ordered so that none of the subtractions below can
  // underflow.
  if (sizeof(mtype) > mem_size) return kNullAddress;
  if (offset > (mem_size - sizeof(mtype))) return kNullAddress;
  if (index > (mem_size - sizeof(mtype) - offset)) return kNullAddress;
  // Compute the effective address of the access, making sure to condition
  // the index even in the in-bounds case.
  return reinterpret_cast<Address>(instance_object_->memory_start()) +
         offset + (index & instance_object_->memory_mask());
}
|
|
|
|
|
2017-02-03 09:51:04 +00:00
|
|
|
// Executes a load: pops the index, bounds-checks the access, reads an
// {mtype} from memory (little-endian), converts it to {ctype}, and
// pushes the result. On an out-of-bounds access, raises a trap and
// returns false. {len} receives the instruction length including the
// memory-access immediate.
template <typename ctype, typename mtype>
bool ExecuteLoad(Decoder* decoder, InterpreterCode* code, pc_t pc, int& len,
                 MachineRepresentation rep) {
  MemoryAccessImmediate<Decoder::kNoValidate> imm(decoder, code->at(pc),
                                                  sizeof(ctype));
  uint32_t index = Pop().to<uint32_t>();
  Address addr = BoundsCheckMem<mtype>(imm.offset, index);
  if (!addr) {
    DoTrap(kTrapMemOutOfBounds, pc);
    return false;
  }
  // Memory is read little-endian regardless of host byte order.
  WasmValue result(
      converter<ctype, mtype>{}(ReadLittleEndianValue<mtype>(addr)));

  Push(result);
  len = 1 + imm.length;

  if (FLAG_wasm_trace_memory) {
    // false: this is a read, not a write.
    wasm::MemoryTracingInfo info(imm.offset + index, false, rep);
    TraceMemoryOperation(ExecutionEngine::kInterpreter, &info,
                         code->function->func_index, static_cast<int>(pc),
                         instance_object_->memory_start());
  }

  return true;
}
|
|
|
|
|
|
|
|
// Executes a store: pops the value, then the index, bounds-checks the
// access, converts the {ctype} value to {mtype}, and writes it to
// memory (little-endian). On an out-of-bounds access, raises a trap
// and returns false. {len} receives the instruction length including
// the memory-access immediate.
template <typename ctype, typename mtype>
bool ExecuteStore(Decoder* decoder, InterpreterCode* code, pc_t pc, int& len,
                  MachineRepresentation rep) {
  MemoryAccessImmediate<Decoder::kNoValidate> imm(decoder, code->at(pc),
                                                  sizeof(ctype));
  // Pop order: value first (it was pushed last), then index.
  ctype val = Pop().to<ctype>();

  uint32_t index = Pop().to<uint32_t>();
  Address addr = BoundsCheckMem<mtype>(imm.offset, index);
  if (!addr) {
    DoTrap(kTrapMemOutOfBounds, pc);
    return false;
  }
  // Memory is written little-endian regardless of host byte order.
  WriteLittleEndianValue<mtype>(addr, converter<mtype, ctype>{}(val));
  len = 1 + imm.length;

  if (FLAG_wasm_trace_memory) {
    // true: this is a write.
    wasm::MemoryTracingInfo info(imm.offset + index, true, rep);
    TraceMemoryOperation(ExecutionEngine::kInterpreter, &info,
                         code->function->func_index, static_cast<int>(pc),
                         instance_object_->memory_start());
  }

  return true;
}
|
|
|
|
|
2018-07-10 20:01:04 +00:00
|
|
|
// Pops the operands of an atomic operation — {val2} and {val} if
// requested (in reverse push order), then the index — bounds-checks the
// access, and stores the effective address in {address}. On an
// out-of-bounds access, raises a trap and returns false. {len} receives
// the instruction length.
template <typename type>
bool ExtractAtomicOpParams(Decoder* decoder, InterpreterCode* code,
                           Address& address, pc_t pc, int& len,
                           type* val = nullptr, type* val2 = nullptr) {
  // The immediate starts at pc + 1, after the atomic prefix byte.
  MemoryAccessImmediate<Decoder::kNoValidate> imm(decoder, code->at(pc + 1),
                                                  sizeof(type));
  if (val2) *val2 = Pop().to<uint32_t>();
  if (val) *val = Pop().to<uint32_t>();
  uint32_t index = Pop().to<uint32_t>();
  address = BoundsCheckMem<type>(imm.offset, index);
  if (!address) {
    DoTrap(kTrapMemOutOfBounds, pc);
    return false;
  }
  // 2 bytes (prefix + opcode) plus the immediate.
  len = 2 + imm.length;
  return true;
}
|
|
|
|
|
2017-12-27 18:54:38 +00:00
|
|
|
// Executes a numeric-prefix opcode (the saturating float-to-integer
// conversions). Pops the float/double operand, pushes the converted
// integer, and returns true. Unknown opcodes are fatal.
bool ExecuteNumericOp(WasmOpcode opcode, Decoder* decoder,
                      InterpreterCode* code, pc_t pc, int& len) {
  switch (opcode) {
    case kExprI32SConvertSatF32:
      Push(WasmValue(ExecuteConvertSaturate<int32_t>(Pop().to<float>())));
      return true;
    case kExprI32UConvertSatF32:
      Push(WasmValue(ExecuteConvertSaturate<uint32_t>(Pop().to<float>())));
      return true;
    case kExprI32SConvertSatF64:
      Push(WasmValue(ExecuteConvertSaturate<int32_t>(Pop().to<double>())));
      return true;
    case kExprI32UConvertSatF64:
      Push(WasmValue(ExecuteConvertSaturate<uint32_t>(Pop().to<double>())));
      return true;
    // The 64-bit conversions use dedicated helpers.
    case kExprI64SConvertSatF32:
      Push(WasmValue(ExecuteI64SConvertSatF32(Pop().to<float>())));
      return true;
    case kExprI64UConvertSatF32:
      Push(WasmValue(ExecuteI64UConvertSatF32(Pop().to<float>())));
      return true;
    case kExprI64SConvertSatF64:
      Push(WasmValue(ExecuteI64SConvertSatF64(Pop().to<double>())));
      return true;
    case kExprI64UConvertSatF64:
      Push(WasmValue(ExecuteI64UConvertSatF64(Pop().to<double>())));
      return true;
    default:
      FATAL("Unknown or unimplemented opcode #%d:%s", code->start[pc],
            OpcodeName(code->start[pc]));
      UNREACHABLE();
  }
  return false;
}
|
|
|
|
|
2017-11-04 01:03:03 +00:00
|
|
|
// Executes an atomic-prefix opcode via std::atomic operations on the
// bounds-checked memory address. Returns false if the memory access
// trapped, true otherwise.
bool ExecuteAtomicOp(WasmOpcode opcode, Decoder* decoder,
                     InterpreterCode* code, pc_t pc, int& len) {
  WasmValue result;
  switch (opcode) {
// Read-modify-write: pops the operand value and index, applies
// {operation} atomically, and pushes the previous memory value.
#define ATOMIC_BINOP_CASE(name, type, operation)                            \
  case kExpr##name: {                                                       \
    type val;                                                               \
    Address addr;                                                           \
    if (!ExtractAtomicOpParams<type>(decoder, code, addr, pc, len, &val)) { \
      return false;                                                         \
    }                                                                       \
    static_assert(sizeof(std::atomic<type>) == sizeof(type),                \
                  "Size mismatch for types std::atomic<" #type              \
                  ">, and " #type);                                         \
    result = WasmValue(                                                     \
        std::operation(reinterpret_cast<std::atomic<type>*>(addr), val));   \
    Push(result);                                                           \
    break;                                                                  \
  }
    ATOMIC_BINOP_CASE(I32AtomicAdd, uint32_t, atomic_fetch_add);
    ATOMIC_BINOP_CASE(I32AtomicAdd8U, uint8_t, atomic_fetch_add);
    ATOMIC_BINOP_CASE(I32AtomicAdd16U, uint16_t, atomic_fetch_add);
    ATOMIC_BINOP_CASE(I32AtomicSub, uint32_t, atomic_fetch_sub);
    ATOMIC_BINOP_CASE(I32AtomicSub8U, uint8_t, atomic_fetch_sub);
    ATOMIC_BINOP_CASE(I32AtomicSub16U, uint16_t, atomic_fetch_sub);
    ATOMIC_BINOP_CASE(I32AtomicAnd, uint32_t, atomic_fetch_and);
    ATOMIC_BINOP_CASE(I32AtomicAnd8U, uint8_t, atomic_fetch_and);
    ATOMIC_BINOP_CASE(I32AtomicAnd16U, uint16_t, atomic_fetch_and);
    ATOMIC_BINOP_CASE(I32AtomicOr, uint32_t, atomic_fetch_or);
    ATOMIC_BINOP_CASE(I32AtomicOr8U, uint8_t, atomic_fetch_or);
    ATOMIC_BINOP_CASE(I32AtomicOr16U, uint16_t, atomic_fetch_or);
    ATOMIC_BINOP_CASE(I32AtomicXor, uint32_t, atomic_fetch_xor);
    ATOMIC_BINOP_CASE(I32AtomicXor8U, uint8_t, atomic_fetch_xor);
    ATOMIC_BINOP_CASE(I32AtomicXor16U, uint16_t, atomic_fetch_xor);
    ATOMIC_BINOP_CASE(I32AtomicExchange, uint32_t, atomic_exchange);
    ATOMIC_BINOP_CASE(I32AtomicExchange8U, uint8_t, atomic_exchange);
    ATOMIC_BINOP_CASE(I32AtomicExchange16U, uint16_t, atomic_exchange);
#undef ATOMIC_BINOP_CASE
// Compare-exchange: pops the new value, expected value, and index;
// pushes the value read from memory (the expected value on success).
#define ATOMIC_COMPARE_EXCHANGE_CASE(name, type)                         \
  case kExpr##name: {                                                    \
    type val;                                                            \
    type val2;                                                           \
    Address addr;                                                        \
    if (!ExtractAtomicOpParams<type>(decoder, code, addr, pc, len, &val, \
                                     &val2)) {                           \
      return false;                                                      \
    }                                                                    \
    static_assert(sizeof(std::atomic<type>) == sizeof(type),             \
                  "Size mismatch for types std::atomic<" #type           \
                  ">, and " #type);                                      \
    std::atomic_compare_exchange_strong(                                 \
        reinterpret_cast<std::atomic<type>*>(addr), &val, val2);         \
    Push(WasmValue(val));                                                \
    break;                                                               \
  }
    ATOMIC_COMPARE_EXCHANGE_CASE(I32AtomicCompareExchange, uint32_t);
    ATOMIC_COMPARE_EXCHANGE_CASE(I32AtomicCompareExchange8U, uint8_t);
    ATOMIC_COMPARE_EXCHANGE_CASE(I32AtomicCompareExchange16U, uint16_t);
#undef ATOMIC_COMPARE_EXCHANGE_CASE
// Atomic load: pops the index and pushes the value read from memory.
#define ATOMIC_LOAD_CASE(name, type, operation)                             \
  case kExpr##name: {                                                       \
    Address addr;                                                           \
    if (!ExtractAtomicOpParams<type>(decoder, code, addr, pc, len)) {       \
      return false;                                                         \
    }                                                                       \
    static_assert(sizeof(std::atomic<type>) == sizeof(type),                \
                  "Size mismatch for types std::atomic<" #type              \
                  ">, and " #type);                                         \
    result =                                                                \
        WasmValue(std::operation(reinterpret_cast<std::atomic<type>*>(addr))); \
    Push(result);                                                           \
    break;                                                                  \
  }
    ATOMIC_LOAD_CASE(I32AtomicLoad, uint32_t, atomic_load);
    ATOMIC_LOAD_CASE(I32AtomicLoad8U, uint8_t, atomic_load);
    ATOMIC_LOAD_CASE(I32AtomicLoad16U, uint16_t, atomic_load);
#undef ATOMIC_LOAD_CASE
// Atomic store: pops the value and index; pushes nothing.
#define ATOMIC_STORE_CASE(name, type, operation)                            \
  case kExpr##name: {                                                       \
    type val;                                                               \
    Address addr;                                                           \
    if (!ExtractAtomicOpParams<type>(decoder, code, addr, pc, len, &val)) { \
      return false;                                                         \
    }                                                                       \
    static_assert(sizeof(std::atomic<type>) == sizeof(type),                \
                  "Size mismatch for types std::atomic<" #type              \
                  ">, and " #type);                                         \
    std::operation(reinterpret_cast<std::atomic<type>*>(addr), val);        \
    break;                                                                  \
  }
    ATOMIC_STORE_CASE(I32AtomicStore, uint32_t, atomic_store);
    ATOMIC_STORE_CASE(I32AtomicStore8U, uint8_t, atomic_store);
    ATOMIC_STORE_CASE(I32AtomicStore16U, uint16_t, atomic_store);
#undef ATOMIC_STORE_CASE
    default:
      UNREACHABLE();
      return false;
  }
  return true;
}
|
|
|
|
|
2018-04-25 18:12:51 +00:00
|
|
|
// Returns the address of the storage backing {global}. Mutable imported
// globals are addressed indirectly through the instance's
// imported_mutable_globals() array; all other globals live in the
// instance's own globals area at the global's offset.
byte* GetGlobalPtr(const WasmGlobal* global) {
  if (!global->mutability || !global->imported) {
    return instance_object_->globals_start() + global->offset;
  }
  DCHECK(FLAG_experimental_wasm_mut_global);
  return reinterpret_cast<byte*>(
      instance_object_->imported_mutable_globals()[global->index]);
}
|
|
|
|
|
2018-05-18 21:47:59 +00:00
|
|
|
bool ExecuteSimdOp(WasmOpcode opcode, Decoder* decoder, InterpreterCode* code,
|
|
|
|
pc_t pc, int& len) {
|
|
|
|
switch (opcode) {
|
|
|
|
#define SPLAT_CASE(format, sType, valType, num) \
|
|
|
|
case kExpr##format##Splat: { \
|
|
|
|
WasmValue val = Pop(); \
|
|
|
|
valType v = val.to<valType>(); \
|
|
|
|
sType s; \
|
|
|
|
for (int i = 0; i < num; i++) s.val[i] = v; \
|
|
|
|
Push(WasmValue(Simd128(s))); \
|
|
|
|
return true; \
|
|
|
|
}
|
|
|
|
SPLAT_CASE(I32x4, int4, int32_t, 4)
|
|
|
|
SPLAT_CASE(F32x4, float4, float, 4)
|
|
|
|
SPLAT_CASE(I16x8, int8, int32_t, 8)
|
|
|
|
SPLAT_CASE(I8x16, int16, int32_t, 16)
|
|
|
|
#undef SPLAT_CASE
|
|
|
|
#define EXTRACT_LANE_CASE(format, name) \
|
|
|
|
case kExpr##format##ExtractLane: { \
|
|
|
|
SimdLaneImmediate<Decoder::kNoValidate> imm(decoder, code->at(pc)); \
|
|
|
|
++len; \
|
|
|
|
WasmValue val = Pop(); \
|
|
|
|
Simd128 s = val.to_s128(); \
|
|
|
|
Push(WasmValue(s.to_##name().val[imm.lane])); \
|
|
|
|
return true; \
|
|
|
|
}
|
|
|
|
EXTRACT_LANE_CASE(I32x4, i32x4)
|
|
|
|
EXTRACT_LANE_CASE(F32x4, f32x4)
|
|
|
|
EXTRACT_LANE_CASE(I16x8, i16x8)
|
|
|
|
EXTRACT_LANE_CASE(I8x16, i8x16)
|
|
|
|
#undef EXTRACT_LANE_CASE
|
2018-05-23 22:21:05 +00:00
|
|
|
#define BINOP_CASE(op, name, stype, count, expr) \
|
|
|
|
case kExpr##op: { \
|
|
|
|
WasmValue v2 = Pop(); \
|
|
|
|
WasmValue v1 = Pop(); \
|
|
|
|
stype s1 = v1.to_s128().to_##name(); \
|
|
|
|
stype s2 = v2.to_s128().to_##name(); \
|
|
|
|
stype res; \
|
|
|
|
for (size_t i = 0; i < count; ++i) { \
|
|
|
|
auto a = s1.val[i]; \
|
|
|
|
auto b = s2.val[i]; \
|
|
|
|
res.val[i] = expr; \
|
|
|
|
} \
|
|
|
|
Push(WasmValue(Simd128(res))); \
|
|
|
|
return true; \
|
|
|
|
}
|
|
|
|
BINOP_CASE(F32x4Add, f32x4, float4, 4, a + b)
|
|
|
|
BINOP_CASE(F32x4Sub, f32x4, float4, 4, a - b)
|
|
|
|
BINOP_CASE(F32x4Mul, f32x4, float4, 4, a * b)
|
|
|
|
BINOP_CASE(F32x4Min, f32x4, float4, 4, a < b ? a : b)
|
|
|
|
BINOP_CASE(F32x4Max, f32x4, float4, 4, a > b ? a : b)
|
2018-05-25 21:38:37 +00:00
|
|
|
BINOP_CASE(I32x4Add, i32x4, int4, 4, a + b)
|
|
|
|
BINOP_CASE(I32x4Sub, i32x4, int4, 4, a - b)
|
|
|
|
BINOP_CASE(I32x4Mul, i32x4, int4, 4, a * b)
|
|
|
|
BINOP_CASE(I32x4MinS, i32x4, int4, 4, a < b ? a : b)
|
2018-06-04 21:41:09 +00:00
|
|
|
BINOP_CASE(I32x4MinU, i32x4, int4, 4,
|
|
|
|
static_cast<uint32_t>(a) < static_cast<uint32_t>(b) ? a : b)
|
2018-05-23 22:21:05 +00:00
|
|
|
BINOP_CASE(I32x4MaxS, i32x4, int4, 4, a > b ? a : b)
|
2018-06-04 21:41:09 +00:00
|
|
|
BINOP_CASE(I32x4MaxU, i32x4, int4, 4,
|
|
|
|
static_cast<uint32_t>(a) > static_cast<uint32_t>(b) ? a : b)
|
2018-05-23 22:21:05 +00:00
|
|
|
BINOP_CASE(S128And, i32x4, int4, 4, a & b)
|
|
|
|
BINOP_CASE(S128Or, i32x4, int4, 4, a | b)
|
|
|
|
BINOP_CASE(S128Xor, i32x4, int4, 4, a ^ b)
|
2018-05-25 21:38:37 +00:00
|
|
|
BINOP_CASE(I16x8Add, i16x8, int8, 8, a + b)
|
|
|
|
BINOP_CASE(I16x8Sub, i16x8, int8, 8, a - b)
|
|
|
|
BINOP_CASE(I16x8Mul, i16x8, int8, 8, a * b)
|
|
|
|
BINOP_CASE(I16x8MinS, i16x8, int8, 8, a < b ? a : b)
|
2018-06-04 21:41:09 +00:00
|
|
|
BINOP_CASE(I16x8MinU, i16x8, int8, 8,
|
|
|
|
static_cast<uint16_t>(a) < static_cast<uint16_t>(b) ? a : b)
|
2018-05-25 21:38:37 +00:00
|
|
|
BINOP_CASE(I16x8MaxS, i16x8, int8, 8, a > b ? a : b)
|
2018-06-04 21:41:09 +00:00
|
|
|
BINOP_CASE(I16x8MaxU, i16x8, int8, 8,
|
|
|
|
static_cast<uint16_t>(a) > static_cast<uint16_t>(b) ? a : b)
|
2018-05-23 22:21:05 +00:00
|
|
|
BINOP_CASE(I16x8AddSaturateS, i16x8, int8, 8, SaturateAdd<int16_t>(a, b))
|
2018-05-25 21:38:37 +00:00
|
|
|
BINOP_CASE(I16x8AddSaturateU, i16x8, int8, 8, SaturateAdd<uint16_t>(a, b))
|
2018-05-23 22:21:05 +00:00
|
|
|
BINOP_CASE(I16x8SubSaturateS, i16x8, int8, 8, SaturateSub<int16_t>(a, b))
|
2018-05-25 21:38:37 +00:00
|
|
|
BINOP_CASE(I16x8SubSaturateU, i16x8, int8, 8, SaturateSub<uint16_t>(a, b))
|
|
|
|
BINOP_CASE(I8x16Add, i8x16, int16, 16, a + b)
|
|
|
|
BINOP_CASE(I8x16Sub, i8x16, int16, 16, a - b)
|
|
|
|
BINOP_CASE(I8x16Mul, i8x16, int16, 16, a * b)
|
|
|
|
BINOP_CASE(I8x16MinS, i8x16, int16, 16, a < b ? a : b)
|
2018-06-04 21:41:09 +00:00
|
|
|
BINOP_CASE(I8x16MinU, i8x16, int16, 16,
|
|
|
|
static_cast<uint8_t>(a) < static_cast<uint8_t>(b) ? a : b)
|
2018-05-25 21:38:37 +00:00
|
|
|
BINOP_CASE(I8x16MaxS, i8x16, int16, 16, a > b ? a : b)
|
2018-06-04 21:41:09 +00:00
|
|
|
BINOP_CASE(I8x16MaxU, i8x16, int16, 16,
|
|
|
|
static_cast<uint8_t>(a) > static_cast<uint8_t>(b) ? a : b)
|
2018-05-23 22:21:05 +00:00
|
|
|
BINOP_CASE(I8x16AddSaturateS, i8x16, int16, 16, SaturateAdd<int8_t>(a, b))
|
2018-05-25 21:38:37 +00:00
|
|
|
BINOP_CASE(I8x16AddSaturateU, i8x16, int16, 16,
|
2018-05-23 22:21:05 +00:00
|
|
|
SaturateAdd<uint8_t>(a, b))
|
|
|
|
BINOP_CASE(I8x16SubSaturateS, i8x16, int16, 16, SaturateSub<int8_t>(a, b))
|
2018-05-25 21:38:37 +00:00
|
|
|
BINOP_CASE(I8x16SubSaturateU, i8x16, int16, 16,
|
2018-05-23 22:21:05 +00:00
|
|
|
SaturateSub<uint8_t>(a, b))
|
|
|
|
#undef BINOP_CASE
|
2018-05-23 23:24:10 +00:00
|
|
|
#define UNOP_CASE(op, name, stype, count, expr) \
|
|
|
|
case kExpr##op: { \
|
|
|
|
WasmValue v = Pop(); \
|
|
|
|
stype s = v.to_s128().to_##name(); \
|
|
|
|
stype res; \
|
|
|
|
for (size_t i = 0; i < count; ++i) { \
|
|
|
|
auto a = s.val[i]; \
|
|
|
|
res.val[i] = expr; \
|
|
|
|
} \
|
|
|
|
Push(WasmValue(Simd128(res))); \
|
|
|
|
return true; \
|
|
|
|
}
|
|
|
|
UNOP_CASE(F32x4Abs, f32x4, float4, 4, std::abs(a))
|
|
|
|
UNOP_CASE(F32x4Neg, f32x4, float4, 4, -a)
|
|
|
|
UNOP_CASE(F32x4RecipApprox, f32x4, float4, 4, 1.0f / a)
|
|
|
|
UNOP_CASE(F32x4RecipSqrtApprox, f32x4, float4, 4, 1.0f / std::sqrt(a))
|
2018-05-25 21:38:37 +00:00
|
|
|
UNOP_CASE(I32x4Neg, i32x4, int4, 4, -a)
|
2018-05-23 23:24:10 +00:00
|
|
|
UNOP_CASE(S128Not, i32x4, int4, 4, ~a)
|
2018-05-25 21:38:37 +00:00
|
|
|
UNOP_CASE(I16x8Neg, i16x8, int8, 8, -a)
|
|
|
|
UNOP_CASE(I8x16Neg, i8x16, int16, 16, -a)
|
2018-05-23 23:24:10 +00:00
|
|
|
#undef UNOP_CASE
|
2018-05-25 21:50:54 +00:00
|
|
|
#define CMPOP_CASE(op, name, stype, out_stype, count, expr) \
|
|
|
|
case kExpr##op: { \
|
|
|
|
WasmValue v2 = Pop(); \
|
|
|
|
WasmValue v1 = Pop(); \
|
|
|
|
stype s1 = v1.to_s128().to_##name(); \
|
|
|
|
stype s2 = v2.to_s128().to_##name(); \
|
|
|
|
out_stype res; \
|
|
|
|
for (size_t i = 0; i < count; ++i) { \
|
|
|
|
auto a = s1.val[i]; \
|
|
|
|
auto b = s2.val[i]; \
|
|
|
|
res.val[i] = expr ? -1 : 0; \
|
|
|
|
} \
|
|
|
|
Push(WasmValue(Simd128(res))); \
|
|
|
|
return true; \
|
|
|
|
}
|
|
|
|
CMPOP_CASE(F32x4Eq, f32x4, float4, int4, 4, a == b)
|
|
|
|
CMPOP_CASE(F32x4Ne, f32x4, float4, int4, 4, a != b)
|
|
|
|
CMPOP_CASE(F32x4Gt, f32x4, float4, int4, 4, a > b)
|
|
|
|
CMPOP_CASE(F32x4Ge, f32x4, float4, int4, 4, a >= b)
|
|
|
|
CMPOP_CASE(F32x4Lt, f32x4, float4, int4, 4, a < b)
|
|
|
|
CMPOP_CASE(F32x4Le, f32x4, float4, int4, 4, a <= b)
|
|
|
|
CMPOP_CASE(I32x4Eq, i32x4, int4, int4, 4, a == b)
|
|
|
|
CMPOP_CASE(I32x4Ne, i32x4, int4, int4, 4, a != b)
|
|
|
|
CMPOP_CASE(I32x4GtS, i32x4, int4, int4, 4, a > b)
|
|
|
|
CMPOP_CASE(I32x4GeS, i32x4, int4, int4, 4, a >= b)
|
|
|
|
CMPOP_CASE(I32x4LtS, i32x4, int4, int4, 4, a < b)
|
|
|
|
CMPOP_CASE(I32x4LeS, i32x4, int4, int4, 4, a <= b)
|
2018-06-04 21:41:09 +00:00
|
|
|
CMPOP_CASE(I32x4GtU, i32x4, int4, int4, 4,
|
|
|
|
static_cast<uint32_t>(a) > static_cast<uint32_t>(b))
|
|
|
|
CMPOP_CASE(I32x4GeU, i32x4, int4, int4, 4,
|
|
|
|
static_cast<uint32_t>(a) >= static_cast<uint32_t>(b))
|
|
|
|
CMPOP_CASE(I32x4LtU, i32x4, int4, int4, 4,
|
|
|
|
static_cast<uint32_t>(a) < static_cast<uint32_t>(b))
|
|
|
|
CMPOP_CASE(I32x4LeU, i32x4, int4, int4, 4,
|
|
|
|
static_cast<uint32_t>(a) <= static_cast<uint32_t>(b))
|
2018-05-25 21:50:54 +00:00
|
|
|
CMPOP_CASE(I16x8Eq, i16x8, int8, int8, 8, a == b)
|
|
|
|
CMPOP_CASE(I16x8Ne, i16x8, int8, int8, 8, a != b)
|
|
|
|
CMPOP_CASE(I16x8GtS, i16x8, int8, int8, 8, a > b)
|
|
|
|
CMPOP_CASE(I16x8GeS, i16x8, int8, int8, 8, a >= b)
|
|
|
|
CMPOP_CASE(I16x8LtS, i16x8, int8, int8, 8, a < b)
|
|
|
|
CMPOP_CASE(I16x8LeS, i16x8, int8, int8, 8, a <= b)
|
2018-06-04 21:41:09 +00:00
|
|
|
CMPOP_CASE(I16x8GtU, i16x8, int8, int8, 8,
|
|
|
|
static_cast<uint16_t>(a) > static_cast<uint16_t>(b))
|
|
|
|
CMPOP_CASE(I16x8GeU, i16x8, int8, int8, 8,
|
|
|
|
static_cast<uint16_t>(a) >= static_cast<uint16_t>(b))
|
|
|
|
CMPOP_CASE(I16x8LtU, i16x8, int8, int8, 8,
|
|
|
|
static_cast<uint16_t>(a) < static_cast<uint16_t>(b))
|
|
|
|
CMPOP_CASE(I16x8LeU, i16x8, int8, int8, 8,
|
|
|
|
static_cast<uint16_t>(a) <= static_cast<uint16_t>(b))
|
2018-05-25 21:50:54 +00:00
|
|
|
CMPOP_CASE(I8x16Eq, i8x16, int16, int16, 16, a == b)
|
|
|
|
CMPOP_CASE(I8x16Ne, i8x16, int16, int16, 16, a != b)
|
|
|
|
CMPOP_CASE(I8x16GtS, i8x16, int16, int16, 16, a > b)
|
|
|
|
CMPOP_CASE(I8x16GeS, i8x16, int16, int16, 16, a >= b)
|
|
|
|
CMPOP_CASE(I8x16LtS, i8x16, int16, int16, 16, a < b)
|
|
|
|
CMPOP_CASE(I8x16LeS, i8x16, int16, int16, 16, a <= b)
|
2018-06-04 21:41:09 +00:00
|
|
|
CMPOP_CASE(I8x16GtU, i8x16, int16, int16, 16,
|
|
|
|
static_cast<uint8_t>(a) > static_cast<uint8_t>(b))
|
|
|
|
CMPOP_CASE(I8x16GeU, i8x16, int16, int16, 16,
|
|
|
|
static_cast<uint8_t>(a) >= static_cast<uint8_t>(b))
|
|
|
|
CMPOP_CASE(I8x16LtU, i8x16, int16, int16, 16,
|
|
|
|
static_cast<uint8_t>(a) < static_cast<uint8_t>(b))
|
|
|
|
CMPOP_CASE(I8x16LeU, i8x16, int16, int16, 16,
|
|
|
|
static_cast<uint8_t>(a) <= static_cast<uint8_t>(b))
|
2018-05-25 21:50:54 +00:00
|
|
|
#undef CMPOP_CASE
|
2018-05-31 21:38:23 +00:00
|
|
|
#define REPLACE_LANE_CASE(format, name, stype, ctype) \
|
|
|
|
case kExpr##format##ReplaceLane: { \
|
|
|
|
SimdLaneImmediate<Decoder::kNoValidate> imm(decoder, code->at(pc)); \
|
|
|
|
++len; \
|
|
|
|
WasmValue new_val = Pop(); \
|
|
|
|
WasmValue simd_val = Pop(); \
|
|
|
|
stype s = simd_val.to_s128().to_##name(); \
|
|
|
|
s.val[imm.lane] = new_val.to<ctype>(); \
|
|
|
|
Push(WasmValue(Simd128(s))); \
|
|
|
|
return true; \
|
|
|
|
}
|
|
|
|
REPLACE_LANE_CASE(F32x4, f32x4, float4, float)
|
|
|
|
REPLACE_LANE_CASE(I32x4, i32x4, int4, int32_t)
|
|
|
|
REPLACE_LANE_CASE(I16x8, i16x8, int8, int32_t)
|
|
|
|
REPLACE_LANE_CASE(I8x16, i8x16, int16, int32_t)
|
|
|
|
#undef REPLACE_LANE_CASE
|
2018-06-13 21:20:48 +00:00
|
|
|
case kExprS128LoadMem:
|
|
|
|
return ExecuteLoad<Simd128, Simd128>(decoder, code, pc, len,
|
|
|
|
MachineRepresentation::kSimd128);
|
|
|
|
case kExprS128StoreMem:
|
|
|
|
return ExecuteStore<Simd128, Simd128>(decoder, code, pc, len,
|
|
|
|
MachineRepresentation::kSimd128);
|
|
|
|
#define SHIFT_CASE(op, name, stype, count, expr) \
|
|
|
|
case kExpr##op: { \
|
|
|
|
SimdShiftImmediate<Decoder::kNoValidate> imm(decoder, code->at(pc)); \
|
|
|
|
++len; \
|
|
|
|
WasmValue v = Pop(); \
|
|
|
|
stype s = v.to_s128().to_##name(); \
|
|
|
|
stype res; \
|
|
|
|
for (size_t i = 0; i < count; ++i) { \
|
|
|
|
auto a = s.val[i]; \
|
|
|
|
res.val[i] = expr; \
|
|
|
|
} \
|
|
|
|
Push(WasmValue(Simd128(res))); \
|
|
|
|
return true; \
|
|
|
|
}
|
|
|
|
SHIFT_CASE(I32x4Shl, i32x4, int4, 4, a << imm.shift)
|
|
|
|
SHIFT_CASE(I32x4ShrS, i32x4, int4, 4, a >> imm.shift)
|
|
|
|
SHIFT_CASE(I32x4ShrU, i32x4, int4, 4,
|
|
|
|
static_cast<uint32_t>(a) >> imm.shift)
|
|
|
|
SHIFT_CASE(I16x8Shl, i16x8, int8, 8, a << imm.shift)
|
|
|
|
SHIFT_CASE(I16x8ShrS, i16x8, int8, 8, a >> imm.shift)
|
|
|
|
SHIFT_CASE(I16x8ShrU, i16x8, int8, 8,
|
|
|
|
static_cast<uint16_t>(a) >> imm.shift)
|
|
|
|
SHIFT_CASE(I8x16Shl, i8x16, int16, 16, a << imm.shift)
|
|
|
|
SHIFT_CASE(I8x16ShrS, i8x16, int16, 16, a >> imm.shift)
|
|
|
|
SHIFT_CASE(I8x16ShrU, i8x16, int16, 16,
|
|
|
|
static_cast<uint8_t>(a) >> imm.shift)
|
|
|
|
#undef SHIFT_CASE
|
2018-06-20 06:03:44 +00:00
|
|
|
#define CONVERT_CASE(op, src_type, name, dst_type, count, start_index, ctype, \
|
|
|
|
expr) \
|
|
|
|
case kExpr##op: { \
|
|
|
|
WasmValue v = Pop(); \
|
|
|
|
src_type s = v.to_s128().to_##name(); \
|
|
|
|
dst_type res; \
|
|
|
|
for (size_t i = 0; i < count; ++i) { \
|
|
|
|
ctype a = s.val[start_index + i]; \
|
|
|
|
res.val[i] = expr; \
|
|
|
|
} \
|
|
|
|
Push(WasmValue(Simd128(res))); \
|
|
|
|
return true; \
|
|
|
|
}
|
|
|
|
CONVERT_CASE(F32x4SConvertI32x4, int4, i32x4, float4, 4, 0, int32_t,
|
|
|
|
static_cast<float>(a))
|
|
|
|
CONVERT_CASE(F32x4UConvertI32x4, int4, i32x4, float4, 4, 0, uint32_t,
|
|
|
|
static_cast<float>(a))
|
|
|
|
CONVERT_CASE(I32x4SConvertF32x4, float4, f32x4, int4, 4, 0, double,
|
|
|
|
std::isnan(a) ? 0
|
|
|
|
: a<kMinInt ? kMinInt : a> kMaxInt
|
|
|
|
? kMaxInt
|
|
|
|
: static_cast<int32_t>(a))
|
|
|
|
CONVERT_CASE(I32x4UConvertF32x4, float4, f32x4, int4, 4, 0, double,
|
|
|
|
std::isnan(a)
|
|
|
|
? 0
|
|
|
|
: a<0 ? 0 : a> kMaxUInt32 ? kMaxUInt32
|
|
|
|
: static_cast<uint32_t>(a))
|
|
|
|
CONVERT_CASE(I32x4SConvertI16x8High, int8, i16x8, int4, 4, 4, int16_t,
|
|
|
|
a)
|
|
|
|
CONVERT_CASE(I32x4UConvertI16x8High, int8, i16x8, int4, 4, 4, uint16_t,
|
|
|
|
a)
|
|
|
|
CONVERT_CASE(I32x4SConvertI16x8Low, int8, i16x8, int4, 4, 0, int16_t, a)
|
|
|
|
CONVERT_CASE(I32x4UConvertI16x8Low, int8, i16x8, int4, 4, 0, uint16_t,
|
|
|
|
a)
|
|
|
|
CONVERT_CASE(I16x8SConvertI8x16High, int16, i8x16, int8, 8, 8, int8_t,
|
|
|
|
a)
|
|
|
|
CONVERT_CASE(I16x8UConvertI8x16High, int16, i8x16, int8, 8, 8, uint8_t,
|
|
|
|
a)
|
|
|
|
CONVERT_CASE(I16x8SConvertI8x16Low, int16, i8x16, int8, 8, 0, int8_t, a)
|
|
|
|
CONVERT_CASE(I16x8UConvertI8x16Low, int16, i8x16, int8, 8, 0, uint8_t,
|
|
|
|
a)
|
|
|
|
#undef CONVERT_CASE
|
|
|
|
#define PACK_CASE(op, src_type, name, dst_type, count, ctype, dst_ctype, \
|
|
|
|
is_unsigned) \
|
|
|
|
case kExpr##op: { \
|
|
|
|
WasmValue v2 = Pop(); \
|
|
|
|
WasmValue v1 = Pop(); \
|
|
|
|
src_type s1 = v1.to_s128().to_##name(); \
|
|
|
|
src_type s2 = v2.to_s128().to_##name(); \
|
|
|
|
dst_type res; \
|
|
|
|
int64_t min = std::numeric_limits<ctype>::min(); \
|
|
|
|
int64_t max = std::numeric_limits<ctype>::max(); \
|
|
|
|
for (size_t i = 0; i < count; ++i) { \
|
|
|
|
int32_t v = i < count / 2 ? s1.val[i] : s2.val[i - count / 2]; \
|
|
|
|
int64_t a = is_unsigned ? static_cast<int64_t>(v & 0xFFFFFFFFu) : v; \
|
|
|
|
res.val[i] = static_cast<dst_ctype>(std::max(min, std::min(max, a))); \
|
|
|
|
} \
|
|
|
|
Push(WasmValue(Simd128(res))); \
|
|
|
|
return true; \
|
|
|
|
}
|
|
|
|
PACK_CASE(I16x8SConvertI32x4, int4, i32x4, int8, 8, int16_t, int16_t,
|
|
|
|
false)
|
|
|
|
PACK_CASE(I16x8UConvertI32x4, int4, i32x4, int8, 8, uint16_t, int16_t,
|
|
|
|
true)
|
|
|
|
PACK_CASE(I8x16SConvertI16x8, int8, i16x8, int16, 16, int8_t, int8_t,
|
|
|
|
false)
|
|
|
|
PACK_CASE(I8x16UConvertI16x8, int8, i16x8, int16, 16, uint8_t, int8_t,
|
|
|
|
true)
|
|
|
|
#undef PACK_CASE
|
2018-06-29 19:29:16 +00:00
|
|
|
case kExprS128Select: {
|
|
|
|
int4 v2 = Pop().to_s128().to_i32x4();
|
|
|
|
int4 v1 = Pop().to_s128().to_i32x4();
|
|
|
|
int4 bool_val = Pop().to_s128().to_i32x4();
|
|
|
|
int4 res;
|
|
|
|
for (size_t i = 0; i < 4; ++i) {
|
|
|
|
res.val[i] = v2.val[i] ^ ((v1.val[i] ^ v2.val[i]) & bool_val.val[i]);
|
|
|
|
}
|
|
|
|
Push(WasmValue(Simd128(res)));
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
#define ADD_HORIZ_CASE(op, name, stype, count) \
|
|
|
|
case kExpr##op: { \
|
|
|
|
WasmValue v2 = Pop(); \
|
|
|
|
WasmValue v1 = Pop(); \
|
|
|
|
stype s1 = v1.to_s128().to_##name(); \
|
|
|
|
stype s2 = v2.to_s128().to_##name(); \
|
|
|
|
stype res; \
|
|
|
|
for (size_t i = 0; i < count / 2; ++i) { \
|
|
|
|
res.val[i] = s1.val[i * 2] + s1.val[i * 2 + 1]; \
|
|
|
|
res.val[i + count / 2] = s2.val[i * 2] + s2.val[i * 2 + 1]; \
|
|
|
|
} \
|
|
|
|
Push(WasmValue(Simd128(res))); \
|
|
|
|
return true; \
|
|
|
|
}
|
|
|
|
ADD_HORIZ_CASE(I32x4AddHoriz, i32x4, int4, 4)
|
|
|
|
ADD_HORIZ_CASE(F32x4AddHoriz, f32x4, float4, 4)
|
|
|
|
ADD_HORIZ_CASE(I16x8AddHoriz, i16x8, int8, 8)
|
|
|
|
#undef ADD_HORIZ_CASE
|
|
|
|
case kExprS8x16Shuffle: {
|
|
|
|
Simd8x16ShuffleImmediate<Decoder::kNoValidate> imm(decoder,
|
|
|
|
code->at(pc));
|
|
|
|
len += 16;
|
|
|
|
int16 v2 = Pop().to_s128().to_i8x16();
|
|
|
|
int16 v1 = Pop().to_s128().to_i8x16();
|
|
|
|
int16 res;
|
|
|
|
for (size_t i = 0; i < kSimd128Size; ++i) {
|
|
|
|
int lane = imm.shuffle[i];
|
|
|
|
res.val[i] =
|
|
|
|
lane < kSimd128Size ? v1.val[lane] : v2.val[lane - kSimd128Size];
|
|
|
|
}
|
|
|
|
Push(WasmValue(Simd128(res)));
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
#define REDUCTION_CASE(op, name, stype, count, operation) \
|
|
|
|
case kExpr##op: { \
|
|
|
|
stype s = Pop().to_s128().to_##name(); \
|
|
|
|
int32_t res = s.val[0]; \
|
|
|
|
for (size_t i = 1; i < count; ++i) { \
|
|
|
|
res = res operation static_cast<int32_t>(s.val[i]); \
|
|
|
|
} \
|
|
|
|
Push(WasmValue(res)); \
|
|
|
|
return true; \
|
|
|
|
}
|
|
|
|
REDUCTION_CASE(S1x4AnyTrue, i32x4, int4, 4, |)
|
|
|
|
REDUCTION_CASE(S1x4AllTrue, i32x4, int4, 4, &)
|
|
|
|
REDUCTION_CASE(S1x8AnyTrue, i16x8, int8, 8, |)
|
|
|
|
REDUCTION_CASE(S1x8AllTrue, i16x8, int8, 8, &)
|
|
|
|
REDUCTION_CASE(S1x16AnyTrue, i8x16, int16, 16, |)
|
|
|
|
REDUCTION_CASE(S1x16AllTrue, i8x16, int16, 16, &)
|
|
|
|
#undef REDUCTION_CASE
|
2018-05-18 21:47:59 +00:00
|
|
|
default:
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2017-03-31 08:29:02 +00:00
|
|
|
// Check if our control stack (frames_) exceeds the limit. Trigger stack
|
|
|
|
// overflow if it does, and unwinding the current frame.
|
|
|
|
// Returns true if execution can continue, false if the current activation was
|
|
|
|
// fully unwound.
|
|
|
|
// Do call this function immediately *after* pushing a new frame. The pc of
|
|
|
|
// the top frame will be reset to 0 if the stack check fails.
|
2018-04-06 09:37:52 +00:00
|
|
|
bool DoStackCheck() V8_WARN_UNUSED_RESULT {
|
2017-11-02 09:39:33 +00:00
|
|
|
// The goal of this stack check is not to prevent actual stack overflows,
|
|
|
|
// but to simulate stack overflows during the execution of compiled code.
|
|
|
|
// That is why this function uses FLAG_stack_size, even though the value
|
|
|
|
// stack actually lies in zone memory.
|
|
|
|
const size_t stack_size_limit = FLAG_stack_size * KB;
|
|
|
|
// Sum up the value stack size and the control stack size.
|
|
|
|
const size_t current_stack_size =
|
2018-07-05 09:00:20 +00:00
|
|
|
(sp_ - stack_.get()) + frames_.size() * sizeof(Frame);
|
2017-11-02 09:39:33 +00:00
|
|
|
if (V8_LIKELY(current_stack_size <= stack_size_limit)) {
|
2017-03-31 08:29:02 +00:00
|
|
|
return true;
|
|
|
|
}
|
|
|
|
// The pc of the top frame is initialized to the first instruction. We reset
|
|
|
|
// it to 0 here such that we report the same position as in compiled code.
|
|
|
|
frames_.back().pc = 0;
|
2018-04-06 10:18:18 +00:00
|
|
|
Isolate* isolate = instance_object_->GetIsolate();
|
2017-03-31 08:29:02 +00:00
|
|
|
HandleScope handle_scope(isolate);
|
|
|
|
isolate->StackOverflow();
|
|
|
|
return HandleException(isolate) == WasmInterpreter::Thread::HANDLED;
|
|
|
|
}
|
|
|
|
|
2016-05-25 08:32:37 +00:00
|
|
|
void Execute(InterpreterCode* code, pc_t pc, int max) {
|
2017-04-26 17:41:26 +00:00
|
|
|
DCHECK_NOT_NULL(code->side_table);
|
|
|
|
DCHECK(!frames_.empty());
|
|
|
|
// There must be enough space on the stack to hold the arguments, locals,
|
|
|
|
// and the value stack.
|
|
|
|
DCHECK_LE(code->function->sig->parameter_count() +
|
|
|
|
code->locals.type_list.size() +
|
|
|
|
code->side_table->max_stack_height_,
|
2018-07-05 09:00:20 +00:00
|
|
|
stack_limit_ - stack_.get() - frames_.back().sp);
|
2017-04-26 17:41:26 +00:00
|
|
|
|
2016-05-25 08:32:37 +00:00
|
|
|
Decoder decoder(code->start, code->end);
|
|
|
|
pc_t limit = code->end - code->start;
|
2017-03-24 15:42:49 +00:00
|
|
|
bool hit_break = false;
|
|
|
|
|
|
|
|
while (true) {
|
|
|
|
#define PAUSE_IF_BREAK_FLAG(flag) \
|
|
|
|
if (V8_UNLIKELY(break_flags_ & WasmInterpreter::BreakFlag::flag)) { \
|
|
|
|
hit_break = true; \
|
|
|
|
max = 0; \
|
|
|
|
}
|
2016-05-25 08:32:37 +00:00
|
|
|
|
2017-01-27 09:48:08 +00:00
|
|
|
DCHECK_GT(limit, pc);
|
2017-03-15 15:57:02 +00:00
|
|
|
DCHECK_NOT_NULL(code->start);
|
2016-05-25 08:32:37 +00:00
|
|
|
|
2017-04-06 13:32:36 +00:00
|
|
|
// Do first check for a breakpoint, in order to set hit_break correctly.
|
2016-05-30 10:02:34 +00:00
|
|
|
const char* skip = " ";
|
2016-05-25 08:32:37 +00:00
|
|
|
int len = 1;
|
2017-11-04 01:03:03 +00:00
|
|
|
byte orig = code->start[pc];
|
|
|
|
WasmOpcode opcode = static_cast<WasmOpcode>(orig);
|
|
|
|
if (WasmOpcodes::IsPrefixOpcode(opcode)) {
|
|
|
|
opcode = static_cast<WasmOpcode>(opcode << 8 | code->start[pc + 1]);
|
|
|
|
}
|
|
|
|
if (V8_UNLIKELY(orig == kInternalBreakpoint)) {
|
2016-05-30 10:02:34 +00:00
|
|
|
orig = code->orig_start[pc];
|
2017-11-04 01:03:03 +00:00
|
|
|
if (WasmOpcodes::IsPrefixOpcode(static_cast<WasmOpcode>(orig))) {
|
|
|
|
opcode =
|
|
|
|
static_cast<WasmOpcode>(orig << 8 | code->orig_start[pc + 1]);
|
|
|
|
}
|
2016-05-25 08:32:37 +00:00
|
|
|
if (SkipBreakpoint(code, pc)) {
|
|
|
|
// skip breakpoint by switching on original code.
|
2016-05-30 10:02:34 +00:00
|
|
|
skip = "[skip] ";
|
2016-05-25 08:32:37 +00:00
|
|
|
} else {
|
2017-11-04 01:03:03 +00:00
|
|
|
TRACE("@%-3zu: [break] %-24s:", pc, WasmOpcodes::OpcodeName(opcode));
|
2016-05-30 10:02:34 +00:00
|
|
|
TraceValueStack();
|
|
|
|
TRACE("\n");
|
2017-03-24 15:42:49 +00:00
|
|
|
hit_break = true;
|
2017-01-27 08:50:50 +00:00
|
|
|
break;
|
2016-05-25 08:32:37 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2017-04-06 13:32:36 +00:00
|
|
|
// If max is 0, break. If max is positive (a limit is set), decrement it.
|
|
|
|
if (max == 0) break;
|
|
|
|
if (max > 0) --max;
|
2017-03-24 15:42:49 +00:00
|
|
|
|
2016-05-25 08:32:37 +00:00
|
|
|
USE(skip);
|
2017-11-04 01:03:03 +00:00
|
|
|
TRACE("@%-3zu: %s%-24s:", pc, skip, WasmOpcodes::OpcodeName(opcode));
|
2016-05-25 08:32:37 +00:00
|
|
|
TraceValueStack();
|
|
|
|
TRACE("\n");
|
|
|
|
|
2017-04-25 09:43:39 +00:00
|
|
|
#ifdef DEBUG
|
|
|
|
// Compute the stack effect of this opcode, and verify later that the
|
|
|
|
// stack was modified accordingly.
|
|
|
|
std::pair<uint32_t, uint32_t> stack_effect = wasm::StackEffect(
|
|
|
|
codemap_->module(), frames_.back().code->function->sig,
|
|
|
|
code->orig_start + pc, code->orig_end);
|
2017-04-26 17:41:26 +00:00
|
|
|
sp_t expected_new_stack_height =
|
|
|
|
StackHeight() - stack_effect.first + stack_effect.second;
|
2017-04-25 09:43:39 +00:00
|
|
|
#endif
|
|
|
|
|
2016-05-25 08:32:37 +00:00
|
|
|
switch (orig) {
|
|
|
|
case kExprNop:
|
|
|
|
break;
|
2016-09-27 20:46:10 +00:00
|
|
|
case kExprBlock: {
|
2018-05-03 11:59:06 +00:00
|
|
|
BlockTypeImmediate<Decoder::kNoValidate> imm(&decoder, code->at(pc));
|
|
|
|
len = 1 + imm.length;
|
2016-09-27 20:46:10 +00:00
|
|
|
break;
|
|
|
|
}
|
2016-05-25 08:32:37 +00:00
|
|
|
case kExprLoop: {
|
2018-05-03 11:59:06 +00:00
|
|
|
BlockTypeImmediate<Decoder::kNoValidate> imm(&decoder, code->at(pc));
|
|
|
|
len = 1 + imm.length;
|
2016-05-25 08:32:37 +00:00
|
|
|
break;
|
|
|
|
}
|
|
|
|
case kExprIf: {
|
2018-05-03 11:59:06 +00:00
|
|
|
BlockTypeImmediate<Decoder::kNoValidate> imm(&decoder, code->at(pc));
|
2017-07-14 13:49:01 +00:00
|
|
|
WasmValue cond = Pop();
|
2016-05-25 08:32:37 +00:00
|
|
|
bool is_true = cond.to<uint32_t>() != 0;
|
|
|
|
if (is_true) {
|
|
|
|
// fall through to the true block.
|
2018-05-03 11:59:06 +00:00
|
|
|
len = 1 + imm.length;
|
2016-05-25 08:32:37 +00:00
|
|
|
TRACE(" true => fallthrough\n");
|
|
|
|
} else {
|
2017-04-25 09:43:39 +00:00
|
|
|
len = LookupTargetDelta(code, pc);
|
2016-05-25 08:32:37 +00:00
|
|
|
TRACE(" false => @%zu\n", pc + len);
|
|
|
|
}
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
case kExprElse: {
|
2017-04-25 09:43:39 +00:00
|
|
|
len = LookupTargetDelta(code, pc);
|
2016-05-25 08:32:37 +00:00
|
|
|
TRACE(" end => @%zu\n", pc + len);
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
case kExprSelect: {
|
2017-07-14 13:49:01 +00:00
|
|
|
WasmValue cond = Pop();
|
|
|
|
WasmValue fval = Pop();
|
|
|
|
WasmValue tval = Pop();
|
2017-04-26 17:41:26 +00:00
|
|
|
Push(cond.to<int32_t>() != 0 ? tval : fval);
|
2016-05-25 08:32:37 +00:00
|
|
|
break;
|
|
|
|
}
|
|
|
|
case kExprBr: {
|
2018-05-03 11:59:06 +00:00
|
|
|
BreakDepthImmediate<Decoder::kNoValidate> imm(&decoder, code->at(pc));
|
|
|
|
len = DoBreak(code, pc, imm.depth);
|
2016-05-25 08:32:37 +00:00
|
|
|
TRACE(" br => @%zu\n", pc + len);
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
case kExprBrIf: {
|
2018-05-03 11:59:06 +00:00
|
|
|
BreakDepthImmediate<Decoder::kNoValidate> imm(&decoder, code->at(pc));
|
2017-07-14 13:49:01 +00:00
|
|
|
WasmValue cond = Pop();
|
2016-05-25 08:32:37 +00:00
|
|
|
bool is_true = cond.to<uint32_t>() != 0;
|
|
|
|
if (is_true) {
|
2018-05-03 11:59:06 +00:00
|
|
|
len = DoBreak(code, pc, imm.depth);
|
2016-05-25 08:32:37 +00:00
|
|
|
TRACE(" br_if => @%zu\n", pc + len);
|
|
|
|
} else {
|
|
|
|
TRACE(" false => fallthrough\n");
|
2018-05-03 11:59:06 +00:00
|
|
|
len = 1 + imm.length;
|
2016-05-25 08:32:37 +00:00
|
|
|
}
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
case kExprBrTable: {
|
2018-05-03 11:59:06 +00:00
|
|
|
BranchTableImmediate<Decoder::kNoValidate> imm(&decoder,
|
|
|
|
code->at(pc));
|
|
|
|
BranchTableIterator<Decoder::kNoValidate> iterator(&decoder, imm);
|
2016-05-25 08:32:37 +00:00
|
|
|
uint32_t key = Pop().to<uint32_t>();
|
2016-11-02 17:06:38 +00:00
|
|
|
uint32_t depth = 0;
|
2018-05-03 11:59:06 +00:00
|
|
|
if (key >= imm.table_count) key = imm.table_count;
|
2016-11-02 17:06:38 +00:00
|
|
|
for (uint32_t i = 0; i <= key; i++) {
|
|
|
|
DCHECK(iterator.has_next());
|
|
|
|
depth = iterator.next();
|
|
|
|
}
|
|
|
|
len = key + DoBreak(code, pc + key, static_cast<size_t>(depth));
|
2016-09-27 20:46:10 +00:00
|
|
|
TRACE(" br[%u] => @%zu\n", key, pc + key + len);
|
2016-05-25 08:32:37 +00:00
|
|
|
break;
|
|
|
|
}
|
|
|
|
case kExprReturn: {
|
2016-09-27 20:46:10 +00:00
|
|
|
size_t arity = code->function->sig->return_count();
|
2017-03-14 10:46:18 +00:00
|
|
|
if (!DoReturn(&decoder, &code, &pc, &limit, arity)) return;
|
2017-01-24 10:13:33 +00:00
|
|
|
PAUSE_IF_BREAK_FLAG(AfterReturn);
|
2016-05-25 08:32:37 +00:00
|
|
|
continue;
|
|
|
|
}
|
|
|
|
case kExprUnreachable: {
|
2017-03-14 10:46:18 +00:00
|
|
|
return DoTrap(kTrapUnreachable, pc);
|
2016-05-25 08:32:37 +00:00
|
|
|
}
|
|
|
|
case kExprEnd: {
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
case kExprI32Const: {
|
2018-05-03 11:59:06 +00:00
|
|
|
ImmI32Immediate<Decoder::kNoValidate> imm(&decoder, code->at(pc));
|
|
|
|
Push(WasmValue(imm.value));
|
|
|
|
len = 1 + imm.length;
|
2016-05-25 08:32:37 +00:00
|
|
|
break;
|
|
|
|
}
|
|
|
|
case kExprI64Const: {
|
2018-05-03 11:59:06 +00:00
|
|
|
ImmI64Immediate<Decoder::kNoValidate> imm(&decoder, code->at(pc));
|
|
|
|
Push(WasmValue(imm.value));
|
|
|
|
len = 1 + imm.length;
|
2016-05-25 08:32:37 +00:00
|
|
|
break;
|
|
|
|
}
|
|
|
|
case kExprF32Const: {
|
2018-05-03 11:59:06 +00:00
|
|
|
ImmF32Immediate<Decoder::kNoValidate> imm(&decoder, code->at(pc));
|
|
|
|
Push(WasmValue(imm.value));
|
|
|
|
len = 1 + imm.length;
|
2016-05-25 08:32:37 +00:00
|
|
|
break;
|
|
|
|
}
|
|
|
|
case kExprF64Const: {
|
2018-05-03 11:59:06 +00:00
|
|
|
ImmF64Immediate<Decoder::kNoValidate> imm(&decoder, code->at(pc));
|
|
|
|
Push(WasmValue(imm.value));
|
|
|
|
len = 1 + imm.length;
|
2016-05-25 08:32:37 +00:00
|
|
|
break;
|
|
|
|
}
|
|
|
|
case kExprGetLocal: {
|
2018-05-03 11:59:06 +00:00
|
|
|
LocalIndexImmediate<Decoder::kNoValidate> imm(&decoder, code->at(pc));
|
|
|
|
Push(GetStackValue(frames_.back().sp + imm.index));
|
|
|
|
len = 1 + imm.length;
|
2016-05-25 08:32:37 +00:00
|
|
|
break;
|
|
|
|
}
|
|
|
|
case kExprSetLocal: {
|
2018-05-03 11:59:06 +00:00
|
|
|
LocalIndexImmediate<Decoder::kNoValidate> imm(&decoder, code->at(pc));
|
2017-07-14 13:49:01 +00:00
|
|
|
WasmValue val = Pop();
|
2018-05-03 11:59:06 +00:00
|
|
|
SetStackValue(frames_.back().sp + imm.index, val);
|
|
|
|
len = 1 + imm.length;
|
2016-09-27 20:46:10 +00:00
|
|
|
break;
|
|
|
|
}
|
|
|
|
case kExprTeeLocal: {
|
2018-05-03 11:59:06 +00:00
|
|
|
LocalIndexImmediate<Decoder::kNoValidate> imm(&decoder, code->at(pc));
|
2017-07-14 13:49:01 +00:00
|
|
|
WasmValue val = Pop();
|
2018-05-03 11:59:06 +00:00
|
|
|
SetStackValue(frames_.back().sp + imm.index, val);
|
2017-04-26 17:41:26 +00:00
|
|
|
Push(val);
|
2018-05-03 11:59:06 +00:00
|
|
|
len = 1 + imm.length;
|
2016-05-25 08:32:37 +00:00
|
|
|
break;
|
|
|
|
}
|
2016-09-27 20:46:10 +00:00
|
|
|
case kExprDrop: {
|
|
|
|
Pop();
|
|
|
|
break;
|
|
|
|
}
|
2016-05-25 08:32:37 +00:00
|
|
|
case kExprCallFunction: {
|
2018-05-03 11:59:06 +00:00
|
|
|
CallFunctionImmediate<Decoder::kNoValidate> imm(&decoder,
|
|
|
|
code->at(pc));
|
|
|
|
InterpreterCode* target = codemap()->GetCode(imm.index);
|
2017-03-15 15:57:02 +00:00
|
|
|
if (target->function->imported) {
|
|
|
|
CommitPc(pc);
|
2017-03-23 09:46:16 +00:00
|
|
|
ExternalCallResult result =
|
|
|
|
CallImportedFunction(target->function->func_index);
|
|
|
|
switch (result.type) {
|
|
|
|
case ExternalCallResult::INTERNAL:
|
|
|
|
// The import is a function of this instance. Call it directly.
|
|
|
|
target = result.interpreter_code;
|
|
|
|
DCHECK(!target->function->imported);
|
|
|
|
break;
|
|
|
|
case ExternalCallResult::INVALID_FUNC:
|
|
|
|
case ExternalCallResult::SIGNATURE_MISMATCH:
|
|
|
|
// Direct calls are checked statically.
|
|
|
|
UNREACHABLE();
|
|
|
|
case ExternalCallResult::EXTERNAL_RETURNED:
|
|
|
|
PAUSE_IF_BREAK_FLAG(AfterCall);
|
2018-05-03 11:59:06 +00:00
|
|
|
len = 1 + imm.length;
|
2017-03-23 09:46:16 +00:00
|
|
|
break;
|
|
|
|
case ExternalCallResult::EXTERNAL_UNWOUND:
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
if (result.type != ExternalCallResult::INTERNAL) break;
|
2017-03-14 15:54:43 +00:00
|
|
|
}
|
2017-03-23 09:46:16 +00:00
|
|
|
// Execute an internal call.
|
2017-03-31 08:29:02 +00:00
|
|
|
if (!DoCall(&decoder, target, &pc, &limit)) return;
|
2017-03-15 15:57:02 +00:00
|
|
|
code = target;
|
2017-01-24 10:13:33 +00:00
|
|
|
PAUSE_IF_BREAK_FLAG(AfterCall);
|
2017-03-15 15:57:02 +00:00
|
|
|
continue; // don't bump pc
|
2017-03-23 09:46:16 +00:00
|
|
|
} break;
|
2016-05-25 08:32:37 +00:00
|
|
|
case kExprCallIndirect: {
|
2018-05-03 11:59:06 +00:00
|
|
|
CallIndirectImmediate<Decoder::kNoValidate> imm(&decoder,
|
|
|
|
code->at(pc));
|
2016-09-27 20:46:10 +00:00
|
|
|
uint32_t entry_index = Pop().to<uint32_t>();
|
2016-07-28 04:56:56 +00:00
|
|
|
// Assume only one table for now.
|
2018-07-10 12:14:06 +00:00
|
|
|
DCHECK_LE(module()->tables.size(), 1u);
|
2017-03-23 09:46:16 +00:00
|
|
|
ExternalCallResult result =
|
2018-05-03 11:59:06 +00:00
|
|
|
CallIndirectFunction(0, entry_index, imm.sig_index);
|
2017-03-23 09:46:16 +00:00
|
|
|
switch (result.type) {
|
|
|
|
case ExternalCallResult::INTERNAL:
|
|
|
|
// The import is a function of this instance. Call it directly.
|
2017-03-31 08:29:02 +00:00
|
|
|
if (!DoCall(&decoder, result.interpreter_code, &pc, &limit))
|
|
|
|
return;
|
2017-03-23 09:46:16 +00:00
|
|
|
code = result.interpreter_code;
|
|
|
|
PAUSE_IF_BREAK_FLAG(AfterCall);
|
|
|
|
continue; // don't bump pc
|
|
|
|
case ExternalCallResult::INVALID_FUNC:
|
|
|
|
return DoTrap(kTrapFuncInvalid, pc);
|
|
|
|
case ExternalCallResult::SIGNATURE_MISMATCH:
|
2016-10-11 12:40:24 +00:00
|
|
|
return DoTrap(kTrapFuncSigMismatch, pc);
|
2017-03-23 09:46:16 +00:00
|
|
|
case ExternalCallResult::EXTERNAL_RETURNED:
|
|
|
|
PAUSE_IF_BREAK_FLAG(AfterCall);
|
2018-05-03 11:59:06 +00:00
|
|
|
len = 1 + imm.length;
|
2017-03-23 09:46:16 +00:00
|
|
|
break;
|
|
|
|
case ExternalCallResult::EXTERNAL_UNWOUND:
|
|
|
|
return;
|
2016-05-25 08:32:37 +00:00
|
|
|
}
|
2017-03-23 09:46:16 +00:00
|
|
|
} break;
|
2016-08-02 22:38:54 +00:00
|
|
|
case kExprGetGlobal: {
|
2018-05-03 11:59:06 +00:00
|
|
|
GlobalIndexImmediate<Decoder::kNoValidate> imm(&decoder,
|
|
|
|
code->at(pc));
|
|
|
|
const WasmGlobal* global = &module()->globals[imm.index];
|
2018-04-25 18:12:51 +00:00
|
|
|
byte* ptr = GetGlobalPtr(global);
|
2017-07-14 13:49:01 +00:00
|
|
|
WasmValue val;
|
2017-03-15 15:57:02 +00:00
|
|
|
switch (global->type) {
|
2017-07-14 13:49:01 +00:00
|
|
|
#define CASE_TYPE(wasm, ctype) \
|
|
|
|
case kWasm##wasm: \
|
|
|
|
val = WasmValue(*reinterpret_cast<ctype*>(ptr)); \
|
2017-03-15 15:57:02 +00:00
|
|
|
break;
|
|
|
|
WASM_CTYPES(CASE_TYPE)
|
|
|
|
#undef CASE_TYPE
|
|
|
|
default:
|
|
|
|
UNREACHABLE();
|
2016-05-25 08:32:37 +00:00
|
|
|
}
|
2017-04-26 17:41:26 +00:00
|
|
|
Push(val);
|
2018-05-03 11:59:06 +00:00
|
|
|
len = 1 + imm.length;
|
2016-05-25 08:32:37 +00:00
|
|
|
break;
|
|
|
|
}
|
2016-08-02 22:38:54 +00:00
|
|
|
case kExprSetGlobal: {
|
2018-05-03 11:59:06 +00:00
|
|
|
GlobalIndexImmediate<Decoder::kNoValidate> imm(&decoder,
|
|
|
|
code->at(pc));
|
|
|
|
const WasmGlobal* global = &module()->globals[imm.index];
|
2018-04-25 18:12:51 +00:00
|
|
|
byte* ptr = GetGlobalPtr(global);
|
2017-07-14 13:49:01 +00:00
|
|
|
WasmValue val = Pop();
|
2017-03-15 15:57:02 +00:00
|
|
|
switch (global->type) {
|
|
|
|
#define CASE_TYPE(wasm, ctype) \
|
|
|
|
case kWasm##wasm: \
|
|
|
|
*reinterpret_cast<ctype*>(ptr) = val.to<ctype>(); \
|
|
|
|
break;
|
|
|
|
WASM_CTYPES(CASE_TYPE)
|
|
|
|
#undef CASE_TYPE
|
|
|
|
default:
|
|
|
|
UNREACHABLE();
|
2016-05-25 08:32:37 +00:00
|
|
|
}
|
2018-05-03 11:59:06 +00:00
|
|
|
len = 1 + imm.length;
|
2016-05-25 08:32:37 +00:00
|
|
|
break;
|
|
|
|
}
|
|
|
|
|
2017-10-02 07:39:30 +00:00
|
|
|
#define LOAD_CASE(name, ctype, mtype, rep) \
|
|
|
|
case kExpr##name: { \
|
|
|
|
if (!ExecuteLoad<ctype, mtype>(&decoder, code, pc, len, \
|
|
|
|
MachineRepresentation::rep)) \
|
|
|
|
return; \
|
|
|
|
break; \
|
|
|
|
}
|
|
|
|
|
|
|
|
LOAD_CASE(I32LoadMem8S, int32_t, int8_t, kWord8);
|
|
|
|
LOAD_CASE(I32LoadMem8U, int32_t, uint8_t, kWord8);
|
|
|
|
LOAD_CASE(I32LoadMem16S, int32_t, int16_t, kWord16);
|
|
|
|
LOAD_CASE(I32LoadMem16U, int32_t, uint16_t, kWord16);
|
|
|
|
LOAD_CASE(I64LoadMem8S, int64_t, int8_t, kWord8);
|
|
|
|
LOAD_CASE(I64LoadMem8U, int64_t, uint8_t, kWord16);
|
|
|
|
LOAD_CASE(I64LoadMem16S, int64_t, int16_t, kWord16);
|
|
|
|
LOAD_CASE(I64LoadMem16U, int64_t, uint16_t, kWord16);
|
|
|
|
LOAD_CASE(I64LoadMem32S, int64_t, int32_t, kWord32);
|
|
|
|
LOAD_CASE(I64LoadMem32U, int64_t, uint32_t, kWord32);
|
|
|
|
LOAD_CASE(I32LoadMem, int32_t, int32_t, kWord32);
|
|
|
|
LOAD_CASE(I64LoadMem, int64_t, int64_t, kWord64);
|
2017-10-24 12:28:30 +00:00
|
|
|
LOAD_CASE(F32LoadMem, Float32, uint32_t, kFloat32);
|
|
|
|
LOAD_CASE(F64LoadMem, Float64, uint64_t, kFloat64);
|
2016-05-25 08:32:37 +00:00
|
|
|
#undef LOAD_CASE
|
|
|
|
|
2017-10-02 07:39:30 +00:00
|
|
|
#define STORE_CASE(name, ctype, mtype, rep) \
|
|
|
|
case kExpr##name: { \
|
|
|
|
if (!ExecuteStore<ctype, mtype>(&decoder, code, pc, len, \
|
|
|
|
MachineRepresentation::rep)) \
|
|
|
|
return; \
|
|
|
|
break; \
|
|
|
|
}
|
|
|
|
|
|
|
|
STORE_CASE(I32StoreMem8, int32_t, int8_t, kWord8);
|
|
|
|
STORE_CASE(I32StoreMem16, int32_t, int16_t, kWord16);
|
|
|
|
STORE_CASE(I64StoreMem8, int64_t, int8_t, kWord8);
|
|
|
|
STORE_CASE(I64StoreMem16, int64_t, int16_t, kWord16);
|
|
|
|
STORE_CASE(I64StoreMem32, int64_t, int32_t, kWord32);
|
|
|
|
STORE_CASE(I32StoreMem, int32_t, int32_t, kWord32);
|
|
|
|
STORE_CASE(I64StoreMem, int64_t, int64_t, kWord64);
|
2017-10-24 12:28:30 +00:00
|
|
|
STORE_CASE(F32StoreMem, Float32, uint32_t, kFloat32);
|
|
|
|
STORE_CASE(F64StoreMem, Float64, uint64_t, kFloat64);
|
2016-05-25 08:32:37 +00:00
|
|
|
#undef STORE_CASE
|
|
|
|
|
2017-10-16 09:45:56 +00:00
|
|
|
#define ASMJS_LOAD_CASE(name, ctype, mtype, defval) \
|
|
|
|
case kExpr##name: { \
|
|
|
|
uint32_t index = Pop().to<uint32_t>(); \
|
|
|
|
ctype result; \
|
2018-04-13 22:28:05 +00:00
|
|
|
Address addr = BoundsCheckMem<mtype>(0, index); \
|
2017-12-18 13:04:30 +00:00
|
|
|
if (!addr) { \
|
2017-10-16 09:45:56 +00:00
|
|
|
result = defval; \
|
|
|
|
} else { \
|
|
|
|
/* TODO(titzer): alignment for asmjs load mem? */ \
|
|
|
|
result = static_cast<ctype>(*reinterpret_cast<mtype*>(addr)); \
|
|
|
|
} \
|
|
|
|
Push(WasmValue(result)); \
|
|
|
|
break; \
|
2016-05-25 08:32:37 +00:00
|
|
|
}
|
|
|
|
ASMJS_LOAD_CASE(I32AsmjsLoadMem8S, int32_t, int8_t, 0);
|
|
|
|
ASMJS_LOAD_CASE(I32AsmjsLoadMem8U, int32_t, uint8_t, 0);
|
|
|
|
ASMJS_LOAD_CASE(I32AsmjsLoadMem16S, int32_t, int16_t, 0);
|
|
|
|
ASMJS_LOAD_CASE(I32AsmjsLoadMem16U, int32_t, uint16_t, 0);
|
|
|
|
ASMJS_LOAD_CASE(I32AsmjsLoadMem, int32_t, int32_t, 0);
|
|
|
|
ASMJS_LOAD_CASE(F32AsmjsLoadMem, float, float,
|
|
|
|
std::numeric_limits<float>::quiet_NaN());
|
|
|
|
ASMJS_LOAD_CASE(F64AsmjsLoadMem, double, double,
|
|
|
|
std::numeric_limits<double>::quiet_NaN());
|
|
|
|
#undef ASMJS_LOAD_CASE
|
|
|
|
|
|
|
|
#define ASMJS_STORE_CASE(name, ctype, mtype) \
|
|
|
|
case kExpr##name: { \
|
2017-07-14 13:49:01 +00:00
|
|
|
WasmValue val = Pop(); \
|
2016-05-25 08:32:37 +00:00
|
|
|
uint32_t index = Pop().to<uint32_t>(); \
|
2018-04-13 22:28:05 +00:00
|
|
|
Address addr = BoundsCheckMem<mtype>(0, index); \
|
2017-12-18 13:04:30 +00:00
|
|
|
if (addr) { \
|
2016-05-25 08:32:37 +00:00
|
|
|
*(reinterpret_cast<mtype*>(addr)) = static_cast<mtype>(val.to<ctype>()); \
|
|
|
|
} \
|
2017-04-26 17:41:26 +00:00
|
|
|
Push(val); \
|
2016-05-25 08:32:37 +00:00
|
|
|
break; \
|
|
|
|
}
|
|
|
|
|
|
|
|
ASMJS_STORE_CASE(I32AsmjsStoreMem8, int32_t, int8_t);
|
|
|
|
ASMJS_STORE_CASE(I32AsmjsStoreMem16, int32_t, int16_t);
|
|
|
|
ASMJS_STORE_CASE(I32AsmjsStoreMem, int32_t, int32_t);
|
|
|
|
ASMJS_STORE_CASE(F32AsmjsStoreMem, float, float);
|
|
|
|
ASMJS_STORE_CASE(F64AsmjsStoreMem, double, double);
|
|
|
|
#undef ASMJS_STORE_CASE
|
2016-09-14 09:19:02 +00:00
|
|
|
case kExprGrowMemory: {
|
2018-05-03 11:59:06 +00:00
|
|
|
MemoryIndexImmediate<Decoder::kNoValidate> imm(&decoder,
|
|
|
|
code->at(pc));
|
2016-09-14 09:19:02 +00:00
|
|
|
uint32_t delta_pages = Pop().to<uint32_t>();
|
2018-06-23 09:05:50 +00:00
|
|
|
Handle<WasmMemoryObject> memory(instance_object_->memory_object(),
|
|
|
|
instance_object_->GetIsolate());
|
2018-03-16 14:08:59 +00:00
|
|
|
Isolate* isolate = memory->GetIsolate();
|
|
|
|
int32_t result = WasmMemoryObject::Grow(isolate, memory, delta_pages);
|
2017-10-16 09:45:56 +00:00
|
|
|
Push(WasmValue(result));
|
2018-05-03 11:59:06 +00:00
|
|
|
len = 1 + imm.length;
|
2018-02-07 12:30:32 +00:00
|
|
|
// Treat one grow_memory instruction like 1000 other instructions,
|
|
|
|
// because it is a really expensive operation.
|
|
|
|
if (max > 0) max = std::max(0, max - 1000);
|
2016-09-14 09:19:02 +00:00
|
|
|
break;
|
|
|
|
}
|
2016-05-25 08:32:37 +00:00
|
|
|
case kExprMemorySize: {
|
2018-05-03 11:59:06 +00:00
|
|
|
MemoryIndexImmediate<Decoder::kNoValidate> imm(&decoder,
|
|
|
|
code->at(pc));
|
2018-04-06 10:18:18 +00:00
|
|
|
Push(WasmValue(static_cast<uint32_t>(instance_object_->memory_size() /
|
|
|
|
kWasmPageSize)));
|
2018-05-03 11:59:06 +00:00
|
|
|
len = 1 + imm.length;
|
2016-05-25 08:32:37 +00:00
|
|
|
break;
|
|
|
|
}
|
2017-01-20 10:46:48 +00:00
|
|
|
// We need to treat kExprI32ReinterpretF32 and kExprI64ReinterpretF64
|
|
|
|
// specially to guarantee that the quiet bit of a NaN is preserved on
|
|
|
|
// ia32 by the reinterpret casts.
|
|
|
|
case kExprI32ReinterpretF32: {
|
2017-07-14 13:49:01 +00:00
|
|
|
WasmValue val = Pop();
|
|
|
|
Push(WasmValue(ExecuteI32ReinterpretF32(val)));
|
2017-01-20 10:46:48 +00:00
|
|
|
break;
|
|
|
|
}
|
|
|
|
case kExprI64ReinterpretF64: {
|
2017-07-14 13:49:01 +00:00
|
|
|
WasmValue val = Pop();
|
|
|
|
Push(WasmValue(ExecuteI64ReinterpretF64(val)));
|
2017-01-20 10:46:48 +00:00
|
|
|
break;
|
2017-12-27 18:54:38 +00:00
|
|
|
}
|
2018-06-01 21:27:29 +00:00
|
|
|
#define SIGN_EXTENSION_CASE(name, wtype, ntype) \
|
|
|
|
case kExpr##name: { \
|
|
|
|
ntype val = static_cast<ntype>(Pop().to<wtype>()); \
|
|
|
|
Push(WasmValue(static_cast<wtype>(val))); \
|
|
|
|
break; \
|
|
|
|
}
|
|
|
|
SIGN_EXTENSION_CASE(I32SExtendI8, int32_t, int8_t);
|
|
|
|
SIGN_EXTENSION_CASE(I32SExtendI16, int32_t, int16_t);
|
|
|
|
SIGN_EXTENSION_CASE(I64SExtendI8, int64_t, int8_t);
|
|
|
|
SIGN_EXTENSION_CASE(I64SExtendI16, int64_t, int16_t);
|
|
|
|
SIGN_EXTENSION_CASE(I64SExtendI32, int64_t, int32_t);
|
|
|
|
#undef SIGN_EXTENSION_CASE
|
2017-12-27 18:54:38 +00:00
|
|
|
case kNumericPrefix: {
|
|
|
|
++len;
|
|
|
|
if (!ExecuteNumericOp(opcode, &decoder, code, pc, len)) return;
|
|
|
|
break;
|
2017-01-20 10:46:48 +00:00
|
|
|
}
|
2017-11-04 01:03:03 +00:00
|
|
|
case kAtomicPrefix: {
|
|
|
|
if (!ExecuteAtomicOp(opcode, &decoder, code, pc, len)) return;
|
|
|
|
break;
|
|
|
|
}
|
2018-05-18 21:47:59 +00:00
|
|
|
case kSimdPrefix: {
|
|
|
|
++len;
|
|
|
|
if (!ExecuteSimdOp(opcode, &decoder, code, pc, len)) return;
|
|
|
|
break;
|
|
|
|
}
|
2017-11-04 01:03:03 +00:00
|
|
|
|
2017-07-14 13:49:01 +00:00
|
|
|
#define EXECUTE_SIMPLE_BINOP(name, ctype, op) \
|
|
|
|
case kExpr##name: { \
|
|
|
|
WasmValue rval = Pop(); \
|
|
|
|
WasmValue lval = Pop(); \
|
2017-10-26 07:45:12 +00:00
|
|
|
auto result = lval.to<ctype>() op rval.to<ctype>(); \
|
|
|
|
possible_nondeterminism_ |= has_nondeterminism(result); \
|
|
|
|
Push(WasmValue(result)); \
|
2017-07-14 13:49:01 +00:00
|
|
|
break; \
|
2016-05-25 08:32:37 +00:00
|
|
|
}
|
|
|
|
FOREACH_SIMPLE_BINOP(EXECUTE_SIMPLE_BINOP)
|
|
|
|
#undef EXECUTE_SIMPLE_BINOP
|
|
|
|
|
2017-10-26 07:45:12 +00:00
|
|
|
#define EXECUTE_OTHER_BINOP(name, ctype) \
|
|
|
|
case kExpr##name: { \
|
|
|
|
TrapReason trap = kTrapCount; \
|
|
|
|
ctype rval = Pop().to<ctype>(); \
|
|
|
|
ctype lval = Pop().to<ctype>(); \
|
|
|
|
auto result = Execute##name(lval, rval, &trap); \
|
|
|
|
possible_nondeterminism_ |= has_nondeterminism(result); \
|
|
|
|
if (trap != kTrapCount) return DoTrap(trap, pc); \
|
|
|
|
Push(WasmValue(result)); \
|
|
|
|
break; \
|
2016-05-25 08:32:37 +00:00
|
|
|
}
|
|
|
|
FOREACH_OTHER_BINOP(EXECUTE_OTHER_BINOP)
|
|
|
|
#undef EXECUTE_OTHER_BINOP
|
|
|
|
|
2018-01-16 19:32:52 +00:00
|
|
|
#define EXECUTE_UNOP(name, ctype, exec_fn) \
|
2017-10-26 07:45:12 +00:00
|
|
|
case kExpr##name: { \
|
|
|
|
TrapReason trap = kTrapCount; \
|
|
|
|
ctype val = Pop().to<ctype>(); \
|
2018-01-16 19:32:52 +00:00
|
|
|
auto result = exec_fn(val, &trap); \
|
2017-10-26 07:45:12 +00:00
|
|
|
possible_nondeterminism_ |= has_nondeterminism(result); \
|
|
|
|
if (trap != kTrapCount) return DoTrap(trap, pc); \
|
|
|
|
Push(WasmValue(result)); \
|
|
|
|
break; \
|
2016-05-25 08:32:37 +00:00
|
|
|
}
|
2018-01-16 19:32:52 +00:00
|
|
|
|
|
|
|
#define EXECUTE_OTHER_UNOP(name, ctype) EXECUTE_UNOP(name, ctype, Execute##name)
|
2016-05-25 08:32:37 +00:00
|
|
|
FOREACH_OTHER_UNOP(EXECUTE_OTHER_UNOP)
|
|
|
|
#undef EXECUTE_OTHER_UNOP
|
|
|
|
|
2018-01-16 19:32:52 +00:00
|
|
|
#define EXECUTE_I32CONV_FLOATOP(name, out_type, in_type) \
|
|
|
|
EXECUTE_UNOP(name, in_type, ExecuteConvert<out_type>)
|
|
|
|
FOREACH_I32CONV_FLOATOP(EXECUTE_I32CONV_FLOATOP)
|
|
|
|
#undef EXECUTE_I32CONV_FLOATOP
|
|
|
|
#undef EXECUTE_UNOP
|
|
|
|
|
2016-05-25 08:32:37 +00:00
|
|
|
default:
|
2018-02-13 10:18:54 +00:00
|
|
|
FATAL("Unknown or unimplemented opcode #%d:%s", code->start[pc],
|
|
|
|
OpcodeName(code->start[pc]));
|
2016-05-25 08:32:37 +00:00
|
|
|
UNREACHABLE();
|
|
|
|
}
|
|
|
|
|
2017-04-25 09:43:39 +00:00
|
|
|
#ifdef DEBUG
|
2017-11-04 01:03:03 +00:00
|
|
|
if (!WasmOpcodes::IsControlOpcode(opcode)) {
|
2017-04-26 17:41:26 +00:00
|
|
|
DCHECK_EQ(expected_new_stack_height, StackHeight());
|
2017-04-25 09:43:39 +00:00
|
|
|
}
|
|
|
|
#endif
|
|
|
|
|
2016-05-25 08:32:37 +00:00
|
|
|
pc += len;
|
2017-01-27 09:48:08 +00:00
|
|
|
if (pc == limit) {
|
|
|
|
// Fell off end of code; do an implicit return.
|
|
|
|
TRACE("@%-3zu: ImplicitReturn\n", pc);
|
2017-03-14 10:46:18 +00:00
|
|
|
if (!DoReturn(&decoder, &code, &pc, &limit,
|
|
|
|
code->function->sig->return_count()))
|
2017-01-27 09:48:08 +00:00
|
|
|
return;
|
|
|
|
PAUSE_IF_BREAK_FLAG(AfterReturn);
|
|
|
|
}
|
2017-09-08 13:42:59 +00:00
|
|
|
#undef PAUSE_IF_BREAK_FLAG
|
2016-05-25 08:32:37 +00:00
|
|
|
}
|
2017-03-24 15:42:49 +00:00
|
|
|
|
2017-01-24 10:13:33 +00:00
|
|
|
state_ = WasmInterpreter::PAUSED;
|
2017-03-24 15:42:49 +00:00
|
|
|
break_pc_ = hit_break ? pc : kInvalidPc;
|
2017-01-24 10:13:33 +00:00
|
|
|
CommitPc(pc);
|
2016-05-25 08:32:37 +00:00
|
|
|
}
|
|
|
|
|
2017-07-14 13:49:01 +00:00
|
|
|
  // Pops and returns the topmost value from the operand stack of the current
  // frame. Must not be used to pop past the frame's locals.
  WasmValue Pop() {
    DCHECK_GT(frames_.size(), 0);
    DCHECK_GT(StackHeight(), frames_.back().llimit());  // can't pop into locals
    return *--sp_;
  }
|
|
|
|
|
|
|
|
void PopN(int n) {
|
2017-04-26 17:41:26 +00:00
|
|
|
DCHECK_GE(StackHeight(), n);
|
[base] Define CHECK comparison for signed vs. unsigned
The current CHECK/DCHECK implementation fails statically if a signed
value is compared against an unsigned value. The common solution is to
cast on each caller, which is tedious and error-prone (might hide bugs).
This CL implements signed vs. unsigned comparisons by executing up to
two comparisons. For example, if i is int32_t and u is uint_32_t, a
DCHECK_LE(i, u) would create the check
i <= 0 || static_cast<uint32_t>(i) <= u.
For checks against constants, at least one of the checks can be removed
by compiler optimizations.
The tradeoff we have to make is to sometimes silently execute an
additional comparison. And we increase code complexity of course, even
though the usage is just as easy (or even easier) as before.
The compile time impact seems to be minimal:
I ran 3 full compilations for Optdebug on my local machine, one time on
the current ToT, one time with this CL plus http://crrev.com/2524093002.
Before: 143.72 +- 1.21 seconds
Now: 144.18 +- 0.67 seconds
In order to check that the new comparisons are working, I refactored
some DCHECKs in wasm to use the new magic, and added unit test cases.
R=ishell@chromium.org, titzer@chromium.org
CC=ahaas@chromium.org, bmeurer@chromium.org
Committed: https://crrev.com/5925074a9dab5a8577766545b91b62f2c531d3dc
Review-Url: https://codereview.chromium.org/2526783002
Cr-Original-Commit-Position: refs/heads/master@{#41275}
Cr-Commit-Position: refs/heads/master@{#41411}
2016-12-01 08:52:31 +00:00
|
|
|
DCHECK_GT(frames_.size(), 0);
|
2017-04-26 17:41:26 +00:00
|
|
|
// Check that we don't pop into locals.
|
|
|
|
DCHECK_GE(StackHeight() - n, frames_.back().llimit());
|
|
|
|
sp_ -= n;
|
2016-05-25 08:32:37 +00:00
|
|
|
}
|
|
|
|
|
2017-07-14 13:49:01 +00:00
|
|
|
WasmValue PopArity(size_t arity) {
|
|
|
|
if (arity == 0) return WasmValue();
|
[base] Define CHECK comparison for signed vs. unsigned
The current CHECK/DCHECK implementation fails statically if a signed
value is compared against an unsigned value. The common solution is to
cast on each caller, which is tedious and error-prone (might hide bugs).
This CL implements signed vs. unsigned comparisons by executing up to
two comparisons. For example, if i is int32_t and u is uint_32_t, a
DCHECK_LE(i, u) would create the check
i <= 0 || static_cast<uint32_t>(i) <= u.
For checks against constants, at least one of the checks can be removed
by compiler optimizations.
The tradeoff we have to make is to sometimes silently execute an
additional comparison. And we increase code complexity of course, even
though the usage is just as easy (or even easier) as before.
The compile time impact seems to be minimal:
I ran 3 full compilations for Optdebug on my local machine, one time on
the current ToT, one time with this CL plus http://crrev.com/2524093002.
Before: 143.72 +- 1.21 seconds
Now: 144.18 +- 0.67 seconds
In order to check that the new comparisons are working, I refactored
some DCHECKs in wasm to use the new magic, and added unit test cases.
R=ishell@chromium.org, titzer@chromium.org
CC=ahaas@chromium.org, bmeurer@chromium.org
Committed: https://crrev.com/5925074a9dab5a8577766545b91b62f2c531d3dc
Review-Url: https://codereview.chromium.org/2526783002
Cr-Original-Commit-Position: refs/heads/master@{#41275}
Cr-Commit-Position: refs/heads/master@{#41411}
2016-12-01 08:52:31 +00:00
|
|
|
CHECK_EQ(1, arity);
|
2016-05-25 08:32:37 +00:00
|
|
|
return Pop();
|
|
|
|
}
|
|
|
|
|
2017-07-14 13:49:01 +00:00
|
|
|
void Push(WasmValue val) {
|
|
|
|
DCHECK_NE(kWasmStmt, val.type());
|
2017-04-26 17:41:26 +00:00
|
|
|
DCHECK_LE(1, stack_limit_ - sp_);
|
|
|
|
*sp_++ = val;
|
|
|
|
}
|
|
|
|
|
2017-07-14 13:49:01 +00:00
|
|
|
void Push(WasmValue* vals, size_t arity) {
|
2017-04-26 17:41:26 +00:00
|
|
|
DCHECK_LE(arity, stack_limit_ - sp_);
|
2017-07-14 13:49:01 +00:00
|
|
|
for (WasmValue *val = vals, *end = vals + arity; val != end; ++val) {
|
|
|
|
DCHECK_NE(kWasmStmt, val->type());
|
2017-04-26 17:41:26 +00:00
|
|
|
}
|
|
|
|
memcpy(sp_, vals, arity * sizeof(*sp_));
|
|
|
|
sp_ += arity;
|
|
|
|
}
|
|
|
|
|
|
|
|
  // Grows the operand stack, if necessary, so that at least {size} more
  // values can be pushed. On growth, the existing values are copied into a
  // fresh allocation and sp_/stack_limit_ are rebased into it.
  void EnsureStackSpace(size_t size) {
    // Fast path: enough headroom already.
    if (V8_LIKELY(static_cast<size_t>(stack_limit_ - sp_) >= size)) return;
    size_t old_size = stack_limit_ - stack_.get();
    size_t requested_size =
        base::bits::RoundUpToPowerOfTwo64((sp_ - stack_.get()) + size);
    // Grow at least geometrically (2x) to keep pushes amortized O(1), and
    // never below 8 slots.
    size_t new_size = Max(size_t{8}, Max(2 * old_size, requested_size));
    std::unique_ptr<WasmValue[]> new_stack(new WasmValue[new_size]);
    memcpy(new_stack.get(), stack_.get(), old_size * sizeof(*sp_));
    // Rebase sp_ to the same logical height within the new allocation.
    sp_ = new_stack.get() + (sp_ - stack_.get());
    stack_ = std::move(new_stack);
    stack_limit_ = stack_.get() + new_size;
  }
|
|
|
|
|
2018-07-05 09:00:20 +00:00
|
|
|
  // Number of values currently on the operand stack.
  sp_t StackHeight() { return sp_ - stack_.get(); }
|
2017-04-26 17:41:26 +00:00
|
|
|
|
2016-05-25 08:32:37 +00:00
|
|
|
  // Debug-only: prints the value stack of the current (topmost) frame when
  // --trace-wasm-interpreter is enabled. Each slot is tagged as parameter
  // (p), local (l) or operand stack (s) based on the frame's limits.
  void TraceValueStack() {
#ifdef DEBUG
    if (!FLAG_trace_wasm_interpreter) return;
    Frame* top = frames_.size() > 0 ? &frames_.back() : nullptr;
    sp_t sp = top ? top->sp : 0;
    sp_t plimit = top ? top->plimit() : 0;
    sp_t llimit = top ? top->llimit() : 0;
    for (size_t i = sp; i < StackHeight(); ++i) {
      if (i < plimit)
        PrintF(" p%zu:", i);
      else if (i < llimit)
        PrintF(" l%zu:", i);
      else
        PrintF(" s%zu:", i);
      WasmValue val = GetStackValue(i);
      switch (val.type()) {
        case kWasmI32:
          PrintF("i32:%d", val.to<int32_t>());
          break;
        case kWasmI64:
          PrintF("i64:%" PRId64 "", val.to<int64_t>());
          break;
        case kWasmF32:
          PrintF("f32:%f", val.to<float>());
          break;
        case kWasmF64:
          PrintF("f64:%lf", val.to<double>());
          break;
        case kWasmStmt:
          PrintF("void");
          break;
        default:
          UNREACHABLE();
          break;
      }
    }
#endif  // DEBUG
  }
|
2017-03-15 15:57:02 +00:00
|
|
|
|
2017-03-23 09:46:16 +00:00
|
|
|
ExternalCallResult TryHandleException(Isolate* isolate) {
|
|
|
|
if (HandleException(isolate) == WasmInterpreter::Thread::UNWOUND) {
|
|
|
|
return {ExternalCallResult::EXTERNAL_UNWOUND};
|
2017-03-15 15:57:02 +00:00
|
|
|
}
|
2017-03-23 09:46:16 +00:00
|
|
|
return {ExternalCallResult::EXTERNAL_RETURNED};
|
|
|
|
}
|
|
|
|
|
2018-04-06 10:18:18 +00:00
|
|
|
  // Calls a wasm function that lives outside this interpreter instance
  // (another wasm instance or a wasm-to-JS wrapper) through the C wasm entry
  // stub. Arguments are popped from the interpreter stack into a raw byte
  // buffer; a single return value (if any) is pushed back.
  ExternalCallResult CallExternalWasmFunction(
      Isolate* isolate, Handle<WasmInstanceObject> instance,
      const wasm::WasmCode* code, FunctionSig* sig) {
    // JS-incompatible signatures (e.g. i64 in the signature) cannot cross
    // the JS boundary; throw a TypeError instead.
    if (code->kind() == wasm::WasmCode::kWasmToJsWrapper &&
        !IsJSCompatibleSignature(sig)) {
      isolate->Throw(*isolate->factory()->NewTypeError(
          MessageTemplate::kWasmTrapTypeError));
      return TryHandleException(isolate);
    }

    Handle<WasmDebugInfo> debug_info(instance_object_->debug_info(), isolate);
    Handle<JSFunction> wasm_entry =
        WasmDebugInfo::GetCWasmEntry(debug_info, sig);

    TRACE(" => Calling external wasm function\n");

    // Copy the arguments to one buffer.
    // TODO(clemensh): Introduce a helper for all argument buffer
    // con-/destruction.
    int num_args = static_cast<int>(sig->parameter_count());
    std::vector<uint8_t> arg_buffer(num_args * 8);
    size_t offset = 0;
    // The arguments are the topmost {num_args} values on the operand stack.
    WasmValue* wasm_args = sp_ - num_args;
    for (int i = 0; i < num_args; ++i) {
      int param_size = ValueTypes::ElementSizeInBytes(sig->GetParam(i));
      if (arg_buffer.size() < offset + param_size) {
        arg_buffer.resize(std::max(2 * arg_buffer.size(), offset + param_size));
      }
      Address address = reinterpret_cast<Address>(arg_buffer.data()) + offset;
      switch (sig->GetParam(i)) {
        case kWasmI32:
          WriteUnalignedValue(address, wasm_args[i].to<uint32_t>());
          break;
        case kWasmI64:
          WriteUnalignedValue(address, wasm_args[i].to<uint64_t>());
          break;
        case kWasmF32:
          WriteUnalignedValue(address, wasm_args[i].to<float>());
          break;
        case kWasmF64:
          WriteUnalignedValue(address, wasm_args[i].to<double>());
          break;
        default:
          UNIMPLEMENTED();
      }
      offset += param_size;
    }

    // Ensure that there is enough space in the arg_buffer to hold the return
    // value(s).
    size_t return_size = 0;
    for (ValueType t : sig->returns()) {
      return_size += ValueTypes::ElementSizeInBytes(t);
    }
    if (arg_buffer.size() < return_size) {
      arg_buffer.resize(return_size);
    }

    // Wrap the arg_buffer data pointer in a handle. As
    // this is an aligned pointer, to the GC it will look like a Smi.
    Handle<Object> arg_buffer_obj(reinterpret_cast<Object*>(arg_buffer.data()),
                                  isolate);
    DCHECK(!arg_buffer_obj->IsHeapObject());

    static_assert(compiler::CWasmEntryParameters::kNumParameters == 3,
                  "code below needs adaption");
    Handle<Object> args[compiler::CWasmEntryParameters::kNumParameters];
    args[compiler::CWasmEntryParameters::kCodeObject] = Handle<Object>::cast(
        isolate->factory()->NewForeign(code->instruction_start(), TENURED));
    args[compiler::CWasmEntryParameters::kWasmInstance] = instance;
    args[compiler::CWasmEntryParameters::kArgumentsBuffer] = arg_buffer_obj;

    Handle<Object> receiver = isolate->factory()->undefined_value();
    trap_handler::SetThreadInWasm();
    MaybeHandle<Object> maybe_retval =
        Execution::Call(isolate, wasm_entry, receiver, arraysize(args), args);
    TRACE(" => External wasm function returned%s\n",
          maybe_retval.is_null() ? " with exception" : "");

    if (maybe_retval.is_null()) {
      // JSEntryStub may throw a stack overflow before we actually get to wasm
      // code or back to the interpreter, meaning the thread-in-wasm flag won't
      // be cleared.
      if (trap_handler::IsThreadInWasm()) {
        trap_handler::ClearThreadInWasm();
      }
      return TryHandleException(isolate);
    }

    trap_handler::ClearThreadInWasm();

    // Pop arguments off the stack.
    sp_ -= num_args;
    // Push return values.
    if (sig->return_count() > 0) {
      // TODO(wasm): Handle multiple returns.
      DCHECK_EQ(1, sig->return_count());
      Address address = reinterpret_cast<Address>(arg_buffer.data());
      switch (sig->GetReturn()) {
        case kWasmI32:
          Push(WasmValue(ReadUnalignedValue<uint32_t>(address)));
          break;
        case kWasmI64:
          Push(WasmValue(ReadUnalignedValue<uint64_t>(address)));
          break;
        case kWasmF32:
          Push(WasmValue(ReadUnalignedValue<float>(address)));
          break;
        case kWasmF64:
          Push(WasmValue(ReadUnalignedValue<double>(address)));
          break;
        default:
          UNIMPLEMENTED();
      }
    }
    return {ExternalCallResult::EXTERNAL_RETURNED};
  }
|
|
|
|
|
2018-07-06 09:50:46 +00:00
|
|
|
static WasmCode* GetTargetCode(WasmCodeManager* code_manager,
|
|
|
|
Address target) {
|
|
|
|
NativeModule* native_module = code_manager->LookupNativeModule(target);
|
|
|
|
if (native_module->is_jump_table_slot(target)) {
|
|
|
|
uint32_t func_index =
|
|
|
|
native_module->GetFunctionIndexFromJumpTableSlot(target);
|
|
|
|
return native_module->code(func_index);
|
|
|
|
}
|
|
|
|
WasmCode* code = native_module->Lookup(target);
|
|
|
|
DCHECK_EQ(code->instruction_start(), target);
|
|
|
|
return code;
|
|
|
|
}
|
|
|
|
|
2017-03-23 09:46:16 +00:00
|
|
|
ExternalCallResult CallImportedFunction(uint32_t function_index) {
|
|
|
|
// Use a new HandleScope to avoid leaking / accumulating handles in the
|
|
|
|
// outer scope.
|
2018-04-06 10:18:18 +00:00
|
|
|
Isolate* isolate = instance_object_->GetIsolate();
|
2017-03-23 09:46:16 +00:00
|
|
|
HandleScope handle_scope(isolate);
|
|
|
|
|
2018-04-06 10:18:18 +00:00
|
|
|
DCHECK_GT(module()->num_imported_functions, function_index);
|
2018-04-10 18:02:24 +00:00
|
|
|
Handle<WasmInstanceObject> instance;
|
2018-07-06 09:50:46 +00:00
|
|
|
ImportedFunctionEntry entry(instance_object_, function_index);
|
|
|
|
instance = handle(entry.instance(), isolate);
|
|
|
|
WasmCode* code =
|
|
|
|
GetTargetCode(isolate->wasm_engine()->code_manager(), entry.target());
|
2018-04-06 10:18:18 +00:00
|
|
|
FunctionSig* sig = codemap()->module()->functions[function_index].sig;
|
|
|
|
return CallExternalWasmFunction(isolate, instance, code, sig);
|
2017-03-23 09:46:16 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
  // Resolves and dispatches a call_indirect: bounds-checks the table entry,
  // verifies the canonicalized signature, then either returns an internal
  // target (same instance, regular function) or performs an external call.
  ExternalCallResult CallIndirectFunction(uint32_t table_index,
                                          uint32_t entry_index,
                                          uint32_t sig_index) {
    // Testing-only path: resolve the target via the WasmModule instead of
    // the instance's dispatch table.
    if (codemap()->call_indirect_through_module()) {
      // Rely on the information stored in the WasmModule.
      InterpreterCode* code =
          codemap()->GetIndirectCode(table_index, entry_index);
      if (!code) return {ExternalCallResult::INVALID_FUNC};
      if (code->function->sig_index != sig_index) {
        // If not an exact match, we have to do a canonical check.
        int function_canonical_id =
            module()->signature_ids[code->function->sig_index];
        int expected_canonical_id = module()->signature_ids[sig_index];
        DCHECK_EQ(function_canonical_id,
                  module()->signature_map.Find(code->function->sig));
        if (function_canonical_id != expected_canonical_id) {
          return {ExternalCallResult::SIGNATURE_MISMATCH};
        }
      }
      return {ExternalCallResult::INTERNAL, code};
    }

    Isolate* isolate = instance_object_->GetIsolate();
    uint32_t expected_sig_id = module()->signature_ids[sig_index];
    DCHECK_EQ(expected_sig_id,
              module()->signature_map.Find(module()->signatures[sig_index]));

    // The function table is stored in the instance.
    // TODO(wasm): the wasm interpreter currently supports only one table.
    CHECK_EQ(0, table_index);
    // Bounds check against table size.
    if (entry_index >= instance_object_->indirect_function_table_size()) {
      return {ExternalCallResult::INVALID_FUNC};
    }

    IndirectFunctionTableEntry entry(instance_object_, entry_index);
    // Signature check.
    if (entry.sig_id() != static_cast<int32_t>(expected_sig_id)) {
      return {ExternalCallResult::SIGNATURE_MISMATCH};
    }

    Handle<WasmInstanceObject> instance = handle(entry.instance(), isolate);
    WasmCode* code =
        GetTargetCode(isolate->wasm_engine()->code_manager(), entry.target());

    // Call either an internal or external WASM function.
    HandleScope scope(isolate);
    FunctionSig* signature = module()->signatures[sig_index];

    if (code->kind() == wasm::WasmCode::kFunction) {
      if (!instance_object_.is_identical_to(instance)) {
        // Cross instance call.
        return CallExternalWasmFunction(isolate, instance, code, signature);
      }
      // Same instance: execute inside this interpreter.
      return {ExternalCallResult::INTERNAL, codemap()->GetCode(code->index())};
    }

    // Call to external function.
    if (code->kind() == wasm::WasmCode::kInterpreterEntry ||
        code->kind() == wasm::WasmCode::kWasmToJsWrapper) {
      return CallExternalWasmFunction(isolate, instance, code, signature);
    }
    return {ExternalCallResult::INVALID_FUNC};
  }
|
2017-03-21 10:54:14 +00:00
|
|
|
|
|
|
|
inline Activation current_activation() {
|
|
|
|
return activations_.empty() ? Activation(0, 0) : activations_.back();
|
|
|
|
}
|
2016-05-25 08:32:37 +00:00
|
|
|
};
|
|
|
|
|
2017-04-11 13:04:13 +00:00
|
|
|
// Read-only view of one interpreter frame, identified by its index into the
// thread's frame list. Provides int-based accessors (with range DCHECKs) for
// the debugger-facing InterpretedFrame API.
class InterpretedFrameImpl {
 public:
  InterpretedFrameImpl(ThreadImpl* thread, int index)
      : thread_(thread), index_(index) {
    DCHECK_LE(0, index);
  }

  // The wasm function executing in this frame.
  const WasmFunction* function() const { return frame()->code->function; }

  // Current program counter within the function, narrowed to int.
  int pc() const {
    DCHECK_LE(0, frame()->pc);
    DCHECK_GE(kMaxInt, frame()->pc);
    return static_cast<int>(frame()->pc);
  }

  int GetParameterCount() const {
    DCHECK_GE(kMaxInt, function()->sig->parameter_count());
    return static_cast<int>(function()->sig->parameter_count());
  }

  // Locals include the parameters followed by the declared locals.
  int GetLocalCount() const {
    size_t num_locals = function()->sig->parameter_count() +
                        frame()->code->locals.type_list.size();
    DCHECK_GE(kMaxInt, num_locals);
    return static_cast<int>(num_locals);
  }

  // Number of operand-stack values belonging to this frame (excluding
  // locals). For non-top frames the next frame's base delimits this frame.
  int GetStackHeight() const {
    bool is_top_frame =
        static_cast<size_t>(index_) + 1 == thread_->frames_.size();
    size_t stack_limit =
        is_top_frame ? thread_->StackHeight() : thread_->frames_[index_ + 1].sp;
    DCHECK_LE(frame()->sp, stack_limit);
    size_t frame_size = stack_limit - frame()->sp;
    DCHECK_LE(GetLocalCount(), frame_size);
    return static_cast<int>(frame_size) - GetLocalCount();
  }

  WasmValue GetLocalValue(int index) const {
    DCHECK_LE(0, index);
    DCHECK_GT(GetLocalCount(), index);
    return thread_->GetStackValue(static_cast<int>(frame()->sp) + index);
  }

  WasmValue GetStackValue(int index) const {
    DCHECK_LE(0, index);
    // Index must be within the number of stack values of this frame.
    DCHECK_GT(GetStackHeight(), index);
    // Stack values live after this frame's locals.
    return thread_->GetStackValue(static_cast<int>(frame()->sp) +
                                  GetLocalCount() + index);
  }

 private:
  ThreadImpl* thread_;
  int index_;

  ThreadImpl::Frame* frame() const {
    DCHECK_GT(thread_->frames_.size(), index_);
    return &thread_->frames_[index_];
  }
};
|
|
|
|
|
2017-01-18 11:40:29 +00:00
|
|
|
// Converters between WasmInterpreter::Thread and WasmInterpreter::ThreadImpl.
// Thread* is the public interface, without knowledge of the object layout.
// This cast is potentially risky, but as long as we always cast it back before
// accessing any data, it should be fine. UBSan is not complaining.
WasmInterpreter::Thread* ToThread(ThreadImpl* impl) {
  return reinterpret_cast<WasmInterpreter::Thread*>(impl);
}
ThreadImpl* ToImpl(WasmInterpreter::Thread* thread) {
  return reinterpret_cast<ThreadImpl*>(thread);
}

// Same conversion for InterpretedFrame and InterpretedFrameImpl.
InterpretedFrame* ToFrame(InterpretedFrameImpl* impl) {
  return reinterpret_cast<InterpretedFrame*>(impl);
}
const InterpretedFrameImpl* ToImpl(const InterpretedFrame* frame) {
  return reinterpret_cast<const InterpretedFrameImpl*>(frame);
}
|
|
|
|
|
2017-01-18 11:40:29 +00:00
|
|
|
} // namespace
|
|
|
|
|
2017-01-18 10:23:20 +00:00
|
|
|
//============================================================================
|
2017-01-18 11:40:29 +00:00
|
|
|
// Implementation of the pimpl idiom for WasmInterpreter::Thread.
|
|
|
|
// Instead of placing a pointer to the ThreadImpl inside of the Thread object,
|
|
|
|
// we just reinterpret_cast them. ThreadImpls are only allocated inside this
|
|
|
|
// translation unit anyway.
|
2017-01-18 10:23:20 +00:00
|
|
|
//============================================================================
|
|
|
|
// All WasmInterpreter::Thread methods simply forward to the ThreadImpl that
// this object aliases (see ToImpl above); no state lives in Thread itself.
WasmInterpreter::State WasmInterpreter::Thread::state() {
  return ToImpl(this)->state();
}
void WasmInterpreter::Thread::InitFrame(const WasmFunction* function,
                                        WasmValue* args) {
  ToImpl(this)->InitFrame(function, args);
}
WasmInterpreter::State WasmInterpreter::Thread::Run(int num_steps) {
  return ToImpl(this)->Run(num_steps);
}
void WasmInterpreter::Thread::Pause() { return ToImpl(this)->Pause(); }
void WasmInterpreter::Thread::Reset() { return ToImpl(this)->Reset(); }
WasmInterpreter::Thread::ExceptionHandlingResult
WasmInterpreter::Thread::HandleException(Isolate* isolate) {
  return ToImpl(this)->HandleException(isolate);
}
pc_t WasmInterpreter::Thread::GetBreakpointPc() {
  return ToImpl(this)->GetBreakpointPc();
}
int WasmInterpreter::Thread::GetFrameCount() {
  return ToImpl(this)->GetFrameCount();
}
// Returns an owning pointer to a freshly allocated frame view; the FramePtr
// deleter is responsible for releasing it.
WasmInterpreter::FramePtr WasmInterpreter::Thread::GetFrame(int index) {
  DCHECK_LE(0, index);
  DCHECK_GT(GetFrameCount(), index);
  return FramePtr(ToFrame(new InterpretedFrameImpl(ToImpl(this), index)));
}
WasmValue WasmInterpreter::Thread::GetReturnValue(int index) {
  return ToImpl(this)->GetReturnValue(index);
}
TrapReason WasmInterpreter::Thread::GetTrapReason() {
  return ToImpl(this)->GetTrapReason();
}
bool WasmInterpreter::Thread::PossibleNondeterminism() {
  return ToImpl(this)->PossibleNondeterminism();
}
uint64_t WasmInterpreter::Thread::NumInterpretedCalls() {
  return ToImpl(this)->NumInterpretedCalls();
}
void WasmInterpreter::Thread::AddBreakFlags(uint8_t flags) {
  ToImpl(this)->AddBreakFlags(flags);
}
void WasmInterpreter::Thread::ClearBreakFlags() {
  ToImpl(this)->ClearBreakFlags();
}
uint32_t WasmInterpreter::Thread::NumActivations() {
  return ToImpl(this)->NumActivations();
}
uint32_t WasmInterpreter::Thread::StartActivation() {
  return ToImpl(this)->StartActivation();
}
void WasmInterpreter::Thread::FinishActivation(uint32_t id) {
  ToImpl(this)->FinishActivation(id);
}
uint32_t WasmInterpreter::Thread::ActivationFrameBase(uint32_t id) {
  return ToImpl(this)->ActivationFrameBase(id);
}
|
2017-01-18 10:23:20 +00:00
|
|
|
|
2016-05-25 08:32:37 +00:00
|
|
|
//============================================================================
|
|
|
|
// The implementation details of the interpreter.
|
|
|
|
//============================================================================
|
|
|
|
// Holds everything the interpreter owns: a private copy of the module bytes,
// the code map, and the (currently single) execution thread. Zone-allocated;
// destroyed explicitly by ~WasmInterpreter.
class WasmInterpreterInternals : public ZoneObject {
 public:
  // Create a copy of the module bytes for the interpreter, since the passed
  // pointer might be invalidated after constructing the interpreter.
  const ZoneVector<uint8_t> module_bytes_;
  CodeMap codemap_;
  ZoneVector<ThreadImpl> threads_;

  WasmInterpreterInternals(Isolate* isolate, Zone* zone,
                           const WasmModule* module,
                           const ModuleWireBytes& wire_bytes,
                           Handle<WasmInstanceObject> instance_object)
      : module_bytes_(wire_bytes.start(), wire_bytes.end(), zone),
        codemap_(isolate, module, module_bytes_.data(), zone),
        threads_(zone) {
    // Single interpreter thread for now (see GetThreadCount()).
    threads_.emplace_back(zone, &codemap_, instance_object);
  }
};
|
|
|
|
|
2018-04-06 10:18:18 +00:00
|
|
|
namespace {
|
|
|
|
// TODO(wasm): a finalizer is only required to delete the global handle.
|
|
|
|
// TODO(wasm): a finalizer is only required to delete the global handle.
// Weak callback: destroys the global handle once the instance object dies.
void GlobalHandleDeleter(const v8::WeakCallbackInfo<void>& data) {
  GlobalHandles::Destroy(reinterpret_cast<Object**>(
      reinterpret_cast<JSObject**>(data.GetParameter())));
}

// Wraps the instance object in a weak global handle so the interpreter does
// not keep the instance alive; the finalizer above cleans up the handle.
Handle<WasmInstanceObject> MakeWeak(
    Isolate* isolate, Handle<WasmInstanceObject> instance_object) {
  Handle<Object> handle = isolate->global_handles()->Create(*instance_object);
  // TODO(wasm): use a phantom handle in the WasmInterpreter.
  GlobalHandles::MakeWeak(handle.location(), handle.location(),
                          &GlobalHandleDeleter,
                          v8::WeakCallbackType::kFinalizer);
  return Handle<WasmInstanceObject>::cast(handle);
}
|
|
|
|
} // namespace
|
|
|
|
|
2016-05-25 08:32:37 +00:00
|
|
|
//============================================================================
|
|
|
|
// Implementation of the public interface of the interpreter.
|
|
|
|
//============================================================================
|
2017-08-07 17:17:06 +00:00
|
|
|
// The internals are placement-new'ed into the interpreter's zone; the
// instance object is weakened first so the interpreter does not root it.
WasmInterpreter::WasmInterpreter(Isolate* isolate, const WasmModule* module,
                                 const ModuleWireBytes& wire_bytes,
                                 Handle<WasmInstanceObject> instance_object)
    : zone_(isolate->allocator(), ZONE_NAME),
      internals_(new (&zone_) WasmInterpreterInternals(
          isolate, &zone_, module, wire_bytes,
          MakeWeak(isolate, instance_object))) {}
|
2016-05-25 08:32:37 +00:00
|
|
|
|
2017-04-04 08:48:32 +00:00
|
|
|
// The internals were placement-new'ed into zone_ (see the constructor), so
// only the destructor is invoked here; the zone reclaims the memory.
WasmInterpreter::~WasmInterpreter() { internals_->~WasmInterpreterInternals(); }

void WasmInterpreter::Run() { internals_->threads_[0].Run(); }

void WasmInterpreter::Pause() { internals_->threads_[0].Pause(); }
|
2016-05-25 08:32:37 +00:00
|
|
|
|
2016-05-30 10:02:34 +00:00
|
|
|
// Enables or disables a breakpoint at {pc} within {function} by patching a
// private copy of the function body. Returns whether a breakpoint was
// previously set at that pc; returns false if {pc} is out of bounds.
bool WasmInterpreter::SetBreakpoint(const WasmFunction* function, pc_t pc,
                                    bool enabled) {
  InterpreterCode* code = internals_->codemap_.GetCode(function);
  size_t size = static_cast<size_t>(code->end - code->start);
  // Check bounds for {pc}.
  if (pc < code->locals.encoded_size || pc >= size) return false;
  // Make a copy of the code before enabling a breakpoint.
  if (enabled && code->orig_start == code->start) {
    code->start = reinterpret_cast<byte*>(zone_.New(size));
    memcpy(code->start, code->orig_start, size);
    code->end = code->start + size;
  }
  bool prev = code->start[pc] == kInternalBreakpoint;
  if (enabled) {
    // Overwrite the opcode with the internal breakpoint marker.
    code->start[pc] = kInternalBreakpoint;
  } else {
    // Restore the original byte from the pristine copy.
    code->start[pc] = code->orig_start[pc];
  }
  return prev;
}
|
|
|
|
|
2016-05-30 10:02:34 +00:00
|
|
|
bool WasmInterpreter::GetBreakpoint(const WasmFunction* function, pc_t pc) {
|
2017-03-14 15:54:43 +00:00
|
|
|
InterpreterCode* code = internals_->codemap_.GetCode(function);
|
2016-05-30 10:02:34 +00:00
|
|
|
size_t size = static_cast<size_t>(code->end - code->start);
|
2016-05-25 08:32:37 +00:00
|
|
|
// Check bounds for {pc}.
|
2017-01-06 22:24:56 +00:00
|
|
|
if (pc < code->locals.encoded_size || pc >= size) return false;
|
2016-05-25 08:32:37 +00:00
|
|
|
// Check if a breakpoint is present at that place in the code.
|
|
|
|
return code->start[pc] == kInternalBreakpoint;
|
|
|
|
}
|
|
|
|
|
|
|
|
// Per-function tracing is not implemented yet; calling this always aborts
// via UNIMPLEMENTED(). The return is only there to satisfy the signature.
bool WasmInterpreter::SetTracing(const WasmFunction* function, bool enabled) {
  UNIMPLEMENTED();
  return false;
}
|
|
|
|
|
|
|
|
int WasmInterpreter::GetThreadCount() {
  // The interpreter currently runs exactly one thread.
  return 1;
}
|
|
|
|
|
2016-06-09 14:22:05 +00:00
|
|
|
WasmInterpreter::Thread* WasmInterpreter::GetThread(int id) {
  // Only thread 0 exists for now.
  CHECK_EQ(0, id);
  auto* impl = &internals_->threads_[id];
  return ToThread(impl);
}
|
|
|
|
|
2017-03-14 15:54:43 +00:00
|
|
|
void WasmInterpreter::AddFunctionForTesting(const WasmFunction* function) {
  // Register {function} in the code map without attaching any code yet.
  internals_->codemap_.AddFunction(function, nullptr, nullptr);
}
|
|
|
|
|
2017-03-14 15:54:43 +00:00
|
|
|
void WasmInterpreter::SetFunctionCodeForTesting(const WasmFunction* function,
|
2016-05-25 08:32:37 +00:00
|
|
|
const byte* start,
|
|
|
|
const byte* end) {
|
2017-03-14 15:54:43 +00:00
|
|
|
internals_->codemap_.SetFunctionCode(function, start, end);
|
2016-05-25 08:32:37 +00:00
|
|
|
}
|
|
|
|
|
2018-02-21 11:42:57 +00:00
|
|
|
void WasmInterpreter::SetCallIndirectTestMode() {
  // Test-only switch: route indirect calls through the module.
  internals_->codemap_.set_call_indirect_through_module(true);
}
|
|
|
|
|
2016-05-25 08:32:37 +00:00
|
|
|
ControlTransferMap WasmInterpreter::ComputeControlTransfersForTesting(
    Zone* zone, const WasmModule* module, const byte* start, const byte* end) {
  // Wrap the raw byte range in minimal dummy structures so the regular
  // side-table computation can run without special-casing tests.
  FunctionSig dummy_sig(0, 0, nullptr);
  WasmFunction dummy_function{&dummy_sig, 0, 0, {0, 0}, false, false};
  InterpreterCode wrapper{&dummy_function, BodyLocalDecls(zone), start, end,
                          nullptr,         nullptr,              nullptr};

  // The side table holds the computed control transfers.
  SideTable side_table(zone, module, &wrapper);
  return side_table.map_;
}
|
|
|
|
|
2017-01-20 12:58:14 +00:00
|
|
|
//============================================================================
|
|
|
|
// Implementation of the frame inspection interface.
|
|
|
|
//============================================================================
|
2017-04-11 13:04:13 +00:00
|
|
|
const WasmFunction* InterpretedFrame::function() const {
  // Forward to the implementation object behind this facade.
  return ToImpl(this)->function();
}
|
|
|
|
int InterpretedFrame::pc() const {
  // Forward to the implementation object behind this facade.
  return ToImpl(this)->pc();
}
|
2017-01-20 12:58:14 +00:00
|
|
|
int InterpretedFrame::GetParameterCount() const {
|
2017-04-11 13:04:13 +00:00
|
|
|
return ToImpl(this)->GetParameterCount();
|
2017-01-20 12:58:14 +00:00
|
|
|
}
|
2017-04-11 13:04:13 +00:00
|
|
|
int InterpretedFrame::GetLocalCount() const {
|
|
|
|
return ToImpl(this)->GetLocalCount();
|
2017-01-20 12:58:14 +00:00
|
|
|
}
|
2017-04-11 13:04:13 +00:00
|
|
|
int InterpretedFrame::GetStackHeight() const {
|
|
|
|
return ToImpl(this)->GetStackHeight();
|
|
|
|
}
|
2017-07-14 13:49:01 +00:00
|
|
|
WasmValue InterpretedFrame::GetLocalValue(int index) const {
  // Forward to the implementation object behind this facade.
  return ToImpl(this)->GetLocalValue(index);
}
|
2017-07-14 13:49:01 +00:00
|
|
|
WasmValue InterpretedFrame::GetStackValue(int index) const {
  // Forward to the implementation object behind this facade.
  return ToImpl(this)->GetStackValue(index);
}
|
2018-05-02 10:42:31 +00:00
|
|
|
void InterpretedFrameDeleter::operator()(InterpretedFrame* ptr) {
  // {InterpretedFrame} is only a facade; delete the implementation object,
  // which the facade pointer actually points to.
  delete ToImpl(ptr);
}
|
2017-01-20 12:58:14 +00:00
|
|
|
|
2017-08-09 08:11:21 +00:00
|
|
|
#undef TRACE
|
2017-09-08 13:42:59 +00:00
|
|
|
#undef FOREACH_INTERNAL_OPCODE
|
|
|
|
#undef WASM_CTYPES
|
|
|
|
#undef FOREACH_SIMPLE_BINOP
|
|
|
|
#undef FOREACH_OTHER_BINOP
|
2018-01-16 19:32:52 +00:00
|
|
|
#undef FOREACH_I32CONV_FLOATOP
|
2017-09-08 13:42:59 +00:00
|
|
|
#undef FOREACH_OTHER_UNOP
|
2017-08-09 08:11:21 +00:00
|
|
|
|
2016-05-25 08:32:37 +00:00
|
|
|
} // namespace wasm
|
|
|
|
} // namespace internal
|
|
|
|
} // namespace v8
|