// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_INTERPRETER_INTERPRETER_ASSEMBLER_H_
#define V8_INTERPRETER_INTERPRETER_ASSEMBLER_H_

#include "src/builtins/builtins.h"
#include "src/codegen/code-stub-assembler.h"
#include "src/common/globals.h"
#include "src/interpreter/bytecode-register.h"
#include "src/interpreter/bytecodes.h"
#include "src/runtime/runtime.h"
#include "src/utils/allocation.h"

namespace v8 {
namespace internal {
namespace interpreter {

class V8_EXPORT_PRIVATE InterpreterAssembler : public CodeStubAssembler {
 public:
  InterpreterAssembler(compiler::CodeAssemblerState* state, Bytecode bytecode,
                       OperandScale operand_scale);
  ~InterpreterAssembler();

  // Returns the 32-bit unsigned count immediate for bytecode operand
  // |operand_index| in the current bytecode.
  compiler::Node* BytecodeOperandCount(int operand_index);
  // Returns the 32-bit unsigned flag for bytecode operand |operand_index|
  // in the current bytecode.
  compiler::Node* BytecodeOperandFlag(int operand_index);
  // Returns the 32-bit zero-extended index immediate for bytecode operand
  // |operand_index| in the current bytecode.
  compiler::Node* BytecodeOperandIdxInt32(int operand_index);
  // Returns the word zero-extended index immediate for bytecode operand
  // |operand_index| in the current bytecode.
  compiler::Node* BytecodeOperandIdx(int operand_index);
  // Returns the smi index immediate for bytecode operand |operand_index|
  // in the current bytecode.
  compiler::Node* BytecodeOperandIdxSmi(int operand_index);
  // Returns the 32-bit unsigned immediate for bytecode operand |operand_index|
  // in the current bytecode.
  compiler::Node* BytecodeOperandUImm(int operand_index);
  // Returns the word-size unsigned immediate for bytecode operand
  // |operand_index| in the current bytecode.
  compiler::Node* BytecodeOperandUImmWord(int operand_index);
  // Returns the unsigned smi immediate for bytecode operand |operand_index| in
  // the current bytecode.
  compiler::Node* BytecodeOperandUImmSmi(int operand_index);
  // Returns the 32-bit signed immediate for bytecode operand |operand_index|
  // in the current bytecode.
  compiler::Node* BytecodeOperandImm(int operand_index);
  // Returns the word-size signed immediate for bytecode operand
  // |operand_index| in the current bytecode.
  compiler::Node* BytecodeOperandImmIntPtr(int operand_index);
  // Returns the smi immediate for bytecode operand |operand_index| in the
  // current bytecode.
  compiler::Node* BytecodeOperandImmSmi(int operand_index);
  // Returns the 32-bit unsigned runtime id immediate for bytecode operand
  // |operand_index| in the current bytecode.
  compiler::Node* BytecodeOperandRuntimeId(int operand_index);
  // Returns the 32-bit unsigned native context index immediate for bytecode
  // operand |operand_index| in the current bytecode.
  compiler::Node* BytecodeOperandNativeContextIndex(int operand_index);
  // Returns the 32-bit unsigned intrinsic id immediate for bytecode operand
  // |operand_index| in the current bytecode.
  compiler::Node* BytecodeOperandIntrinsicId(int operand_index);
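
  // Usage sketch (hypothetical handler code, not a declaration in this
  // header): a bytecode handler reads each operand through the accessor
  // matching that operand's declared type in bytecodes.h, keyed by its
  // position, e.g.:
  //
  //   compiler::Node* slot_index = BytecodeOperandIdx(0);
  //   compiler::Node* flags = BytecodeOperandFlag(1);
  //
  // The operand indices are fixed by the bytecode's operand layout; using an
  // accessor that does not match the operand's type is a bug.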

  // Accumulator.
  compiler::Node* GetAccumulator();
  void SetAccumulator(compiler::Node* value);

  // Context.
  compiler::Node* GetContext();
  void SetContext(compiler::Node* value);

  // Context at |depth| in the context chain starting at |context|.
  compiler::Node* GetContextAtDepth(compiler::Node* context,
                                    compiler::Node* depth);

  // Goto the given |target| if the context chain starting at |context| has any
  // extensions up to the given |depth|.
  void GotoIfHasContextExtensionUpToDepth(compiler::Node* context,
                                          compiler::Node* depth,
                                          Label* target);
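
  // Usage sketch (hypothetical handler code): a lookup-slot style handler can
  // combine the two helpers above, bailing out to a slow path if any context
  // between the current one and |depth| has an extension:
  //
  //   compiler::Node* context = GetContext();
  //   compiler::Node* depth = BytecodeOperandUImm(2);
  //   Label slowpath(this, Label::kDeferred);
  //   GotoIfHasContextExtensionUpToDepth(context, depth, &slowpath);
  //   compiler::Node* slot_context = GetContextAtDepth(context, depth);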

  // A RegListNodePair provides an abstraction over lists of registers.
  class RegListNodePair {
   public:
    RegListNodePair(Node* base_reg_location, Node* reg_count)
        : base_reg_location_(base_reg_location), reg_count_(reg_count) {}

    compiler::Node* reg_count() const { return reg_count_; }
    compiler::Node* base_reg_location() const { return base_reg_location_; }

   private:
    compiler::Node* base_reg_location_;
    compiler::Node* reg_count_;
  };
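
  // Usage sketch (hypothetical handler code): handlers typically do not
  // construct a RegListNodePair directly; they obtain one from a
  // register-list operand and pass it on to a call helper:
  //
  //   RegListNodePair args = GetRegisterListAtOperandIndex(1);
  //   compiler::Node* arg_count = args.reg_count();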

  // Backup/restore register file to/from a fixed array of the correct length.
  // There is an asymmetry between suspend/export and resume/import.
  // - Suspend copies arguments and registers to the generator.
  // - Resume copies only the registers from the generator, the arguments
  //   are copied by the ResumeGenerator trampoline.
  compiler::Node* ExportParametersAndRegisterFile(
      TNode<FixedArray> array, const RegListNodePair& registers,
      TNode<Int32T> formal_parameter_count);
  compiler::Node* ImportRegisterFile(TNode<FixedArray> array,
                                     const RegListNodePair& registers,
                                     TNode<Int32T> formal_parameter_count);

  // Loads from and stores to the interpreter register file.
  compiler::Node* LoadRegister(Register reg);
  compiler::Node* LoadAndUntagRegister(Register reg);
  compiler::Node* LoadRegisterAtOperandIndex(int operand_index);
  std::pair<compiler::Node*, compiler::Node*> LoadRegisterPairAtOperandIndex(
      int operand_index);
  void StoreRegister(compiler::Node* value, Register reg);
  void StoreAndTagRegister(compiler::Node* value, Register reg);
  void StoreRegisterAtOperandIndex(compiler::Node* value, int operand_index);
  void StoreRegisterPairAtOperandIndex(compiler::Node* value1,
                                       compiler::Node* value2,
                                       int operand_index);
  void StoreRegisterTripleAtOperandIndex(compiler::Node* value1,
                                         compiler::Node* value2,
                                         compiler::Node* value3,
                                         int operand_index);

  RegListNodePair GetRegisterListAtOperandIndex(int operand_index);
  Node* LoadRegisterFromRegisterList(const RegListNodePair& reg_list,
                                     int index);
  Node* RegisterLocationInRegisterList(const RegListNodePair& reg_list,
                                       int index);
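
  // Usage sketch (hypothetical handler body for a register-to-register move,
  // in the spirit of the Mov bytecode):
  //
  //   compiler::Node* src_value = LoadRegisterAtOperandIndex(0);
  //   StoreRegisterAtOperandIndex(src_value, 1);
  //   Dispatch();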

  // Load constant at the index specified in operand |operand_index| from the
  // constant pool.
  compiler::Node* LoadConstantPoolEntryAtOperandIndex(int operand_index);
  // Load and untag constant at the index specified in operand |operand_index|
  // from the constant pool.
  TNode<IntPtrT> LoadAndUntagConstantPoolEntryAtOperandIndex(int operand_index);
  // Load constant at |index| in the constant pool.
  compiler::Node* LoadConstantPoolEntry(compiler::Node* index);
  // Load and untag constant at |index| in the constant pool.
  TNode<IntPtrT> LoadAndUntagConstantPoolEntry(compiler::Node* index);
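
  // Usage sketch (hypothetical handler in the spirit of LdaConstant): load
  // the constant referenced by the bytecode's index operand into the
  // accumulator:
  //
  //   compiler::Node* constant = LoadConstantPoolEntryAtOperandIndex(0);
  //   SetAccumulator(constant);
  //   Dispatch();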

  // Load the FeedbackVector for the current function. The returned node could
  // be undefined.
  compiler::TNode<HeapObject> LoadFeedbackVector();

  // Increment the call count for a CALL_IC or construct call.
  // The call count is located at feedback_vector[slot_id + 1].
  void IncrementCallCount(compiler::Node* feedback_vector,
                          compiler::Node* slot_id);

  // Collect the callable |target| feedback for either a CALL_IC or
  // an INSTANCEOF_IC in the |feedback_vector| at |slot_id|.
  void CollectCallableFeedback(compiler::Node* target, compiler::Node* context,
                               compiler::Node* feedback_vector,
                               compiler::Node* slot_id);

  // Collect CALL_IC feedback for |target| function in the
  // |feedback_vector| at |slot_id|, and the call counts in
  // the |feedback_vector| at |slot_id+1|.
  void CollectCallFeedback(compiler::Node* target, compiler::Node* context,
                           compiler::Node* maybe_feedback_vector,
                           compiler::Node* slot_id);

  // Call JSFunction or Callable |function| with |args| arguments, possibly
  // including the receiver depending on |receiver_mode|. After the call
  // returns, directly dispatches to the next bytecode.
  void CallJSAndDispatch(compiler::Node* function, compiler::Node* context,
                         const RegListNodePair& args,
                         ConvertReceiverMode receiver_mode);
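
  // Usage sketch (hypothetical handler for a Call-style bytecode, composed
  // from helpers declared in this class; the operand indices assume a
  // (callable, reg_list, reg_count, slot) operand layout):
  //
  //   compiler::Node* function = LoadRegisterAtOperandIndex(0);
  //   RegListNodePair args = GetRegisterListAtOperandIndex(1);
  //   compiler::Node* slot_id = BytecodeOperandIdx(3);
  //   CollectCallFeedback(function, GetContext(), LoadFeedbackVector(),
  //                       slot_id);
  //   CallJSAndDispatch(function, GetContext(), args,
  //                     ConvertReceiverMode::kAny);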

  // Call JSFunction or Callable |function| with |arg_count| arguments (not
  // including receiver) passed as |args|, possibly including the receiver
  // depending on |receiver_mode|. After the call returns, directly dispatches
  // to the next bytecode.
  template <class... TArgs>
  void CallJSAndDispatch(Node* function, Node* context, Node* arg_count,
                         ConvertReceiverMode receiver_mode, TArgs... args);

  // Call JSFunction or Callable |function| with |args| arguments (not
  // including receiver), with the final argument being spread. After the call
  // returns, directly dispatches to the next bytecode.
  void CallJSWithSpreadAndDispatch(compiler::Node* function,
                                   compiler::Node* context,
                                   const RegListNodePair& args,
                                   compiler::Node* slot_id,
                                   compiler::Node* feedback_vector);

  // Call constructor |target| with |args| arguments (not including receiver).
  // The |new_target| is the same as the |target| for the new keyword, but
  // differs for the super keyword.
  compiler::Node* Construct(compiler::Node* target, compiler::Node* context,
                            compiler::Node* new_target,
                            const RegListNodePair& args,
                            compiler::Node* slot_id,
                            compiler::Node* feedback_vector);

  // Call constructor |target| with |args| arguments (not including
  // receiver). The last argument is always a spread. The |new_target| is the
  // same as the |target| for the new keyword, but differs for the super
  // keyword.
  compiler::Node* ConstructWithSpread(compiler::Node* target,
                                      compiler::Node* context,
                                      compiler::Node* new_target,
                                      const RegListNodePair& args,
                                      compiler::Node* slot_id,
                                      compiler::Node* feedback_vector);

  // Call runtime function with |args| arguments, which will return
  // |return_size| values.
  compiler::Node* CallRuntimeN(compiler::Node* function_id,
                               compiler::Node* context,
                               const RegListNodePair& args,
                               int return_size = 1);
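
  // Usage sketch (hypothetical handler in the spirit of CallRuntime):
  //
  //   compiler::Node* function_id = BytecodeOperandRuntimeId(0);
  //   RegListNodePair args = GetRegisterListAtOperandIndex(1);
  //   compiler::Node* result = CallRuntimeN(function_id, GetContext(), args);
  //   SetAccumulator(result);
  //   Dispatch();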

  // Jump forward relative to the current bytecode by the |jump_offset|.
  compiler::Node* Jump(compiler::Node* jump_offset);

  // Jump backward relative to the current bytecode by the |jump_offset|.
  compiler::Node* JumpBackward(compiler::Node* jump_offset);

  // Jump forward relative to the current bytecode by |jump_offset| if the
  // word values |lhs| and |rhs| are equal.
  void JumpIfWordEqual(compiler::Node* lhs, compiler::Node* rhs,
                       compiler::Node* jump_offset);

  // Jump forward relative to the current bytecode by |jump_offset| if the
  // word values |lhs| and |rhs| are not equal.
  void JumpIfWordNotEqual(compiler::Node* lhs, compiler::Node* rhs,
                          compiler::Node* jump_offset);
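
  // Usage sketch (hypothetical handler in the spirit of JumpIfTrue): compare
  // the accumulator against a known value and take the relative jump encoded
  // in the operand:
  //
  //   compiler::Node* accumulator = GetAccumulator();
  //   compiler::Node* relative_jump = BytecodeOperandUImmWord(0);
  //   JumpIfWordEqual(accumulator, TrueConstant(), relative_jump);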

  // Updates the profiler interrupt budget for a return.
  void UpdateInterruptBudgetOnReturn();

  // Returns the OSR nesting level from the bytecode header.
  compiler::Node* LoadOsrNestingLevel();

  // Dispatch to the bytecode.
  compiler::Node* Dispatch();

  // Dispatch bytecode as wide operand variant.
  void DispatchWide(OperandScale operand_scale);

  // Dispatch to |target_bytecode| at |new_bytecode_offset|.
  // |target_bytecode| should be equivalent to loading from the offset.
  compiler::Node* DispatchToBytecode(compiler::Node* target_bytecode,
                                     compiler::Node* new_bytecode_offset);

  // Abort with the given abort reason.
  void Abort(AbortReason abort_reason);
  void AbortIfWordNotEqual(compiler::Node* lhs, compiler::Node* rhs,
                           AbortReason abort_reason);
  // Abort if |register_count| is invalid for the given register file array.
  void AbortIfRegisterCountInvalid(compiler::Node* parameters_and_registers,
                                   compiler::Node* formal_parameter_count,
                                   compiler::Node* register_count);

  // Dispatch to frame dropper trampoline if necessary.
  void MaybeDropFrames(compiler::Node* context);

  // Returns the offset from the BytecodeArrayPointer of the current bytecode.
  compiler::Node* BytecodeOffset();

 protected:
  Bytecode bytecode() const { return bytecode_; }
  static bool TargetSupportsUnalignedAccess();

  void ToNumberOrNumeric(Object::Conversion mode);

 private:
  // Returns a tagged pointer to the current function's BytecodeArray object.
  compiler::Node* BytecodeArrayTaggedPointer();

  // Returns a raw pointer to the first entry in the interpreter dispatch
  // table.
  compiler::Node* DispatchTableRawPointer();

  // Returns the accumulator value without checking whether the bytecode
  // uses it. This is intended to be used only in dispatch and in
  // tracing as these need to bypass accumulator use validity checks.
  compiler::Node* GetAccumulatorUnchecked();

  // Returns the frame pointer for the interpreted frame of the function being
  // interpreted.
  compiler::Node* GetInterpretedFramePointer();

  // Operations on registers.
  compiler::Node* RegisterLocation(Register reg);
  compiler::Node* RegisterLocation(compiler::Node* reg_index);
  compiler::Node* NextRegister(compiler::Node* reg_index);
  compiler::Node* LoadRegister(Node* reg_index);
  void StoreRegister(compiler::Node* value, compiler::Node* reg_index);

  // Saves and restores interpreter bytecode offset to the interpreter stack
  // frame when performing a call.
  void CallPrologue();
  void CallEpilogue();

  // Increment the dispatch counter for the (current, next) bytecode pair.
  void TraceBytecodeDispatch(compiler::Node* target_index);

  // Traces the current bytecode by calling |function_id|.
  void TraceBytecode(Runtime::FunctionId function_id);

  // Updates the bytecode array's interrupt budget by a 32-bit unsigned
  // |weight| and calls Runtime::kInterrupt if the counter reaches zero. If
  // |backward|, then the interrupt budget is decremented, otherwise it is
  // incremented.
  void UpdateInterruptBudget(compiler::Node* weight, bool backward);

  // Returns the offset of register |index| relative to RegisterFilePointer().
  compiler::Node* RegisterFrameOffset(compiler::Node* index);

  // Returns the offset of an operand relative to the current bytecode offset.
  compiler::Node* OperandOffset(int operand_index);

  // Returns a value built from a sequence of bytes in the bytecode
  // array starting at |relative_offset| from the current bytecode.
  // The |result_type| determines the size and signedness of the
  // value read. This method should only be used on architectures that
  // do not support unaligned memory accesses.
  compiler::Node* BytecodeOperandReadUnaligned(
      int relative_offset, MachineType result_type,
      LoadSensitivity needs_poisoning = LoadSensitivity::kCritical);

  // Returns zero- or sign-extended to word32 value of the operand.
  compiler::Node* BytecodeOperandUnsignedByte(
      int operand_index,
      LoadSensitivity needs_poisoning = LoadSensitivity::kCritical);
  compiler::Node* BytecodeOperandSignedByte(
      int operand_index,
      LoadSensitivity needs_poisoning = LoadSensitivity::kCritical);
  compiler::Node* BytecodeOperandUnsignedShort(
      int operand_index,
      LoadSensitivity needs_poisoning = LoadSensitivity::kCritical);
  compiler::Node* BytecodeOperandSignedShort(
      int operand_index,
      LoadSensitivity needs_poisoning = LoadSensitivity::kCritical);
  compiler::Node* BytecodeOperandUnsignedQuad(
      int operand_index,
      LoadSensitivity needs_poisoning = LoadSensitivity::kCritical);
  compiler::Node* BytecodeOperandSignedQuad(
      int operand_index,
      LoadSensitivity needs_poisoning = LoadSensitivity::kCritical);

  // Returns zero- or sign-extended to word32 value of the operand of
  // given size.
  compiler::Node* BytecodeSignedOperand(
      int operand_index, OperandSize operand_size,
      LoadSensitivity needs_poisoning = LoadSensitivity::kCritical);
  compiler::Node* BytecodeUnsignedOperand(
      int operand_index, OperandSize operand_size,
      LoadSensitivity needs_poisoning = LoadSensitivity::kCritical);

  // Returns the word-size sign-extended register index for bytecode operand
  // |operand_index| in the current bytecode. Value is not poisoned on
  // speculation since the value loaded from the register is poisoned instead.
  compiler::Node* BytecodeOperandReg(
      int operand_index,
      LoadSensitivity needs_poisoning = LoadSensitivity::kCritical);

  // Returns the word zero-extended index immediate for bytecode operand
  // |operand_index| in the current bytecode for use when loading a constant
  // pool entry.
  compiler::Node* BytecodeOperandConstantPoolIdx(
      int operand_index,
      LoadSensitivity needs_poisoning = LoadSensitivity::kCritical);

  // Jump relative to the current bytecode by the |jump_offset|. If |backward|,
  // then jump backward (subtract the offset), otherwise jump forward (add the
  // offset). Helper function for Jump and JumpBackward.
  compiler::Node* Jump(compiler::Node* jump_offset, bool backward);

  // Jump forward relative to the current bytecode by |jump_offset| if the
  // |condition| is true. Helper function for JumpIfWordEqual and
  // JumpIfWordNotEqual.
  void JumpConditional(compiler::Node* condition, compiler::Node* jump_offset);

  // Save the bytecode offset to the interpreter frame.
  void SaveBytecodeOffset();
  // Reload the bytecode offset from the interpreter frame.
  Node* ReloadBytecodeOffset();

  // Updates and returns BytecodeOffset() advanced by the current bytecode's
  // size. Traces the exit of the current bytecode.
  compiler::Node* Advance();

  // Updates and returns BytecodeOffset() advanced by delta bytecodes.
  // Traces the exit of the current bytecode.
  compiler::Node* Advance(int delta);
  compiler::Node* Advance(compiler::Node* delta, bool backward = false);

  // Load the bytecode at |bytecode_offset|.
  compiler::Node* LoadBytecode(compiler::Node* bytecode_offset);

  // Look ahead for Star and inline it in a branch. Returns a new target
  // bytecode node for dispatch.
  compiler::Node* StarDispatchLookahead(compiler::Node* target_bytecode);

  // Build code for Star at the current BytecodeOffset() and Advance() to the
  // next dispatch offset.
  void InlineStar();

  // Dispatch to the bytecode handler with code offset |handler|.
  compiler::Node* DispatchToBytecodeHandler(compiler::Node* handler,
                                            compiler::Node* bytecode_offset,
                                            compiler::Node* target_bytecode);

  // Dispatch to the bytecode handler with code entry point |handler_entry|.
  compiler::Node* DispatchToBytecodeHandlerEntry(
      compiler::Node* handler_entry, compiler::Node* bytecode_offset,
      compiler::Node* target_bytecode);

  int CurrentBytecodeSize() const;

  OperandScale operand_scale() const { return operand_scale_; }

  Bytecode bytecode_;
  OperandScale operand_scale_;
  CodeStubAssembler::Variable interpreted_frame_pointer_;
  CodeStubAssembler::Variable bytecode_array_;
  CodeStubAssembler::Variable bytecode_offset_;
  CodeStubAssembler::Variable dispatch_table_;
  CodeStubAssembler::Variable accumulator_;
  AccumulatorUse accumulator_use_;
  bool made_call_;
  bool reloaded_frame_ptr_;
  bool bytecode_array_valid_;

  DISALLOW_COPY_AND_ASSIGN(InterpreterAssembler);
};

}  // namespace interpreter
}  // namespace internal
}  // namespace v8

#endif  // V8_INTERPRETER_INTERPRETER_ASSEMBLER_H_