// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include <limits>

#include "src/v8.h"

#include "src/ast/scopes.h"
#include "src/interpreter/bytecode-array-builder.h"
#include "src/interpreter/bytecode-array-iterator.h"
#include "src/interpreter/bytecode-jump-table.h"
#include "src/interpreter/bytecode-label.h"
#include "src/interpreter/bytecode-register-allocator.h"
#include "src/objects-inl.h"
#include "test/unittests/interpreter/bytecode-utils.h"
#include "test/unittests/test-utils.h"

namespace v8 {
namespace internal {
namespace interpreter {

class BytecodeArrayBuilderTest : public TestWithIsolateAndZone {
 public:
  BytecodeArrayBuilderTest() = default;
  ~BytecodeArrayBuilderTest() override = default;
};

using ToBooleanMode = BytecodeArrayBuilder::ToBooleanMode;

TEST_F(BytecodeArrayBuilderTest, AllBytecodesGenerated) {
  FeedbackVectorSpec feedback_spec(zone());
  BytecodeArrayBuilder builder(zone(), 1, 131, &feedback_spec);
  Factory* factory = isolate()->factory();
  AstValueFactory ast_factory(zone(), isolate()->ast_string_constants(),
                              isolate()->heap()->HashSeed());
  DeclarationScope scope(zone(), &ast_factory);

  CHECK_EQ(builder.locals_count(), 131);
  CHECK_EQ(builder.fixed_register_count(), 131);

  Register reg(0);
  Register other(reg.index() + 1);
  Register wide(128);
  RegisterList empty;
  RegisterList single = BytecodeUtils::NewRegisterList(0, 1);
  RegisterList pair = BytecodeUtils::NewRegisterList(0, 2);
  RegisterList triple = BytecodeUtils::NewRegisterList(0, 3);
  RegisterList reg_list = BytecodeUtils::NewRegisterList(0, 10);

  // Emit argument creation operations.
  builder.CreateArguments(CreateArgumentsType::kMappedArguments)
      .CreateArguments(CreateArgumentsType::kUnmappedArguments)
      .CreateArguments(CreateArgumentsType::kRestParameter);

  // Emit constant loads.
  builder.LoadLiteral(Smi::kZero)
      .StoreAccumulatorInRegister(reg)
      .LoadLiteral(Smi::FromInt(8))
      .CompareOperation(Token::Value::EQ, reg,
                        1)  // Prevent peephole optimization
                            // LdaSmi, Star -> LdrSmi.
      .StoreAccumulatorInRegister(reg)
      .LoadLiteral(Smi::FromInt(10000000))
      .StoreAccumulatorInRegister(reg)
      .LoadLiteral(ast_factory.GetOneByteString("A constant"))
      .StoreAccumulatorInRegister(reg)
      .LoadUndefined()
      .StoreAccumulatorInRegister(reg)
      .LoadNull()
      .StoreAccumulatorInRegister(reg)
      .LoadTheHole()
      .StoreAccumulatorInRegister(reg)
      .LoadTrue()
      .StoreAccumulatorInRegister(reg)
      .LoadFalse()
      .StoreAccumulatorInRegister(wide);

  // Emit Ldar and Star taking care to foil the register optimizer.
  builder.StackCheck(0)
      .LoadAccumulatorWithRegister(other)
      .BinaryOperation(Token::ADD, reg, 1)
      .StoreAccumulatorInRegister(reg)
      .LoadNull();

  // Emit register-register transfer.
  builder.MoveRegister(reg, other);
  builder.MoveRegister(reg, wide);

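  // Allocate the feedback vector slots referenced by the IC bytecodes that
  // are emitted below.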
  FeedbackSlot load_global_slot =
      feedback_spec.AddLoadGlobalICSlot(NOT_INSIDE_TYPEOF);
  FeedbackSlot load_global_typeof_slot =
      feedback_spec.AddLoadGlobalICSlot(INSIDE_TYPEOF);
  FeedbackSlot sloppy_store_global_slot =
      feedback_spec.AddStoreGlobalICSlot(LanguageMode::kSloppy);
  FeedbackSlot load_slot = feedback_spec.AddLoadICSlot();
  FeedbackSlot keyed_load_slot = feedback_spec.AddKeyedLoadICSlot();
  FeedbackSlot sloppy_store_slot =
      feedback_spec.AddStoreICSlot(LanguageMode::kSloppy);
  FeedbackSlot strict_store_slot =
      feedback_spec.AddStoreICSlot(LanguageMode::kStrict);
  FeedbackSlot sloppy_keyed_store_slot =
      feedback_spec.AddKeyedStoreICSlot(LanguageMode::kSloppy);
  FeedbackSlot strict_keyed_store_slot =
      feedback_spec.AddKeyedStoreICSlot(LanguageMode::kStrict);
  FeedbackSlot store_own_slot = feedback_spec.AddStoreOwnICSlot();
  FeedbackSlot store_array_element_slot =
      feedback_spec.AddStoreInArrayLiteralICSlot();

  // Emit global load / store operations.
  const AstRawString* name = ast_factory.GetOneByteString("var_name");
  builder
      .LoadGlobal(name, load_global_slot.ToInt(), TypeofMode::NOT_INSIDE_TYPEOF)
      .LoadGlobal(name, load_global_typeof_slot.ToInt(),
                  TypeofMode::INSIDE_TYPEOF)
      .StoreGlobal(name, sloppy_store_global_slot.ToInt());

  // Emit context operations.
  builder.PushContext(reg)
      .PopContext(reg)
      .LoadContextSlot(reg, 1, 0, BytecodeArrayBuilder::kMutableSlot)
      .StoreContextSlot(reg, 1, 0)
      .LoadContextSlot(reg, 2, 0, BytecodeArrayBuilder::kImmutableSlot)
      .StoreContextSlot(reg, 3, 0);

  // Emit context operations which operate on the local context.
  builder
      .LoadContextSlot(Register::current_context(), 1, 0,
                       BytecodeArrayBuilder::kMutableSlot)
      .StoreContextSlot(Register::current_context(), 1, 0)
      .LoadContextSlot(Register::current_context(), 2, 0,
                       BytecodeArrayBuilder::kImmutableSlot)
      .StoreContextSlot(Register::current_context(), 3, 0);

  // Emit load / store property operations.
  builder.LoadNamedProperty(reg, name, load_slot.ToInt())
      .LoadNamedPropertyNoFeedback(reg, name)
      .LoadKeyedProperty(reg, keyed_load_slot.ToInt())
      .StoreNamedProperty(reg, name, sloppy_store_slot.ToInt(),
                          LanguageMode::kSloppy)
      .StoreNamedPropertyNoFeedback(reg, name, LanguageMode::kStrict)
      .StoreNamedPropertyNoFeedback(reg, name, LanguageMode::kSloppy)
      .StoreKeyedProperty(reg, reg, sloppy_keyed_store_slot.ToInt(),
                          LanguageMode::kSloppy)
      .StoreNamedProperty(reg, name, strict_store_slot.ToInt(),
                          LanguageMode::kStrict)
      .StoreKeyedProperty(reg, reg, strict_keyed_store_slot.ToInt(),
                          LanguageMode::kStrict)
      .StoreNamedOwnProperty(reg, name, store_own_slot.ToInt())
      .StoreInArrayLiteral(reg, reg, store_array_element_slot.ToInt());

  // Emit load / store lookup slots.
  builder.LoadLookupSlot(name, TypeofMode::NOT_INSIDE_TYPEOF)
      .LoadLookupSlot(name, TypeofMode::INSIDE_TYPEOF)
      .StoreLookupSlot(name, LanguageMode::kSloppy, LookupHoistingMode::kNormal)
      .StoreLookupSlot(name, LanguageMode::kSloppy,
                       LookupHoistingMode::kLegacySloppy)
      .StoreLookupSlot(name, LanguageMode::kStrict,
                       LookupHoistingMode::kNormal);

  // Emit load / store lookup slots with context fast paths.
  builder.LoadLookupContextSlot(name, TypeofMode::NOT_INSIDE_TYPEOF, 1, 0)
      .LoadLookupContextSlot(name, TypeofMode::INSIDE_TYPEOF, 1, 0);

  // Emit load / store lookup slots with global fast paths.
  builder.LoadLookupGlobalSlot(name, TypeofMode::NOT_INSIDE_TYPEOF, 1, 0)
      .LoadLookupGlobalSlot(name, TypeofMode::INSIDE_TYPEOF, 1, 0);

  // Emit closure operations.
  builder.CreateClosure(0, 1, NOT_TENURED);

  // Emit create context operation.
  builder.CreateBlockContext(&scope);
  builder.CreateCatchContext(reg, &scope);
  builder.CreateFunctionContext(&scope, 1);
  builder.CreateEvalContext(&scope, 1);
  builder.CreateWithContext(reg, &scope);

  // Emit literal creation operations.
  builder.CreateRegExpLiteral(ast_factory.GetOneByteString("a"), 0, 0);
  builder.CreateArrayLiteral(0, 0, 0);
  builder.CreateObjectLiteral(0, 0, 0, reg);

  // Emit tagged template operations.
  builder.GetTemplateObject(0, 0);

  // Call operations.
  builder.CallAnyReceiver(reg, reg_list, 1)
      .CallProperty(reg, reg_list, 1)
      .CallProperty(reg, single, 1)
      .CallProperty(reg, pair, 1)
      .CallProperty(reg, triple, 1)
      .CallUndefinedReceiver(reg, reg_list, 1)
      .CallUndefinedReceiver(reg, empty, 1)
      .CallUndefinedReceiver(reg, single, 1)
      .CallUndefinedReceiver(reg, pair, 1)
      .CallRuntime(Runtime::kIsArray, reg)
      .CallRuntimeForPair(Runtime::kLoadLookupSlotForCall, reg_list, pair)
      .CallJSRuntime(Context::OBJECT_CREATE, reg_list)
      .CallWithSpread(reg, reg_list, 1)
      .CallNoFeedback(reg, reg_list);

  // Emit binary operator invocations.
  builder.BinaryOperation(Token::Value::ADD, reg, 1)
      .BinaryOperation(Token::Value::SUB, reg, 2)
      .BinaryOperation(Token::Value::MUL, reg, 3)
      .BinaryOperation(Token::Value::DIV, reg, 4)
      .BinaryOperation(Token::Value::MOD, reg, 5)
      .BinaryOperation(Token::Value::EXP, reg, 6);

  // Emit bitwise operator invocations
  builder.BinaryOperation(Token::Value::BIT_OR, reg, 6)
      .BinaryOperation(Token::Value::BIT_XOR, reg, 7)
      .BinaryOperation(Token::Value::BIT_AND, reg, 8);

  // Emit shift operator invocations
  builder.BinaryOperation(Token::Value::SHL, reg, 9)
      .BinaryOperation(Token::Value::SAR, reg, 10)
      .BinaryOperation(Token::Value::SHR, reg, 11);

  // Emit Smi binary operations.
  builder.BinaryOperationSmiLiteral(Token::Value::ADD, Smi::FromInt(42), 2)
      .BinaryOperationSmiLiteral(Token::Value::SUB, Smi::FromInt(42), 2)
      .BinaryOperationSmiLiteral(Token::Value::MUL, Smi::FromInt(42), 2)
      .BinaryOperationSmiLiteral(Token::Value::DIV, Smi::FromInt(42), 2)
      .BinaryOperationSmiLiteral(Token::Value::MOD, Smi::FromInt(42), 2)
      .BinaryOperationSmiLiteral(Token::Value::EXP, Smi::FromInt(42), 2)
      .BinaryOperationSmiLiteral(Token::Value::BIT_OR, Smi::FromInt(42), 2)
      .BinaryOperationSmiLiteral(Token::Value::BIT_XOR, Smi::FromInt(42), 2)
      .BinaryOperationSmiLiteral(Token::Value::BIT_AND, Smi::FromInt(42), 2)
      .BinaryOperationSmiLiteral(Token::Value::SHL, Smi::FromInt(42), 2)
      .BinaryOperationSmiLiteral(Token::Value::SAR, Smi::FromInt(42), 2)
      .BinaryOperationSmiLiteral(Token::Value::SHR, Smi::FromInt(42), 2);

  // Emit unary and count operator invocations.
  builder.UnaryOperation(Token::Value::INC, 1)
      .UnaryOperation(Token::Value::DEC, 1)
      .UnaryOperation(Token::Value::ADD, 1)
      .UnaryOperation(Token::Value::SUB, 1)
      .UnaryOperation(Token::Value::BIT_NOT, 1);

  // Emit unary operator invocations.
  builder.LogicalNot(ToBooleanMode::kConvertToBoolean)
      .LogicalNot(ToBooleanMode::kAlreadyBoolean)
      .TypeOf();

  // Emit delete
  builder.Delete(reg, LanguageMode::kSloppy).Delete(reg, LanguageMode::kStrict);

  // Emit construct.
  builder.Construct(reg, reg_list, 1).ConstructWithSpread(reg, reg_list, 1);

  // Emit test operator invocations.
  builder.CompareOperation(Token::Value::EQ, reg, 1)
      .CompareOperation(Token::Value::EQ_STRICT, reg, 2)
      .CompareOperation(Token::Value::LT, reg, 3)
      .CompareOperation(Token::Value::GT, reg, 4)
      .CompareOperation(Token::Value::LTE, reg, 5)
      .CompareOperation(Token::Value::GTE, reg, 6)
      .CompareTypeOf(TestTypeOfFlags::LiteralFlag::kNumber)
      .CompareOperation(Token::Value::INSTANCEOF, reg, 7)
      .CompareOperation(Token::Value::IN, reg)
      .CompareReference(reg)
      .CompareUndetectable()
      .CompareUndefined()
      .CompareNull();

  // Emit conversion operator invocations.
  builder.ToNumber(1).ToNumeric(1).ToObject(reg).ToName(reg).ToString();

  // Emit GetSuperConstructor.
  builder.GetSuperConstructor(reg);

  // Hole checks.
  builder.ThrowReferenceErrorIfHole(name)
      .ThrowSuperAlreadyCalledIfNotHole()
      .ThrowSuperNotCalledIfHole();

  // Short jumps with Imm8 operands
  {
    BytecodeLabel start, after_jump1, after_jump2, after_jump3, after_jump4,
        after_jump5, after_jump6, after_jump7, after_jump8, after_jump9,
        after_jump10;
    builder.Bind(&start)
        .Jump(&after_jump1)
        .Bind(&after_jump1)
        .JumpIfNull(&after_jump2)
        .Bind(&after_jump2)
        .JumpIfNotNull(&after_jump3)
        .Bind(&after_jump3)
        .JumpIfUndefined(&after_jump4)
        .Bind(&after_jump4)
        .JumpIfNotUndefined(&after_jump5)
        .Bind(&after_jump5)
        .JumpIfJSReceiver(&after_jump6)
        .Bind(&after_jump6)
        .JumpIfTrue(ToBooleanMode::kConvertToBoolean, &after_jump7)
        .Bind(&after_jump7)
        .JumpIfTrue(ToBooleanMode::kAlreadyBoolean, &after_jump8)
        .Bind(&after_jump8)
        .JumpIfFalse(ToBooleanMode::kConvertToBoolean, &after_jump9)
        .Bind(&after_jump9)
        .JumpIfFalse(ToBooleanMode::kAlreadyBoolean, &after_jump10)
        .Bind(&after_jump10)
        .JumpLoop(&start, 0);
  }

  // Longer jumps with constant operands
  BytecodeLabel end[10];
  {
    BytecodeLabel after_jump;
    builder.Jump(&end[0])
        .Bind(&after_jump)
        .JumpIfTrue(ToBooleanMode::kConvertToBoolean, &end[1])
        .JumpIfTrue(ToBooleanMode::kAlreadyBoolean, &end[2])
        .JumpIfFalse(ToBooleanMode::kConvertToBoolean, &end[3])
        .JumpIfFalse(ToBooleanMode::kAlreadyBoolean, &end[4])
        .JumpIfNull(&end[5])
        .JumpIfNotNull(&end[6])
        .JumpIfUndefined(&end[7])
        .JumpIfNotUndefined(&end[8])
        .LoadLiteral(ast_factory.prototype_string())
        .JumpIfJSReceiver(&end[9]);
  }

  // Emit Smi table switch bytecode.
  BytecodeJumpTable* jump_table = builder.AllocateJumpTable(1, 0);
  builder.SwitchOnSmiNoFeedback(jump_table).Bind(jump_table, 0);

  // Emit set pending message bytecode.
  builder.SetPendingMessage();

  // Emit stack check bytecode.
  builder.StackCheck(0);

  // Emit throw and re-throw in its own basic block so that the rest of the
  // code isn't omitted due to being dead.
  BytecodeLabel after_throw;
  builder.Throw().Bind(&after_throw);
  BytecodeLabel after_rethrow;
  builder.ReThrow().Bind(&after_rethrow);

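  // Emit for-in iteration bytecodes.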
  builder.ForInEnumerate(reg)
      .ForInPrepare(triple, 1)
      .ForInContinue(reg, reg)
      .ForInNext(reg, reg, pair, 1)
      .ForInStep(reg);

  // Wide constant pool loads
  for (int i = 0; i < 256; i++) {
    // Emit junk in constant pool to force wide constant pool index.
    builder.LoadLiteral(2.5321 + i);
  }
  builder.LoadLiteral(Smi::FromInt(20000000));
  const AstRawString* wide_name = ast_factory.GetOneByteString("var_wide_name");

  builder.StoreDataPropertyInLiteral(reg, reg,
                                     DataPropertyInLiteralFlag::kNoFlags, 0);

  // Emit wide context operations.
  builder.LoadContextSlot(reg, 1024, 0, BytecodeArrayBuilder::kMutableSlot)
      .StoreContextSlot(reg, 1024, 0);

  // Emit wide load / store lookup slots.
  builder.LoadLookupSlot(wide_name, TypeofMode::NOT_INSIDE_TYPEOF)
      .LoadLookupSlot(wide_name, TypeofMode::INSIDE_TYPEOF)
      .StoreLookupSlot(wide_name, LanguageMode::kSloppy,
                       LookupHoistingMode::kNormal)
      .StoreLookupSlot(wide_name, LanguageMode::kSloppy,
                       LookupHoistingMode::kLegacySloppy)
      .StoreLookupSlot(wide_name, LanguageMode::kStrict,
                       LookupHoistingMode::kNormal);

  // CreateClosureWide
  builder.CreateClosure(1000, 321, NOT_TENURED);

  // Emit wide variant of literal creation operations.
  builder
      .CreateRegExpLiteral(ast_factory.GetOneByteString("wide_literal"), 0, 0)
      .CreateArrayLiteral(0, 0, 0)
      .CreateEmptyArrayLiteral(0)
      .CreateArrayFromIterable()
      .CreateObjectLiteral(0, 0, 0, reg)
      .CreateEmptyObjectLiteral()
      .CloneObject(reg, 0, 0);

  // Emit load and store operations for module variables.
  builder.LoadModuleVariable(-1, 42)
      .LoadModuleVariable(0, 42)
      .LoadModuleVariable(1, 42)
      .StoreModuleVariable(-1, 42)
      .StoreModuleVariable(0, 42)
      .StoreModuleVariable(1, 42);

  // Emit generator operations.
  {
    // We have to skip over suspend because it returns and marks the remaining
    // bytecode dead.
    BytecodeLabel after_suspend;
    builder.JumpIfTrue(ToBooleanMode::kAlreadyBoolean, &after_suspend)
        .SuspendGenerator(reg, reg_list, 0)
        .Bind(&after_suspend)
        .ResumeGenerator(reg, reg_list);
  }
  BytecodeJumpTable* gen_jump_table = builder.AllocateJumpTable(1, 0);
  builder.SwitchOnGeneratorState(reg, gen_jump_table).Bind(gen_jump_table, 0);

  // Intrinsics handled by the interpreter.
  builder.CallRuntime(Runtime::kInlineIsArray, reg_list);

  // Emit debugger bytecode.
  builder.Debugger();

  // Emit abort bytecode.
  {
    BytecodeLabel after;
    builder.Abort(AbortReason::kOperandIsASmi).Bind(&after);
  }

  // Insert dummy ops to force longer jumps.
  for (int i = 0; i < 256; i++) {
    builder.Debugger();
  }

  // Emit block counter increments.
  builder.IncBlockCounter(0);

  // Bind labels for long jumps at the very end.
  for (size_t i = 0; i < arraysize(end); i++) {
    builder.Bind(&end[i]);
  }

  // Return must be the last instruction.
  builder.Return();

  // Generate BytecodeArray.
  scope.SetScriptScopeInfo(factory->NewScopeInfo(1));
  ast_factory.Internalize(isolate());
  Handle<BytecodeArray> the_array = builder.ToBytecodeArray(isolate());
  CHECK_EQ(the_array->frame_size(),
           builder.total_register_count() * kPointerSize);

  // Build scorecard of bytecodes encountered in the BytecodeArray.
  std::vector<int> scorecard(Bytecodes::ToByte(Bytecode::kLast) + 1);

  Bytecode final_bytecode = Bytecode::kLdaZero;
  int i = 0;
  while (i < the_array->length()) {
    uint8_t code = the_array->get(i);
    scorecard[code] += 1;
    final_bytecode = Bytecodes::FromByte(code);
    OperandScale operand_scale = OperandScale::kSingle;
    int prefix_offset = 0;
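    // Wide / ExtraWide prefix bytecodes scale the operands of the following
    // bytecode, so count both the prefix and the bytecode it prefixes.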
    if (Bytecodes::IsPrefixScalingBytecode(final_bytecode)) {
      operand_scale = Bytecodes::PrefixBytecodeToOperandScale(final_bytecode);
      prefix_offset = 1;
      code = the_array->get(i + 1);
      scorecard[code] += 1;
      final_bytecode = Bytecodes::FromByte(code);
    }
    i += prefix_offset + Bytecodes::Size(final_bytecode, operand_scale);
  }

  // Insert entry for illegal bytecode as this is never willingly emitted.
  scorecard[Bytecodes::ToByte(Bytecode::kIllegal)] = 1;

  // Bytecode for CollectTypeProfile is only emitted when
  // Type Information for DevTools is turned on.
  scorecard[Bytecodes::ToByte(Bytecode::kCollectTypeProfile)] = 1;

  // Check return occurs at the end and only once in the BytecodeArray.
  CHECK_EQ(final_bytecode, Bytecode::kReturn);
  CHECK_EQ(scorecard[Bytecodes::ToByte(final_bytecode)], 1);

#define CHECK_BYTECODE_PRESENT(Name, ...)                                \
  /* Check Bytecode is marked in scorecard, unless it's a debug break */ \
  if (!Bytecodes::IsDebugBreak(Bytecode::k##Name)) {                     \
    EXPECT_GE(scorecard[Bytecodes::ToByte(Bytecode::k##Name)], 1);       \
  }
  BYTECODE_LIST(CHECK_BYTECODE_PRESENT)
#undef CHECK_BYTECODE_PRESENT
}

TEST_F(BytecodeArrayBuilderTest, FrameSizesLookGood) {
  for (int locals = 0; locals < 5; locals++) {
    for (int temps = 0; temps < 3; temps++) {
      BytecodeArrayBuilder builder(zone(), 1, locals);
      BytecodeRegisterAllocator* allocator(builder.register_allocator());
      for (int i = 0; i < locals; i++) {
        builder.LoadLiteral(Smi::kZero);
        builder.StoreAccumulatorInRegister(Register(i));
      }
      for (int i = 0; i < temps; i++) {
        Register temp = allocator->NewRegister();
        builder.LoadLiteral(Smi::kZero);
        builder.StoreAccumulatorInRegister(temp);
        // Ensure temporaries are used so not optimized away by the
        // register optimizer.
        builder.ToName(temp);
      }
      builder.Return();

      Handle<BytecodeArray> the_array = builder.ToBytecodeArray(isolate());
      int total_registers = locals + temps;
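      // Each local and each allocated temporary should occupy exactly one
      // register slot in the frame.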
      CHECK_EQ(the_array->frame_size(), total_registers * kPointerSize);
    }
  }
}

TEST_F(BytecodeArrayBuilderTest, RegisterValues) {
  int index = 1;

  Register the_register(index);
  CHECK_EQ(the_register.index(), index);

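  // Converting to an operand and back should round-trip the register index.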
  int actual_operand = the_register.ToOperand();
  int actual_index = Register::FromOperand(actual_operand).index();
  CHECK_EQ(actual_index, index);
}

TEST_F(BytecodeArrayBuilderTest, Parameters) {
  BytecodeArrayBuilder builder(zone(), 10, 0);

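  // The receiver register precedes the formal parameter registers, so
  // parameter 8 should be nine registers after the receiver.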
  Register receiver(builder.Receiver());
  Register param8(builder.Parameter(8));
  CHECK_EQ(param8.index() - receiver.index(), 9);
}

TEST_F(BytecodeArrayBuilderTest, Constants) {
  BytecodeArrayBuilder builder(zone(), 1, 0);
  AstValueFactory ast_factory(zone(), isolate()->ast_string_constants(),
                              isolate()->heap()->HashSeed());

  double heap_num_1 = 3.14;
  double heap_num_2 = 5.2;
  double nan = std::numeric_limits<double>::quiet_NaN();
  const AstRawString* string = ast_factory.GetOneByteString("foo");
  const AstRawString* string_copy = ast_factory.GetOneByteString("foo");

  builder.LoadLiteral(heap_num_1)
      .LoadLiteral(heap_num_2)
      .LoadLiteral(string)
      .LoadLiteral(heap_num_1)
      .LoadLiteral(heap_num_1)
      .LoadLiteral(nan)
      .LoadLiteral(string_copy)
      .LoadLiteral(heap_num_2)
      .LoadLiteral(nan)
      .Return();

  ast_factory.Internalize(isolate());
  Handle<BytecodeArray> array = builder.ToBytecodeArray(isolate());
  // Should only have one entry for each identical constant.
  EXPECT_EQ(4, array->constant_pool()->length());
}

TEST_F(BytecodeArrayBuilderTest, ForwardJumps) {
  static const int kFarJumpDistance = 256 + 20;

  BytecodeArrayBuilder builder(zone(), 1, 1);

  Register reg(0);
  BytecodeLabel far0, far1, far2, far3, far4;
  BytecodeLabel near0, near1, near2, near3, near4;
  BytecodeLabel after_jump0, after_jump1;

  builder.Jump(&near0)
      .Bind(&after_jump0)
      .CompareOperation(Token::Value::EQ, reg, 1)
      .JumpIfTrue(ToBooleanMode::kAlreadyBoolean, &near1)
      .CompareOperation(Token::Value::EQ, reg, 2)
      .JumpIfFalse(ToBooleanMode::kAlreadyBoolean, &near2)
      .BinaryOperation(Token::Value::ADD, reg, 1)
      .JumpIfTrue(ToBooleanMode::kConvertToBoolean, &near3)
      .BinaryOperation(Token::Value::ADD, reg, 2)
      .JumpIfFalse(ToBooleanMode::kConvertToBoolean, &near4)
      .Bind(&near0)
      .Bind(&near1)
      .Bind(&near2)
      .Bind(&near3)
      .Bind(&near4)
      .Jump(&far0)
      .Bind(&after_jump1)
      .CompareOperation(Token::Value::EQ, reg, 3)
      .JumpIfTrue(ToBooleanMode::kAlreadyBoolean, &far1)
      .CompareOperation(Token::Value::EQ, reg, 4)
      .JumpIfFalse(ToBooleanMode::kAlreadyBoolean, &far2)
      .BinaryOperation(Token::Value::ADD, reg, 3)
      .JumpIfTrue(ToBooleanMode::kConvertToBoolean, &far3)
      .BinaryOperation(Token::Value::ADD, reg, 4)
      .JumpIfFalse(ToBooleanMode::kConvertToBoolean, &far4);
  for (int i = 0; i < kFarJumpDistance - 22; i++) {
    builder.Debugger();
  }
  builder.Bind(&far0).Bind(&far1).Bind(&far2).Bind(&far3).Bind(&far4);
  builder.Return();

  Handle<BytecodeArray> array = builder.ToBytecodeArray(isolate());
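  // Expected length: the jump/compare sequence emitted above, plus one byte
  // per Debugger padding op, plus the final Return.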
  DCHECK_EQ(array->length(), 44 + kFarJumpDistance - 22 + 1);

  BytecodeArrayIterator iterator(array);
  CHECK_EQ(iterator.current_bytecode(), Bytecode::kJump);
  CHECK_EQ(iterator.GetUnsignedImmediateOperand(0), 22);
  iterator.Advance();

  // Ignore compare operation.
  iterator.Advance();

  CHECK_EQ(iterator.current_bytecode(), Bytecode::kJumpIfTrue);
  CHECK_EQ(iterator.GetUnsignedImmediateOperand(0), 17);
  iterator.Advance();

  // Ignore compare operation.
  iterator.Advance();

  CHECK_EQ(iterator.current_bytecode(), Bytecode::kJumpIfFalse);
  CHECK_EQ(iterator.GetUnsignedImmediateOperand(0), 12);
  iterator.Advance();

  // Ignore add operation.
  iterator.Advance();

  CHECK_EQ(iterator.current_bytecode(), Bytecode::kJumpIfToBooleanTrue);
  CHECK_EQ(iterator.GetUnsignedImmediateOperand(0), 7);
  iterator.Advance();

  // Ignore add operation.
  iterator.Advance();

  CHECK_EQ(iterator.current_bytecode(), Bytecode::kJumpIfToBooleanFalse);
  CHECK_EQ(iterator.GetUnsignedImmediateOperand(0), 2);
  iterator.Advance();

  CHECK_EQ(iterator.current_bytecode(), Bytecode::kJumpConstant);
  CHECK_EQ(iterator.GetConstantForIndexOperand(0),
           Smi::FromInt(kFarJumpDistance));
  iterator.Advance();

  // Ignore compare operation.
  iterator.Advance();

  CHECK_EQ(iterator.current_bytecode(), Bytecode::kJumpIfTrueConstant);
  CHECK_EQ(iterator.GetConstantForIndexOperand(0),
           Smi::FromInt(kFarJumpDistance - 5));
  iterator.Advance();

  // Ignore compare operation.
  iterator.Advance();

  CHECK_EQ(iterator.current_bytecode(), Bytecode::kJumpIfFalseConstant);
  CHECK_EQ(iterator.GetConstantForIndexOperand(0),
           Smi::FromInt(kFarJumpDistance - 10));
  iterator.Advance();

  // Ignore add operation.
  iterator.Advance();

  CHECK_EQ(iterator.current_bytecode(), Bytecode::kJumpIfToBooleanTrueConstant);
  CHECK_EQ(iterator.GetConstantForIndexOperand(0),
           Smi::FromInt(kFarJumpDistance - 15));
  iterator.Advance();

  // Ignore add operation.
  iterator.Advance();

  CHECK_EQ(iterator.current_bytecode(),
           Bytecode::kJumpIfToBooleanFalseConstant);
  CHECK_EQ(iterator.GetConstantForIndexOperand(0),
           Smi::FromInt(kFarJumpDistance - 20));
  iterator.Advance();
}

TEST_F(BytecodeArrayBuilderTest, BackwardJumps) {
  BytecodeArrayBuilder builder(zone(), 1, 1);

  Register reg(0);

  BytecodeLabel label0;
  builder.Bind(&label0).JumpLoop(&label0, 0);
  for (int i = 0; i < 42; i++) {
    BytecodeLabel after_jump;
    builder.JumpLoop(&label0, 0).Bind(&after_jump);
  }

  // Add padding to force wide backwards jumps.
  for (int i = 0; i < 256; i++) {
    builder.Debugger();
  }

  builder.JumpLoop(&label0, 0);
  BytecodeLabel end;
  builder.Bind(&end);
  builder.Return();

  Handle<BytecodeArray> array = builder.ToBytecodeArray(isolate());
  BytecodeArrayIterator iterator(array);
  CHECK_EQ(iterator.current_bytecode(), Bytecode::kJumpLoop);
  CHECK_EQ(iterator.GetUnsignedImmediateOperand(0), 0);
  iterator.Advance();
  for (unsigned i = 0; i < 42; i++) {
    CHECK_EQ(iterator.current_bytecode(), Bytecode::kJumpLoop);
    CHECK_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
    // offset of 3 (because kJumpLoop takes two immediate operands)
    CHECK_EQ(iterator.GetUnsignedImmediateOperand(0), i * 3 + 3);
    iterator.Advance();
  }
  // Check padding to force wide backwards jumps.
  for (int i = 0; i < 256; i++) {
    CHECK_EQ(iterator.current_bytecode(), Bytecode::kDebugger);
    iterator.Advance();
  }
  CHECK_EQ(iterator.current_bytecode(), Bytecode::kJumpLoop);
  CHECK_EQ(iterator.current_operand_scale(), OperandScale::kDouble);
  CHECK_EQ(iterator.GetUnsignedImmediateOperand(0), 386);
  iterator.Advance();
  CHECK_EQ(iterator.current_bytecode(), Bytecode::kReturn);
  iterator.Advance();
  CHECK(iterator.done());
}

TEST_F(BytecodeArrayBuilderTest, SmallSwitch) {
  BytecodeArrayBuilder builder(zone(), 1, 1);

  // Small jump table that fits into the single-size constant pool
  int small_jump_table_size = 5;
  int small_jump_table_base = -2;
  BytecodeJumpTable* small_jump_table =
      builder.AllocateJumpTable(small_jump_table_size, small_jump_table_base);

  builder.LoadLiteral(Smi::FromInt(7)).SwitchOnSmiNoFeedback(small_jump_table);
  for (int i = 0; i < small_jump_table_size; i++) {
    builder.Bind(small_jump_table, small_jump_table_base + i).Debugger();
  }
  builder.Return();

  Handle<BytecodeArray> array = builder.ToBytecodeArray(isolate());
  BytecodeArrayIterator iterator(array);

  CHECK_EQ(iterator.current_bytecode(), Bytecode::kLdaSmi);
  iterator.Advance();

  CHECK_EQ(iterator.current_bytecode(), Bytecode::kSwitchOnSmiNoFeedback);
  CHECK_EQ(iterator.current_operand_scale(), OperandScale::kSingle);
  {
    int i = 0;
    int switch_end =
        iterator.current_offset() + iterator.current_bytecode_size();

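    // Each case targets the one-byte Debugger bytecode bound for it, so
    // consecutive case targets are one byte apart after the switch.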
    for (const auto& entry : iterator.GetJumpTableTargetOffsets()) {
      CHECK_EQ(entry.case_value, small_jump_table_base + i);
      CHECK_EQ(entry.target_offset, switch_end + i);

      i++;
    }
    CHECK_EQ(i, small_jump_table_size);
  }
  iterator.Advance();

  for (int i = 0; i < small_jump_table_size; i++) {
    CHECK_EQ(iterator.current_bytecode(), Bytecode::kDebugger);
    iterator.Advance();
  }

  CHECK_EQ(iterator.current_bytecode(), Bytecode::kReturn);
  iterator.Advance();
  CHECK(iterator.done());
}

TEST_F(BytecodeArrayBuilderTest, WideSwitch) {
  BytecodeArrayBuilder builder(zone(), 1, 1);

  // Large jump table that requires a wide Switch bytecode.
  int large_jump_table_size = 256;
  int large_jump_table_base = -10;
  BytecodeJumpTable* large_jump_table =
      builder.AllocateJumpTable(large_jump_table_size, large_jump_table_base);

  builder.LoadLiteral(Smi::FromInt(7)).SwitchOnSmiNoFeedback(large_jump_table);
  for (int i = 0; i < large_jump_table_size; i++) {
    builder.Bind(large_jump_table, large_jump_table_base + i).Debugger();
  }
  builder.Return();

  Handle<BytecodeArray> array = builder.ToBytecodeArray(isolate());
  BytecodeArrayIterator iterator(array);

  CHECK_EQ(iterator.current_bytecode(), Bytecode::kLdaSmi);
  iterator.Advance();

  CHECK_EQ(iterator.current_bytecode(), Bytecode::kSwitchOnSmiNoFeedback);
  CHECK_EQ(iterator.current_operand_scale(), OperandScale::kDouble);
  {
    int i = 0;
    int switch_end =
        iterator.current_offset() + iterator.current_bytecode_size();

    for (const auto& entry : iterator.GetJumpTableTargetOffsets()) {
      CHECK_EQ(entry.case_value, large_jump_table_base + i);
      CHECK_EQ(entry.target_offset, switch_end + i);

      i++;
    }
    CHECK_EQ(i, large_jump_table_size);
  }
  iterator.Advance();

  for (int i = 0; i < large_jump_table_size; i++) {
    CHECK_EQ(iterator.current_bytecode(), Bytecode::kDebugger);
    iterator.Advance();
  }

  CHECK_EQ(iterator.current_bytecode(), Bytecode::kReturn);
  iterator.Advance();
  CHECK(iterator.done());
}

TEST_F(BytecodeArrayBuilderTest, LabelReuse) {
  BytecodeArrayBuilder builder(zone(), 1, 0);

  // Labels can only have 1 forward reference, but
  // can be referred to multiple times once bound.
  BytecodeLabel label, after_jump0, after_jump1;

  builder.Jump(&label)
      .Bind(&label)
      .JumpLoop(&label, 0)
      .Bind(&after_jump0)
      .JumpLoop(&label, 0)
      .Bind(&after_jump1)
      .Return();

  Handle<BytecodeArray> array = builder.ToBytecodeArray(isolate());
  BytecodeArrayIterator iterator(array);
  CHECK_EQ(iterator.current_bytecode(), Bytecode::kJump);
  CHECK_EQ(iterator.GetUnsignedImmediateOperand(0), 2);
  iterator.Advance();
  CHECK_EQ(iterator.current_bytecode(), Bytecode::kJumpLoop);
  CHECK_EQ(iterator.GetUnsignedImmediateOperand(0), 0);
  iterator.Advance();
  CHECK_EQ(iterator.current_bytecode(), Bytecode::kJumpLoop);
  CHECK_EQ(iterator.GetUnsignedImmediateOperand(0), 3);
  iterator.Advance();
  CHECK_EQ(iterator.current_bytecode(), Bytecode::kReturn);
  iterator.Advance();
  CHECK(iterator.done());
}

TEST_F(BytecodeArrayBuilderTest, LabelAddressReuse) {
  static const int kRepeats = 3;

  BytecodeArrayBuilder builder(zone(), 1, 0);
  for (int i = 0; i < kRepeats; i++) {
    BytecodeLabel label, after_jump0, after_jump1;
    builder.Jump(&label)
        .Bind(&label)
        .JumpLoop(&label, 0)
        .Bind(&after_jump0)
        .JumpLoop(&label, 0)
        .Bind(&after_jump1);
  }
  builder.Return();

  Handle<BytecodeArray> array = builder.ToBytecodeArray(isolate());
  BytecodeArrayIterator iterator(array);
  for (int i = 0; i < kRepeats; i++) {
    CHECK_EQ(iterator.current_bytecode(), Bytecode::kJump);
    CHECK_EQ(iterator.GetUnsignedImmediateOperand(0), 2);
    iterator.Advance();
    CHECK_EQ(iterator.current_bytecode(), Bytecode::kJumpLoop);
    CHECK_EQ(iterator.GetUnsignedImmediateOperand(0), 0);
    iterator.Advance();
    CHECK_EQ(iterator.current_bytecode(), Bytecode::kJumpLoop);
    CHECK_EQ(iterator.GetUnsignedImmediateOperand(0), 3);
    iterator.Advance();
  }
  CHECK_EQ(iterator.current_bytecode(), Bytecode::kReturn);
  iterator.Advance();
  CHECK(iterator.done());
}

}  // namespace interpreter
}  // namespace internal
}  // namespace v8