[test] Add switch to always use slow path.

Introduce a new runtime flag that forces the regexp, promise, and
array builtins to always take their slow paths. The switch is active
in DEBUG builds or when the new compile-time flag
ENABLE_FASTSLOW_SWITCH is defined.

It will be used by the fast/slow path fuzzer and as a testing
variant to ensure that slow path implementations behave equivalently
to the corresponding fast paths (where applicable).

Bug: v8:7120
Change-Id: Ia2a4ab7aca5051e852723782c529bd2e8e5925ca
Reviewed-on: https://chromium-review.googlesource.com/787291
Commit-Queue: Michał Majewski <majeski@google.com>
Reviewed-by: Sathya Gunasekaran <gsathya@chromium.org>
Reviewed-by: Michael Achenbach <machenbach@chromium.org>
Reviewed-by: Jakob Gruber <jgruber@chromium.org>
Cr-Commit-Position: refs/heads/master@{#49642}
Michał Majewski 2017-11-27 16:21:48 +01:00, committed by Commit Bot
parent b0a87c8532, commit f2150dbd76
13 changed files with 63 additions and 0 deletions
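
The intended use, sketched below, is differential testing: run the same
snippet once normally and once with the switch on, and require identical
observable behavior. RunInFreshIsolate is a hypothetical helper (it would
create a fresh isolate, so Isolate::Init re-reads the flag, evaluate the
JS snippet, and return its printed result); FLAG_force_slow_path and its
pickup in Isolate::Init are the parts added by this change.

// Hedged sketch of a differential test driving the new switch; only
// FLAG_force_slow_path and its plumbing below are part of this change.
void CompareFastAndSlowPaths(const char* snippet) {
  v8::internal::FLAG_force_slow_path = false;
  std::string fast_result = RunInFreshIsolate(snippet);

  v8::internal::FLAG_force_slow_path = true;  // forces all guarded slow paths
  std::string slow_result = RunInFreshIsolate(snippet);

  // Where applicable, the slow path must be observably equivalent.
  CHECK_EQ(fast_result, slow_result);
}

In a debug build the switch can also be passed directly on the command
line, e.g. d8 --force-slow-path test.js.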


@@ -869,6 +869,10 @@ ExternalReference ExternalReference::stress_deopt_count(Isolate* isolate) {
   return ExternalReference(isolate->stress_deopt_count_address());
 }
 
+ExternalReference ExternalReference::force_slow_path(Isolate* isolate) {
+  return ExternalReference(isolate->force_slow_path_address());
+}
+
 ExternalReference ExternalReference::new_deoptimizer_function(
     Isolate* isolate) {
   return ExternalReference(


@@ -1046,6 +1046,8 @@ class ExternalReference BASE_EMBEDDED {
   static ExternalReference stress_deopt_count(Isolate* isolate);
 
+  static ExternalReference force_slow_path(Isolate* isolate);
+
   static ExternalReference fixed_typed_array_base_data_offset();
 
  private:


@@ -366,6 +366,8 @@ Node* PromiseBuiltinsAssembler::InternalPromiseThen(Node* context,
   VARIABLE(var_deferred_on_resolve, MachineRepresentation::kTagged);
   VARIABLE(var_deferred_on_reject, MachineRepresentation::kTagged);
 
+  GotoIfForceSlowPath(&promise_capability);
+
   Branch(WordEqual(promise_fun, constructor), &fast_promise_capability,
          &promise_capability);
@@ -615,6 +617,8 @@ void PromiseBuiltinsAssembler::BranchIfFastPath(Node* native_context,
                        LoadContextElement(native_context,
                                           Context::PROMISE_FUNCTION_INDEX)));
 
+  GotoIfForceSlowPath(if_ismodified);
+
   Node* const map = LoadMap(promise);
   Node* const initial_map =
       LoadObjectField(promise_fun, JSFunction::kPrototypeOrInitialMapOffset);
@@ -1463,6 +1467,9 @@ TF_BUILTIN(PromiseReject, PromiseBuiltinsAssembler) {
   Label if_nativepromise(this), if_custompromise(this, Label::kDeferred);
 
   Node* const native_context = LoadNativeContext(context);
+
+  GotoIfForceSlowPath(&if_custompromise);
+
   Node* const promise_fun =
       LoadContextElement(native_context, Context::PROMISE_FUNCTION_INDEX);
   Branch(WordEqual(promise_fun, receiver), &if_nativepromise,


@@ -834,6 +834,11 @@ Node* RegExpBuiltinsAssembler::IsFastRegExpNoPrototype(Node* const context,
   Label out(this);
   VARIABLE(var_result, MachineRepresentation::kWord32);
 
+#if defined(DEBUG) || defined(ENABLE_FASTSLOW_SWITCH)
+  var_result.Bind(Int32Constant(0));
+  GotoIfForceSlowPath(&out);
+#endif
+
   Node* const native_context = LoadNativeContext(context);
   Node* const regexp_fun =
       LoadContextElement(native_context, Context::REGEXP_FUNCTION_INDEX);
@@ -871,6 +876,8 @@ void RegExpBuiltinsAssembler::BranchIfFastRegExp(Node* const context,
                                                  Label* const if_ismodified) {
   CSA_ASSERT(this, WordEqual(LoadMap(object), map));
 
+  GotoIfForceSlowPath(if_ismodified);
+
   // TODO(ishell): Update this check once map changes for constant field
   // tracking are landing.


@@ -872,6 +872,8 @@ TNode<BoolT> CodeStubAssembler::IsFastJSArray(SloppyTNode<Object> object,
 
 void CodeStubAssembler::BranchIfFastJSArray(Node* object, Node* context,
                                             Label* if_true, Label* if_false) {
+  GotoIfForceSlowPath(if_false);
+
   // Bailout if receiver is a Smi.
   GotoIf(TaggedIsSmi(object), if_false);
 
@@ -895,6 +897,16 @@ void CodeStubAssembler::BranchIfFastJSArrayForCopy(Node* object, Node* context,
   BranchIfFastJSArray(object, context, if_true, if_false);
 }
 
+void CodeStubAssembler::GotoIfForceSlowPath(Label* if_true) {
+#if defined(DEBUG) || defined(ENABLE_FASTSLOW_SWITCH)
+  Node* const force_slow_path_addr =
+      ExternalConstant(ExternalReference::force_slow_path(isolate()));
+  Node* const force_slow = Load(MachineType::Uint8(), force_slow_path_addr);
+
+  GotoIf(force_slow, if_true);
+#endif
+}
+
 Node* CodeStubAssembler::AllocateRaw(Node* size_in_bytes, AllocationFlags flags,
                                      Node* top_address, Node* limit_address) {
   Node* top = Load(MachineType::Pointer(), top_address);
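
The helper loads the isolate's force_slow_path_ flag as a single byte
through the external reference registered further down. Conceptually, the
generated code is equivalent to this C++ (a sketch for illustration, not
code from the change):

static bool ForceSlowPath(v8::internal::Isolate* isolate) {
  // ExternalReference::force_slow_path(isolate) resolves to the address
  // returned by force_slow_path_address(); the CSA Load above reads the
  // same byte.
  return *reinterpret_cast<uint8_t*>(isolate->force_slow_path_address());
}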


@@ -413,6 +413,14 @@ class V8_EXPORT_PRIVATE CodeStubAssembler : public compiler::CodeAssembler {
   void BranchIfFastJSArrayForCopy(Node* object, Node* context, Label* if_true,
                                   Label* if_false);
 
+  // Branches to {if_true} when the --force-slow-path flag has been passed.
+  // It is used in tests to ensure that slow path implementations behave
+  // equivalently to the corresponding fast paths (where applicable).
+  //
+  // Only has an effect in DEBUG mode or with the ENABLE_FASTSLOW_SWITCH
+  // compile-time flag; a nop otherwise.
+  void GotoIfForceSlowPath(Label* if_true);
+
   // Load value from current frame by given offset in bytes.
   Node* LoadFromFrame(int offset, MachineType rep = MachineType::AnyTagged());
 
   // Load value from current parent frame by given offset in bytes.
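
Call sites route their slow label through the helper before any map or
prototype checks, as the builtins above do. A minimal illustrative builtin
(the builtin itself is hypothetical; GotoIfForceSlowPath and the other CSA
primitives are real):

TF_BUILTIN(ExampleIsFastReceiver, CodeStubAssembler) {
  Node* receiver = Parameter(Descriptor::kReceiver);
  Label if_fast(this), if_slow(this, Label::kDeferred);

  // A nop in release builds without ENABLE_FASTSLOW_SWITCH; otherwise
  // jumps to {if_slow} whenever --force-slow-path is set.
  GotoIfForceSlowPath(&if_slow);
  Branch(TaggedIsSmi(receiver), &if_slow, &if_fast);

  BIND(&if_fast);
  Return(TrueConstant());

  BIND(&if_slow);
  Return(FalseConstant());
}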


@@ -3274,6 +3274,10 @@ class TypedElementsAccessor
     DisallowHeapAllocation no_gc;
     DisallowJavascriptExecution no_js(isolate);
 
+#if defined(DEBUG) || defined(ENABLE_FASTSLOW_SWITCH)
+    if (isolate->force_slow_path()) return true;
+#endif
+
     Object* source_proto = source->map()->prototype();
 
     // Null prototypes are OK - we don't need to do prototype chain lookups on


@@ -282,6 +282,8 @@ void ExternalReferenceTable::AddReferences(Isolate* isolate) {
       "double_constants.minus_one_half");
   Add(ExternalReference::stress_deopt_count(isolate).address(),
       "Isolate::stress_deopt_count_address()");
+  Add(ExternalReference::force_slow_path(isolate).address(),
+      "Isolate::force_slow_path_address()");
   Add(ExternalReference::runtime_function_table_address(isolate).address(),
       "Runtime::runtime_function_table_address()");
   Add(ExternalReference::address_of_float_abs_constant().address(),


@@ -757,6 +757,7 @@ DEFINE_BOOL(enable_experimental_builtins, true,
 // builtins.cc
 DEFINE_BOOL(allow_unsafe_function_constructor, false,
             "allow invoking the function constructor without security checks")
+DEFINE_BOOL(force_slow_path, false, "always take the slow path for builtins")
 
 // builtins-ia32.cc
 DEFINE_BOOL(inline_new, true, "use fast inline allocation")


@@ -2408,6 +2408,7 @@ Isolate::Isolate(bool enable_serializer)
       deferred_handles_head_(nullptr),
       optimizing_compile_dispatcher_(nullptr),
       stress_deopt_count_(0),
+      force_slow_path_(false),
       next_optimization_id_(0),
 #if V8_SFI_HAS_UNIQUE_ID
       next_unique_sfi_id_(0),
@@ -2746,6 +2747,7 @@ bool Isolate::Init(StartupDeserializer* des) {
   TRACE_ISOLATE(init);
 
   stress_deopt_count_ = FLAG_deopt_every_n_times;
+  force_slow_path_ = FLAG_force_slow_path;
 
   has_fatal_error_ = false;


@@ -1163,6 +1163,10 @@ class Isolate {
   void* stress_deopt_count_address() { return &stress_deopt_count_; }
 
+  bool force_slow_path() { return force_slow_path_; }
+  bool* force_slow_path_address() { return &force_slow_path_; }
+
   V8_EXPORT_PRIVATE base::RandomNumberGenerator* random_number_generator();
 
   V8_EXPORT_PRIVATE base::RandomNumberGenerator* fuzzer_rng();
@@ -1599,6 +1603,8 @@ class Isolate {
   // Counts deopt points if deopt_every_n_times is enabled.
   unsigned int stress_deopt_count_;
 
+  bool force_slow_path_;
+
   int next_optimization_id_;
 
 #if V8_SFI_HAS_UNIQUE_ID


@@ -2613,6 +2613,10 @@ bool Object::IterationHasObservableEffects() {
   JSArray* array = JSArray::cast(this);
   Isolate* isolate = array->GetIsolate();
 
+#if defined(DEBUG) || defined(ENABLE_FASTSLOW_SWITCH)
+  if (isolate->force_slow_path()) return true;
+#endif
+
   // Check that we have the original ArrayPrototype.
   if (!array->map()->prototype()->IsJSObject()) return true;
   JSObject* array_proto = JSObject::cast(array->map()->prototype());


@@ -134,6 +134,10 @@ bool RegExpUtils::IsUnmodifiedRegExp(Isolate* isolate, Handle<Object> obj) {
   // TODO(ishell): Update this check once map changes for constant field
   // tracking are landing.
 
+#if defined(DEBUG) || defined(ENABLE_FASTSLOW_SWITCH)
+  if (isolate->force_slow_path()) return false;
+#endif
+
   if (!obj->IsJSReceiver()) return false;
   JSReceiver* recv = JSReceiver::cast(*obj);