diff --git a/include/v8.h b/include/v8.h index 214bf9f2f7..942ef4d95f 100644 --- a/include/v8.h +++ b/include/v8.h @@ -4084,6 +4084,37 @@ class V8_EXPORT Isolate { Scope& operator=(const Scope&); }; + + /** + * Assert that no Javascript code is invoked. + */ + class DisallowJavascriptExecutionScope { + public: + explicit DisallowJavascriptExecutionScope(Isolate* isolate); + ~DisallowJavascriptExecutionScope(); + + private: + void* internal_; + + // Prevent copying of Scope objects. + DisallowJavascriptExecutionScope(const DisallowJavascriptExecutionScope&); + DisallowJavascriptExecutionScope& operator=( + const DisallowJavascriptExecutionScope&); + }; + + + /** + * Introduce exception to DisallowJavascriptExecutionScope. + */ + class AllowJavascriptExecutionScope { + public: + explicit AllowJavascriptExecutionScope(Isolate* isolate); + ~AllowJavascriptExecutionScope(); + + private: + void* internal_; + }; + /** * Types of garbage collections that can be requested via * RequestGarbageCollectionForTesting. 
diff --git a/src/api.cc b/src/api.cc index 5dd392f8a5..381ca004b1 100644 --- a/src/api.cc +++ b/src/api.cc @@ -6514,6 +6514,32 @@ void Isolate::Exit() { } +Isolate::DisallowJavascriptExecutionScope::DisallowJavascriptExecutionScope( + Isolate* isolate) { + i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate); + internal_ = reinterpret_cast<void*>( + new i::DisallowJavascriptExecution(i_isolate)); +} + + +Isolate::DisallowJavascriptExecutionScope::~DisallowJavascriptExecutionScope() { + delete reinterpret_cast<i::DisallowJavascriptExecution*>(internal_); +} + + +Isolate::AllowJavascriptExecutionScope::AllowJavascriptExecutionScope( + Isolate* isolate) { + i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate); + internal_ = reinterpret_cast<void*>( + new i::AllowJavascriptExecution(i_isolate)); +} + + +Isolate::AllowJavascriptExecutionScope::~AllowJavascriptExecutionScope() { + delete reinterpret_cast<i::AllowJavascriptExecution*>(internal_); +} + + void Isolate::GetHeapStatistics(HeapStatistics* heap_statistics) { i::Isolate* isolate = reinterpret_cast<i::Isolate*>(this); if (!isolate->IsInitialized()) { diff --git a/src/assert-scope.cc b/src/assert-scope.cc new file mode 100644 index 0000000000..960567cfa3 --- /dev/null +++ b/src/assert-scope.cc @@ -0,0 +1,21 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ + +#include "assert-scope.h" +#include "v8.h" + +namespace v8 { +namespace internal { + +uint32_t PerIsolateAssertBase::GetData(Isolate* isolate) { + return isolate->per_isolate_assert_data(); +} + + +void PerIsolateAssertBase::SetData(Isolate* isolate, uint32_t data) { + isolate->set_per_isolate_assert_data(data); +} + +} } // namespace v8::internal diff --git a/src/assert-scope.h b/src/assert-scope.h index 269b280d02..4357056404 100644 --- a/src/assert-scope.h +++ b/src/assert-scope.h @@ -30,6 +30,7 @@ #include "allocation.h" #include "platform.h" +#include "utils.h" namespace v8 { namespace internal { @@ -46,7 +47,12 @@ enum PerThreadAssertType { }; -#ifdef DEBUG +enum PerIsolateAssertType { + JAVASCRIPT_EXECUTION_ASSERT, + ALLOCATION_FAILURE_ASSERT +}; + + class PerThreadAssertData { public: PerThreadAssertData() : nesting_level_(0) { @@ -72,12 +78,9 @@ class PerThreadAssertData { DISALLOW_COPY_AND_ASSIGN(PerThreadAssertData); }; -#endif // DEBUG class PerThreadAssertScopeBase { -#ifdef DEBUG - protected: PerThreadAssertScopeBase() { data_ = GetAssertData(); @@ -110,18 +113,12 @@ class PerThreadAssertScopeBase { static void SetThreadLocalData(PerThreadAssertData* data) { Thread::SetThreadLocal(thread_local_key, data); } -#endif // DEBUG }; - template <PerThreadAssertType type, bool allow> class PerThreadAssertScope : public PerThreadAssertScopeBase { public: -#ifndef DEBUG - PerThreadAssertScope() { } - static void SetIsAllowed(bool is_allowed) { } -#else PerThreadAssertScope() { old_state_ = data_->get(type); data_->set(type, allow); @@ -136,49 +133,132 @@ class PerThreadAssertScope : public PerThreadAssertScopeBase { private: bool old_state_; + + DISALLOW_COPY_AND_ASSIGN(PerThreadAssertScope); +}; + + +class PerIsolateAssertBase { + protected: + static uint32_t GetData(Isolate* isolate); + static void SetData(Isolate* isolate, uint32_t data); +}; + + +template <PerIsolateAssertType type, bool allow> +class PerIsolateAssertScope : public PerIsolateAssertBase { + public: + explicit PerIsolateAssertScope(Isolate* isolate) : 
isolate_(isolate) { + STATIC_ASSERT(type < 32); + old_data_ = GetData(isolate_); + SetData(isolate_, DataBit::update(old_data_, allow)); + } + + ~PerIsolateAssertScope() { + SetData(isolate_, old_data_); + } + + static bool IsAllowed(Isolate* isolate) { + return DataBit::decode(GetData(isolate)); + } + + private: + typedef BitField<bool, type, 1> DataBit; + + uint32_t old_data_; + Isolate* isolate_; + + DISALLOW_COPY_AND_ASSIGN(PerIsolateAssertScope); +}; + + +template <PerThreadAssertType type, bool allow> +#ifdef DEBUG +class PerThreadAssertScopeDebugOnly : public + PerThreadAssertScope<type, allow> { +#else +class PerThreadAssertScopeDebugOnly { + public: + PerThreadAssertScopeDebugOnly() { } #endif }; + +template <PerIsolateAssertType type, bool allow> +#ifdef DEBUG +class PerIsolateAssertScopeDebugOnly : public + PerIsolateAssertScope<type, allow> { + public: + explicit PerIsolateAssertScopeDebugOnly(Isolate* isolate) + : PerIsolateAssertScope<type, allow>(isolate) { } +#else +class PerIsolateAssertScopeDebugOnly { + public: + explicit PerIsolateAssertScopeDebugOnly(Isolate* isolate) { } +#endif +}; + +// Per-thread assert scopes. + // Scope to document where we do not expect handles to be created. -typedef PerThreadAssertScope<HANDLE_ALLOCATION_ASSERT, false> +typedef PerThreadAssertScopeDebugOnly<HANDLE_ALLOCATION_ASSERT, false> DisallowHandleAllocation; // Scope to introduce an exception to DisallowHandleAllocation. -typedef PerThreadAssertScope<HANDLE_ALLOCATION_ASSERT, true> +typedef PerThreadAssertScopeDebugOnly<HANDLE_ALLOCATION_ASSERT, true> AllowHandleAllocation; // Scope to document where we do not expect any allocation and GC. -typedef PerThreadAssertScope<HEAP_ALLOCATION_ASSERT, false> +typedef PerThreadAssertScopeDebugOnly<HEAP_ALLOCATION_ASSERT, false> DisallowHeapAllocation; // Scope to introduce an exception to DisallowHeapAllocation. -typedef PerThreadAssertScope<HEAP_ALLOCATION_ASSERT, true> +typedef PerThreadAssertScopeDebugOnly<HEAP_ALLOCATION_ASSERT, true> AllowHeapAllocation; // Scope to document where we do not expect any handle dereferences. -typedef PerThreadAssertScope<HANDLE_DEREFERENCE_ASSERT, false> +typedef PerThreadAssertScopeDebugOnly<HANDLE_DEREFERENCE_ASSERT, false> DisallowHandleDereference; // Scope to introduce an exception to DisallowHandleDereference. 
-typedef PerThreadAssertScope<HANDLE_DEREFERENCE_ASSERT, true> +typedef PerThreadAssertScopeDebugOnly<HANDLE_DEREFERENCE_ASSERT, true> AllowHandleDereference; // Scope to document where we do not expect deferred handles to be dereferenced. -typedef PerThreadAssertScope<DEFERRED_HANDLE_DEREFERENCE_ASSERT, false> +typedef PerThreadAssertScopeDebugOnly<DEFERRED_HANDLE_DEREFERENCE_ASSERT, false> DisallowDeferredHandleDereference; // Scope to introduce an exception to DisallowDeferredHandleDereference. -typedef PerThreadAssertScope<DEFERRED_HANDLE_DEREFERENCE_ASSERT, true> +typedef PerThreadAssertScopeDebugOnly<DEFERRED_HANDLE_DEREFERENCE_ASSERT, true> AllowDeferredHandleDereference; // Scope to document where we do not expect deferred handles to be dereferenced. -typedef PerThreadAssertScope<CODE_DEPENDENCY_CHANGE_ASSERT, false> +typedef PerThreadAssertScopeDebugOnly<CODE_DEPENDENCY_CHANGE_ASSERT, false> DisallowCodeDependencyChange; // Scope to introduce an exception to DisallowDeferredHandleDereference. -typedef PerThreadAssertScope<CODE_DEPENDENCY_CHANGE_ASSERT, true> +typedef PerThreadAssertScopeDebugOnly<CODE_DEPENDENCY_CHANGE_ASSERT, true> AllowCodeDependencyChange; + +// Per-isolate assert scopes. + +// Scope to document where we do not expect javascript execution. +typedef PerIsolateAssertScope<JAVASCRIPT_EXECUTION_ASSERT, false> + DisallowJavascriptExecution; + +// Scope to introduce an exception to DisallowJavascriptExecution. +typedef PerIsolateAssertScope<JAVASCRIPT_EXECUTION_ASSERT, true> + AllowJavascriptExecution; + +// Scope to document where we do not expect an allocation failure. +typedef PerIsolateAssertScopeDebugOnly<ALLOCATION_FAILURE_ASSERT, false> + DisallowAllocationFailure; + +// Scope to introduce an exception to DisallowAllocationFailure. +typedef PerIsolateAssertScopeDebugOnly<ALLOCATION_FAILURE_ASSERT, true> + AllowAllocationFailure; + } } // namespace v8::internal #endif // V8_ASSERT_SCOPE_H_ diff --git a/src/builtins.cc b/src/builtins.cc index 38e090e9bc..e90e7aa041 100644 --- a/src/builtins.cc +++ b/src/builtins.cc @@ -1665,7 +1665,7 @@ void Builtins::SetUp(Isolate* isolate, bool create_heap_objects) { { // During startup it's OK to always allocate and defer GC to later. // This simplifies things because we don't need to retry. 
- AlwaysAllocateScope __scope__; + AlwaysAllocateScope __scope__(isolate); { MaybeObject* maybe_code = heap->CreateCode(desc, flags, masm.CodeObject()); if (!maybe_code->ToObject(&code)) { diff --git a/src/execution.cc b/src/execution.cc index ac848e15a9..bef01a2a91 100644 --- a/src/execution.cc +++ b/src/execution.cc @@ -77,6 +77,7 @@ static Handle<Object> Invoke(bool is_construct, // Entering JavaScript. VMState<JS> state(isolate); + CHECK(AllowJavascriptExecution::IsAllowed(isolate)); // Placeholder for return value. MaybeObject* value = reinterpret_cast<MaybeObject*>(kZapValue); diff --git a/src/heap-inl.h b/src/heap-inl.h index 29a2fbf841..c36a6fd795 100644 --- a/src/heap-inl.h +++ b/src/heap-inl.h @@ -223,7 +223,7 @@ MaybeObject* Heap::AllocateRaw(int size_in_bytes, HeapProfiler* profiler = isolate_->heap_profiler(); #ifdef DEBUG if (FLAG_gc_interval >= 0 && - !disallow_allocation_failure_ && + AllowAllocationFailure::IsAllowed(isolate_) && Heap::allocation_timeout_-- <= 0) { return Failure::RetryAfterGC(space); } @@ -663,7 +663,7 @@ Isolate* Heap::isolate() { (ISOLATE)->counters()->gc_last_resort_from_handles()->Increment(); \ (ISOLATE)->heap()->CollectAllAvailableGarbage("last resort gc"); \ { \ - AlwaysAllocateScope __scope__; \ + AlwaysAllocateScope __scope__(ISOLATE); \ __maybe_object__ = FUNCTION_CALL; \ } \ if (__maybe_object__->ToObject(&__object__)) RETURN_VALUE; \ @@ -778,21 +778,20 @@ void Heap::CompletelyClearInstanceofCache() { } -AlwaysAllocateScope::AlwaysAllocateScope() { +AlwaysAllocateScope::AlwaysAllocateScope(Isolate* isolate) + : heap_(isolate->heap()), daf_(isolate) { // We shouldn't hit any nested scopes, because that requires // non-handle code to call handle code. The code still works but // performance will degrade, so we want to catch this situation // in debug mode. 
- Isolate* isolate = Isolate::Current(); - ASSERT(isolate->heap()->always_allocate_scope_depth_ == 0); - isolate->heap()->always_allocate_scope_depth_++; + ASSERT(heap_->always_allocate_scope_depth_ == 0); + heap_->always_allocate_scope_depth_++; } AlwaysAllocateScope::~AlwaysAllocateScope() { - Isolate* isolate = Isolate::Current(); - isolate->heap()->always_allocate_scope_depth_--; - ASSERT(isolate->heap()->always_allocate_scope_depth_ == 0); + heap_->always_allocate_scope_depth_--; + ASSERT(heap_->always_allocate_scope_depth_ == 0); } @@ -848,23 +847,6 @@ double GCTracer::SizeOfHeapObjects() { } -DisallowAllocationFailure::DisallowAllocationFailure() { -#ifdef DEBUG - Isolate* isolate = Isolate::Current(); - old_state_ = isolate->heap()->disallow_allocation_failure_; - isolate->heap()->disallow_allocation_failure_ = true; -#endif -} - - -DisallowAllocationFailure::~DisallowAllocationFailure() { -#ifdef DEBUG - Isolate* isolate = Isolate::Current(); - isolate->heap()->disallow_allocation_failure_ = old_state_; -#endif -} - - } } // namespace v8::internal #endif // V8_HEAP_INL_H_ diff --git a/src/heap.cc b/src/heap.cc index 37a3804315..6790fe9847 100644 --- a/src/heap.cc +++ b/src/heap.cc @@ -105,7 +105,6 @@ Heap::Heap() unflattened_strings_length_(0), #ifdef DEBUG allocation_timeout_(0), - disallow_allocation_failure_(false), #endif // DEBUG new_space_high_promotion_mode_active_(false), old_generation_allocation_limit_(kMinimumOldGenerationAllocationLimit), @@ -7618,7 +7617,7 @@ void DescriptorLookupCache::Clear() { void Heap::GarbageCollectionGreedyCheck() { ASSERT(FLAG_gc_greedy); if (isolate_->bootstrapper()->IsActive()) return; - if (disallow_allocation_failure()) return; + if (!AllowAllocationFailure::IsAllowed(isolate_)) return; CollectGarbage(NEW_SPACE); } #endif diff --git a/src/heap.h b/src/heap.h index 81bf8508db..4c280aa8f4 100644 --- a/src/heap.h +++ b/src/heap.h @@ -1496,10 +1496,6 @@ class Heap { allocation_timeout_ = timeout; } - bool 
disallow_allocation_failure() { - return disallow_allocation_failure_; - } - void TracePathToObjectFrom(Object* target, Object* root); void TracePathToObject(Object* target); void TracePathToGlobal(); @@ -2009,10 +2005,6 @@ class Heap { // variable holds the value indicating the number of allocations // remain until the next failure and garbage collection. int allocation_timeout_; - - // Do we expect to be able to handle allocation failure at this - // time? - bool disallow_allocation_failure_; #endif // DEBUG // Indicates that the new space should be kept small due to high promotion @@ -2521,15 +2513,11 @@ class Heap { MemoryChunk* chunks_queued_for_free_; Mutex* relocation_mutex_; -#ifdef DEBUG - bool relocation_mutex_locked_by_optimizer_thread_; -#endif // DEBUG; int gc_callbacks_depth_; friend class Factory; friend class GCTracer; - friend class DisallowAllocationFailure; friend class AlwaysAllocateScope; friend class Page; friend class Isolate; @@ -2580,26 +2568,15 @@ class HeapStats { }; -class DisallowAllocationFailure { - public: - inline DisallowAllocationFailure(); - inline ~DisallowAllocationFailure(); - -#ifdef DEBUG - private: - bool old_state_; -#endif -}; - - class AlwaysAllocateScope { public: - inline AlwaysAllocateScope(); + explicit inline AlwaysAllocateScope(Isolate* isolate); inline ~AlwaysAllocateScope(); private: // Implicitly disable artificial allocation failures. - DisallowAllocationFailure disallow_allocation_failure_; + Heap* heap_; + DisallowAllocationFailure daf_; }; diff --git a/src/isolate.cc b/src/isolate.cc index c48e2d350e..50b402b04b 100644 --- a/src/isolate.cc +++ b/src/isolate.cc @@ -1918,7 +1918,7 @@ bool Isolate::Init(Deserializer* des) { } // The initialization process does not handle memory exhaustion. 
- DisallowAllocationFailure disallow_allocation_failure; + DisallowAllocationFailure disallow_allocation_failure(this); InitializeLoggingAndCounters(); diff --git a/src/isolate.h b/src/isolate.h index dfab99df1a..d1f7003c12 100644 --- a/src/isolate.h +++ b/src/isolate.h @@ -380,6 +380,7 @@ typedef List<HeapObject*> DebugObjectCache; V(CodeTracer*, code_tracer, NULL) \ V(bool, fp_stubs_generated, false) \ V(int, max_available_threads, 0) \ + V(uint32_t, per_isolate_assert_data, 0xFFFFFFFFu) \ ISOLATE_INIT_SIMULATOR_LIST(V) \ ISOLATE_DEBUGGER_INIT_LIST(V) diff --git a/src/mark-compact.cc b/src/mark-compact.cc index 24139307b7..eea9eb4dbe 100644 --- a/src/mark-compact.cc +++ b/src/mark-compact.cc @@ -3045,7 +3045,7 @@ void MarkCompactCollector::EvacuateNewSpace() { // There are soft limits in the allocation code, designed trigger a mark // sweep collection by failing allocations. But since we are already in // a mark-sweep allocation, there is no sense in trying to trigger one. - AlwaysAllocateScope scope; + AlwaysAllocateScope scope(isolate()); heap()->CheckNewSpaceExpansionCriteria(); NewSpace* new_space = heap()->new_space(); @@ -3077,7 +3077,7 @@ void MarkCompactCollector::EvacuateNewSpace() { void MarkCompactCollector::EvacuateLiveObjectsFromPage(Page* p) { - AlwaysAllocateScope always_allocate; + AlwaysAllocateScope always_allocate(isolate()); PagedSpace* space = static_cast<PagedSpace*>(p->owner()); ASSERT(p->IsEvacuationCandidate() && !p->WasSwept()); p->MarkSweptPrecisely(); diff --git a/test/cctest/cctest.status b/test/cctest/cctest.status index 0fcdfc1a7b..753421c56b 100644 --- a/test/cctest/cctest.status +++ b/test/cctest/cctest.status @@ -46,6 +46,10 @@ # This test always fails. It tests that LiveEdit causes abort when turned off. 'test-debug/LiveEditDisabled': [FAIL], + # This test always fails. It tests that DisallowJavascriptExecutionScope + # works as intended. + 'test-api/DisallowJavascriptExecutionScope': [FAIL], + # TODO(gc): Temporarily disabled in the GC branch. 
'test-log/EquivalenceOfLoggingAndTraversal': [PASS, FAIL], diff --git a/test/cctest/test-api.cc b/test/cctest/test-api.cc index de73da58ee..74afab2c1b 100644 --- a/test/cctest/test-api.cc +++ b/test/cctest/test-api.cc @@ -14218,7 +14218,8 @@ UNINITIALIZED_TEST(SetJitCodeEventHandler) { // have remnants of state from other code. v8::Isolate* isolate = v8::Isolate::New(); isolate->Enter(); - i::Heap* heap = reinterpret_cast<i::Isolate*>(isolate)->heap(); + i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate); + i::Heap* heap = i_isolate->heap(); { v8::HandleScope scope(isolate); @@ -14238,7 +14239,7 @@ UNINITIALIZED_TEST(SetJitCodeEventHandler) { const int kIterations = 10; for (int i = 0; i < kIterations; ++i) { LocalContext env(isolate); - i::AlwaysAllocateScope always_allocate; + i::AlwaysAllocateScope always_allocate(i_isolate); SimulateFullSpace(heap->code_space()); CompileRun(script); @@ -17649,7 +17650,7 @@ TEST(DynamicWithSourceURLInStackTraceString) { static void CreateGarbageInOldSpace() { i::Factory* factory = CcTest::i_isolate()->factory(); v8::HandleScope scope(CcTest::isolate()); - i::AlwaysAllocateScope always_allocate; + i::AlwaysAllocateScope always_allocate(CcTest::i_isolate()); for (int i = 0; i < 1000; i++) { factory->NewFixedArray(1000, i::TENURED); } @@ -22392,3 +22393,23 @@ TEST(Promises) { CHECK_EQ(3, global->Get(v8_str("x1"))->Int32Value()); CHECK_EQ(4, global->Get(v8_str("x2"))->Int32Value()); } + + +TEST(DisallowJavascriptExecutionScope) { + LocalContext context; + v8::Isolate* isolate = context->GetIsolate(); + v8::HandleScope scope(isolate); + v8::Isolate::DisallowJavascriptExecutionScope no_js(isolate); + CompileRun("2+2"); +} + + +TEST(AllowJavascriptExecutionScope) { + LocalContext context; + v8::Isolate* isolate = context->GetIsolate(); + v8::HandleScope scope(isolate); + v8::Isolate::DisallowJavascriptExecutionScope no_js(isolate); + { v8::Isolate::AllowJavascriptExecutionScope yes_js(isolate); + CompileRun("1+1"); + } +} diff --git 
a/test/cctest/test-heap.cc b/test/cctest/test-heap.cc index 376c735f64..96af44be15 100644 --- a/test/cctest/test-heap.cc +++ b/test/cctest/test-heap.cc @@ -1025,7 +1025,7 @@ TEST(Regression39128) { // Step 4: clone jsobject, but force always allocate first to create a clone // in old pointer space. Address old_pointer_space_top = heap->old_pointer_space()->top(); - AlwaysAllocateScope aa_scope; + AlwaysAllocateScope aa_scope(isolate); Object* clone_obj = heap->CopyJSObject(jsobject)->ToObjectChecked(); JSObject* clone = JSObject::cast(clone_obj); if (clone->address() != old_pointer_space_top) { @@ -1599,7 +1599,7 @@ TEST(TestSizeOfObjects) { { // Allocate objects on several different old-space pages so that // lazy sweeping kicks in for subsequent GC runs. - AlwaysAllocateScope always_allocate; + AlwaysAllocateScope always_allocate(CcTest::i_isolate()); int filler_size = static_cast<int>(FixedArray::SizeFor(8192)); for (int i = 1; i <= 100; i++) { CcTest::heap()->AllocateFixedArray(8192, TENURED)->ToObjectChecked(); @@ -1666,7 +1666,7 @@ static void FillUpNewSpace(NewSpace* new_space) { Isolate* isolate = heap->isolate(); Factory* factory = isolate->factory(); HandleScope scope(isolate); - AlwaysAllocateScope always_allocate; + AlwaysAllocateScope always_allocate(isolate); intptr_t available = new_space->EffectiveCapacity() - new_space->Size(); intptr_t number_of_fillers = (available / FixedArray::SizeFor(32)) - 1; for (intptr_t i = 0; i < number_of_fillers; i++) { @@ -2045,7 +2045,7 @@ TEST(PrototypeTransitionClearing) { Handle<JSObject> prototype; PagedSpace* space = CcTest::heap()->old_pointer_space(); { - AlwaysAllocateScope always_allocate; + AlwaysAllocateScope always_allocate(isolate); SimulateFullSpace(space); prototype = factory->NewJSArray(32 * KB, FAST_HOLEY_ELEMENTS, TENURED); } @@ -2173,7 +2173,7 @@ TEST(OptimizedAllocationAlwaysInNewSpace) { v8::HandleScope scope(CcTest::isolate()); SimulateFullSpace(CcTest::heap()->new_space()); - AlwaysAllocateScope 
always_allocate; + AlwaysAllocateScope always_allocate(CcTest::i_isolate()); v8::Local<v8::Value> res = CompileRun( "function c(x) {" " this.x = x;" @@ -2555,7 +2555,7 @@ TEST(OptimizedPretenuringCallNew) { v8::HandleScope scope(CcTest::isolate()); CcTest::heap()->SetNewSpaceHighPromotionModeActive(true); - AlwaysAllocateScope always_allocate; + AlwaysAllocateScope always_allocate(CcTest::i_isolate()); v8::Local<v8::Value> res = CompileRun( "function g() { this.a = 0; }" "function f() {" @@ -2587,7 +2587,7 @@ TEST(Regress1465) { static const int transitions_count = 256; { - AlwaysAllocateScope always_allocate; + AlwaysAllocateScope always_allocate(CcTest::i_isolate()); for (int i = 0; i < transitions_count; i++) { EmbeddedVector<char, 256> buffer; OS::SNPrintF(buffer, "var o = new Object; o.prop%d = %d;", i, i); @@ -2717,7 +2717,7 @@ TEST(ReleaseOverReservedPages) { PagedSpace* old_pointer_space = heap->old_pointer_space(); CHECK_EQ(1, old_pointer_space->CountTotalPages()); for (int i = 0; i < number_of_test_pages; i++) { - AlwaysAllocateScope always_allocate; + AlwaysAllocateScope always_allocate(isolate); SimulateFullSpace(old_pointer_space); factory->NewFixedArray(1, TENURED); } @@ -2766,7 +2766,7 @@ TEST(Regress2237) { // Generate a sliced string that is based on the above parent and // lives in old-space. SimulateFullSpace(CcTest::heap()->new_space()); - AlwaysAllocateScope always_allocate; + AlwaysAllocateScope always_allocate(isolate); Handle<String> t = factory->NewProperSubString(s, 5, 35); CHECK(t->IsSlicedString()); CHECK(!CcTest::heap()->InNewSpace(*t)); @@ -3359,7 +3359,7 @@ TEST(Regress169928) { // This should crash with a protection violation if we are running a build // with the bug. 
- AlwaysAllocateScope aa_scope; + AlwaysAllocateScope aa_scope(isolate); v8::Script::Compile(mote_code_string)->Run(); } diff --git a/test/cctest/test-strings.cc b/test/cctest/test-strings.cc index 4b31e614d5..129e6cf3d6 100644 --- a/test/cctest/test-strings.cc +++ b/test/cctest/test-strings.cc @@ -661,7 +661,7 @@ void TestStringCharacterStream(BuildString build, int test_cases) { for (int i = 0; i < test_cases; i++) { printf("%d\n", i); HandleScope inner_scope(isolate); - AlwaysAllocateScope always_allocate; + AlwaysAllocateScope always_allocate(isolate); // Build flat version of cons string. Handle<String> flat_string = build(i, &data); ConsStringStats flat_string_stats; diff --git a/tools/gyp/v8.gyp b/tools/gyp/v8.gyp index 6039ce180c..5967bfef88 100644 --- a/tools/gyp/v8.gyp +++ b/tools/gyp/v8.gyp @@ -258,6 +258,7 @@ '../../src/assembler.cc', '../../src/assembler.h', '../../src/assert-scope.h', + '../../src/assert-scope.cc', '../../src/ast.cc', '../../src/ast.h', '../../src/atomicops.h',