c42f5829a1
In the shell sample, don't print the result of executing a script, only the results of evaluating expressions. Fixed an issue when building the samples on Windows using a shared V8 library. Added a visibility option on the Linux build which makes the generated library 18% smaller. Changed the build system to accept multiple build modes in one build and generate separate objects, libraries and executables for each mode. Removed the deferred negation optimization (a * -b => -(a * b)) since it visibly changes operand conversion order. Improved parsing performance by introducing a stack guard in preparsing. Without a stack guard, preparsing always bails out with a stack overflow.
git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@16 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
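For context on the deferred-negation removal, here is a minimal JavaScript sketch (not part of this change; the objects a and b are hypothetical) showing why rewriting a * -b as -(a * b) is observable: valueOf side effects expose the order in which the operands are converted to numbers.

// Hypothetical objects whose valueOf records the conversion order.
var log = [];
var a = { valueOf: function() { log.push('a'); return 2; } };
var b = { valueOf: function() { log.push('b'); return 3; } };

var x = a * -b;    // The unary minus converts b first, then * converts a.
// log is now ['b', 'a']

log = [];
var y = -(a * b);  // The multiplication converts a first, then b.
// log is now ['a', 'b']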
// Copyright 2006-2008 Google Inc. All Rights Reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include <stdlib.h>

#include "v8.h"

#include "global-handles.h"
#include "snapshot.h"
#include "top.h"
#include "cctest.h"

namespace v8 { namespace internal {
DECLARE_bool(gc_global);
DECLARE_bool(always_compact);
} }

using namespace v8::internal;

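// Shared test context. InitializeVM() creates it on first use and enters it.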
static v8::Persistent<v8::Context> env;

static void InitializeVM() {
  if (env.IsEmpty()) env = v8::Context::New();
  v8::HandleScope scope;
  env->Enter();
}


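// Fills a MarkingStack with heap object addresses until it is full, then
// pops them all and checks that they come back in reverse (LIFO) order.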
TEST(MarkingStack) {
  int mem_size = 20 * kPointerSize;
  byte* mem = NewArray<byte>(mem_size);
  Address low = reinterpret_cast<Address>(mem);
  Address high = low + mem_size;
  MarkingStack s;
  s.Initialize(low, high);

  Address address = NULL;
  while (!s.is_full()) {
    s.Push(HeapObject::FromAddress(address));
    address += kPointerSize;
  }

  while (!s.is_empty()) {
    Address value = s.Pop()->address();
    address -= kPointerSize;
    CHECK_EQ(address, value);
  }

  CHECK_EQ(NULL, address);
  DeleteArray(mem);
}


TEST(Promotion) {
  // Test that some objects in new space are promoted to the old space.

  // Ensure that we get a compacting collection so that objects are promoted
  // from new space.
  FLAG_gc_global = true;
  FLAG_always_compact = true;
  Snapshot::DisableInternal();
  Heap::ConfigureHeap(2*256*KB, 4*MB);

  InitializeVM();

  v8::HandleScope sc;

  // Allocate a fixed array in the new space.
  int array_size =
      (Heap::MaxHeapObjectSize() - Array::kHeaderSize) / (kPointerSize * 4);
  Object* obj = Heap::AllocateFixedArray(array_size);
  CHECK(!obj->IsFailure());

  Handle<FixedArray> array(FixedArray::cast(obj));

  // The array should be in the new space.
  CHECK(Heap::InSpace(*array, NEW_SPACE));

  // Call the mark-compact collector so the array becomes an old object.
  CHECK(Heap::CollectGarbage(0, OLD_SPACE));

  // The array now sits in the old space.
  CHECK(Heap::InSpace(*array, OLD_SPACE));
}


TEST(NoPromotion) {
  Snapshot::DisableInternal();
  Heap::ConfigureHeap(2*256*KB, 4*MB);

  // Test that objects in new space are not promoted when the old space
  // is full.
  InitializeVM();

  v8::HandleScope sc;

  // Do a mark-compact GC to shrink the heap.
  CHECK(Heap::CollectGarbage(0, OLD_SPACE));

  // Allocate a big fixed array in the new space.
  int size = (Heap::MaxHeapObjectSize() - Array::kHeaderSize) / kPointerSize;
  Object* obj = Heap::AllocateFixedArray(size);

  Handle<FixedArray> array(FixedArray::cast(obj));

  // The array stays in the new space.
  CHECK(Heap::InSpace(*array, NEW_SPACE));

  // Allocate objects in the old space until it runs out of memory.
  FixedArray* host = *array;
  while (true) {
    Object* obj = Heap::AllocateFixedArray(100, TENURED);
    if (obj->IsFailure()) break;

    host->set(0, obj);
    host = FixedArray::cast(obj);
  }

  // Call mark-compact GC; it should pass.
  CHECK(Heap::CollectGarbage(0, OLD_SPACE));

  // The array should not be promoted because the old space is full.
  CHECK(Heap::InSpace(*array, NEW_SPACE));
}


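// Fills new space and map space until allocation fails, checks that a GC of
// the corresponding space lets allocation succeed again, and verifies that
// objects reachable from the global object survive full mark-compact
// collections.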
TEST(MarkCompactCollector) {
  InitializeVM();

  v8::HandleScope sc;
  // Call mark-compact when the heap is empty.
  CHECK(Heap::CollectGarbage(0, OLD_SPACE));

  // Keep allocating garbage in new space until it fails.
  const int ARRAY_SIZE = 100;
  Object* array;
  do {
    array = Heap::AllocateFixedArray(ARRAY_SIZE);
  } while (!array->IsFailure());
  CHECK(Heap::CollectGarbage(0, NEW_SPACE));

  array = Heap::AllocateFixedArray(ARRAY_SIZE);
  CHECK(!array->IsFailure());

  // Keep allocating maps until it fails.
  Object* mapp;
  do {
    mapp = Heap::AllocateMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
  } while (!mapp->IsFailure());
  CHECK(Heap::CollectGarbage(0, MAP_SPACE));
  mapp = Heap::AllocateMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
  CHECK(!mapp->IsFailure());

  // Allocate a function named "theFunction" and install it on the global
  // object.
  String* func_name = String::cast(Heap::LookupAsciiSymbol("theFunction"));
  SharedFunctionInfo* function_share =
      SharedFunctionInfo::cast(Heap::AllocateSharedFunctionInfo(func_name));
  JSFunction* function =
      JSFunction::cast(Heap::AllocateFunction(*Top::function_map(),
                                              function_share,
                                              Heap::undefined_value()));
  Map* initial_map =
      Map::cast(Heap::AllocateMap(JS_OBJECT_TYPE, JSObject::kHeaderSize));
  function->set_initial_map(initial_map);
  Top::context()->global()->SetProperty(func_name, function, NONE);

  JSObject* obj = JSObject::cast(Heap::AllocateJSObject(function));
  CHECK(Heap::CollectGarbage(0, OLD_SPACE));

  // The function should survive the collection and still be installed on
  // the global object.
  func_name = String::cast(Heap::LookupAsciiSymbol("theFunction"));
  CHECK(Top::context()->global()->HasLocalProperty(func_name));
  Object* func_value = Top::context()->global()->GetProperty(func_name);
  CHECK(func_value->IsJSFunction());
  function = JSFunction::cast(func_value);

  obj = JSObject::cast(Heap::AllocateJSObject(function));
  String* obj_name = String::cast(Heap::LookupAsciiSymbol("theObject"));
  Top::context()->global()->SetProperty(obj_name, obj, NONE);
  String* prop_name = String::cast(Heap::LookupAsciiSymbol("theSlot"));
  obj->SetProperty(prop_name, Smi::FromInt(23), NONE);

  CHECK(Heap::CollectGarbage(0, OLD_SPACE));

  // The object and its property should survive the collection as well.
  obj_name = String::cast(Heap::LookupAsciiSymbol("theObject"));
  CHECK(Top::context()->global()->HasLocalProperty(obj_name));
  CHECK(Top::context()->global()->GetProperty(obj_name)->IsJSObject());
  obj = JSObject::cast(Top::context()->global()->GetProperty(obj_name));
  prop_name = String::cast(Heap::LookupAsciiSymbol("theSlot"));
  CHECK(obj->GetProperty(prop_name) == Smi::FromInt(23));
}


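// Counters and callbacks used by the GCCallback test below. A full GC must
// run the prologue and epilogue callbacks in strictly alternating order.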
static int gc_starts = 0;
static int gc_ends = 0;

static void GCPrologueCallbackFunc() {
  CHECK(gc_starts == gc_ends);
  gc_starts++;
}


static void GCEpilogueCallbackFunc() {
  CHECK(gc_starts == gc_ends + 1);
  gc_ends++;
}


TEST(GCCallback) {
  InitializeVM();

  Heap::SetGlobalGCPrologueCallback(&GCPrologueCallbackFunc);
  Heap::SetGlobalGCEpilogueCallback(&GCEpilogueCallbackFunc);

  // Scavenge does not call GC callback functions.
  Heap::PerformScavenge();

  CHECK_EQ(0, gc_starts);
  CHECK_EQ(gc_ends, gc_starts);

  // A full garbage collection invokes both callbacks exactly once.
  CHECK(Heap::CollectGarbage(0, OLD_SPACE));
  CHECK_EQ(1, gc_starts);
  CHECK_EQ(gc_ends, gc_starts);
}


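// Counts how many weak global handles have had their callback invoked.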
static int NumberOfWeakCalls = 0;
static void WeakPointerCallback(v8::Persistent<v8::Object> handle, void* id) {
  NumberOfWeakCalls++;
}

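// Creates two object groups that reference each other in a cycle and keeps
// them alive via a strong root handle. The weak callbacks should only fire
// once the root itself has been made weak and a full GC has run.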
TEST(ObjectGroups) {
  InitializeVM();

  NumberOfWeakCalls = 0;
  v8::HandleScope handle_scope;

  Handle<Object> g1s1 =
      GlobalHandles::Create(Heap::AllocateFixedArray(1));
  Handle<Object> g1s2 =
      GlobalHandles::Create(Heap::AllocateFixedArray(1));
  GlobalHandles::MakeWeak(g1s1.location(),
                          reinterpret_cast<void*>(1234),
                          &WeakPointerCallback);
  GlobalHandles::MakeWeak(g1s2.location(),
                          reinterpret_cast<void*>(1234),
                          &WeakPointerCallback);

  Handle<Object> g2s1 =
      GlobalHandles::Create(Heap::AllocateFixedArray(1));
  Handle<Object> g2s2 =
      GlobalHandles::Create(Heap::AllocateFixedArray(1));
  GlobalHandles::MakeWeak(g2s1.location(),
                          reinterpret_cast<void*>(1234),
                          &WeakPointerCallback);
  GlobalHandles::MakeWeak(g2s2.location(),
                          reinterpret_cast<void*>(1234),
                          &WeakPointerCallback);

  Handle<Object> root = GlobalHandles::Create(*g1s1);  // Make a root.

  // Connect groups 1 and 2 to make a cycle.
  Handle<FixedArray>::cast(g1s2)->set(0, *g2s2);
  Handle<FixedArray>::cast(g2s1)->set(0, *g1s1);

  GlobalHandles::AddToGroup(reinterpret_cast<void*>(1), g1s1.location());
  GlobalHandles::AddToGroup(reinterpret_cast<void*>(1), g1s2.location());
  GlobalHandles::AddToGroup(reinterpret_cast<void*>(2), g2s1.location());
  GlobalHandles::AddToGroup(reinterpret_cast<void*>(2), g2s2.location());
  // Do a full GC.
  CHECK(Heap::CollectGarbage(0, OLD_SPACE));

  // All objects should be alive.
  CHECK_EQ(0, NumberOfWeakCalls);

  // Weaken the root.
  GlobalHandles::MakeWeak(root.location(),
                          reinterpret_cast<void*>(1234),
                          &WeakPointerCallback);

  // The groups are deleted by the GC, so rebuild them.
  GlobalHandles::AddToGroup(reinterpret_cast<void*>(1), g1s1.location());
  GlobalHandles::AddToGroup(reinterpret_cast<void*>(1), g1s2.location());
  GlobalHandles::AddToGroup(reinterpret_cast<void*>(2), g2s1.location());
  GlobalHandles::AddToGroup(reinterpret_cast<void*>(2), g2s2.location());

  CHECK(Heap::CollectGarbage(0, OLD_SPACE));

  // All objects should be gone: 5 global handles in total.
  CHECK_EQ(5, NumberOfWeakCalls);
}