Remove heap protection support.

It does not currently work, and when it did work, we never got it
fast enough to be useful.

R=kmillikin@chromium.org

Review URL: http://codereview.chromium.org/7324051

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@8601 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
This commit is contained in:
ager@chromium.org 2011-07-11 14:03:21 +00:00
parent 6cb055ae5d
commit 15429da469
17 changed files with 0 additions and 345 deletions

View File

@ -490,19 +490,6 @@ DEFINE_bool(sliding_state_window, false,
DEFINE_string(logfile, "v8.log", "Specify the name of the log file.")
DEFINE_bool(ll_prof, false, "Enable low-level linux profiler.")
//
// Heap protection flags
// Using heap protection requires ENABLE_LOGGING_AND_PROFILING as well.
//
#ifdef ENABLE_HEAP_PROTECTION
#undef FLAG
#define FLAG FLAG_FULL
DEFINE_bool(protect_heap, false,
"Protect/unprotect V8's heap when leaving/entring the VM.")
#endif
//
// Disassembler only flags
//

View File

@ -543,11 +543,6 @@ Handle<Object> SetAccessor(Handle<JSObject> obj, Handle<AccessorInfo> info) {
// associated with the wrapper and get rid of both the wrapper and the
// handle.
static void ClearWrapperCache(Persistent<v8::Value> handle, void*) {
#ifdef ENABLE_HEAP_PROTECTION
// Weak reference callbacks are called as if from outside V8. We
// need to reeenter to unprotect the heap.
VMState state(OTHER);
#endif
Handle<Object> cache = Utils::OpenHandle(*handle);
JSValue* wrapper = JSValue::cast(*cache);
Foreign* foreign = Script::cast(wrapper->value())->wrapper();

View File

@ -5213,28 +5213,6 @@ void Heap::Shrink() {
}
#ifdef ENABLE_HEAP_PROTECTION
void Heap::Protect() {
if (HasBeenSetup()) {
AllSpaces spaces;
for (Space* space = spaces.next(); space != NULL; space = spaces.next())
space->Protect();
}
}
void Heap::Unprotect() {
if (HasBeenSetup()) {
AllSpaces spaces;
for (Space* space = spaces.next(); space != NULL; space = spaces.next())
space->Unprotect();
}
}
#endif
void Heap::AddGCPrologueCallback(GCPrologueCallback callback, GCType gc_type) {
ASSERT(callback != NULL);
GCPrologueCallbackPair pair(callback, gc_type);

View File

@ -409,12 +409,6 @@ class Heap {
// Uncommit unused semi space.
bool UncommitFromSpace() { return new_space_.UncommitFromSpace(); }
#ifdef ENABLE_HEAP_PROTECTION
// Protect/unprotect the heap by marking all spaces read-only/writable.
void Protect();
void Unprotect();
#endif
// Allocates and initializes a new JavaScript object based on a
// constructor.
// Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation

View File

@ -166,23 +166,6 @@ void OS::Free(void* address, const size_t size) {
}
#ifdef ENABLE_HEAP_PROTECTION
void OS::Protect(void* address, size_t size) {
// TODO(1240712): mprotect has a return value which is ignored here.
mprotect(address, size, PROT_READ);
}
void OS::Unprotect(void* address, size_t size, bool is_executable) {
// TODO(1240712): mprotect has a return value which is ignored here.
int prot = PROT_READ | PROT_WRITE | (is_executable ? PROT_EXEC : 0);
mprotect(address, size, prot);
}
#endif
void OS::Sleep(int milliseconds) {
unsigned int ms = static_cast<unsigned int>(milliseconds);
usleep(1000 * ms);

View File

@ -181,20 +181,6 @@ void OS::Free(void* buf, const size_t length) {
}
#ifdef ENABLE_HEAP_PROTECTION
void OS::Protect(void* address, size_t size) {
UNIMPLEMENTED();
}
void OS::Unprotect(void* address, size_t size, bool is_executable) {
UNIMPLEMENTED();
}
#endif
void OS::Sleep(int milliseconds) {
unsigned int ms = static_cast<unsigned int>(milliseconds);
usleep(1000 * ms);

View File

@ -390,23 +390,6 @@ void OS::Free(void* address, const size_t size) {
}
#ifdef ENABLE_HEAP_PROTECTION
void OS::Protect(void* address, size_t size) {
// TODO(1240712): mprotect has a return value which is ignored here.
mprotect(address, size, PROT_READ);
}
void OS::Unprotect(void* address, size_t size, bool is_executable) {
// TODO(1240712): mprotect has a return value which is ignored here.
int prot = PROT_READ | PROT_WRITE | (is_executable ? PROT_EXEC : 0);
mprotect(address, size, prot);
}
#endif
void OS::Sleep(int milliseconds) {
unsigned int ms = static_cast<unsigned int>(milliseconds);
usleep(1000 * ms);

View File

@ -169,20 +169,6 @@ void OS::Free(void* address, const size_t size) {
}
#ifdef ENABLE_HEAP_PROTECTION
void OS::Protect(void* address, size_t size) {
UNIMPLEMENTED();
}
void OS::Unprotect(void* address, size_t size, bool is_executable) {
UNIMPLEMENTED();
}
#endif
void OS::Sleep(int milliseconds) {
usleep(1000 * milliseconds);
}

View File

@ -217,20 +217,6 @@ void OS::Free(void* buf, const size_t length) {
}
#ifdef ENABLE_HEAP_PROTECTION
void OS::Protect(void* address, size_t size) {
UNIMPLEMENTED();
}
void OS::Unprotect(void* address, size_t size, bool is_executable) {
UNIMPLEMENTED();
}
#endif
void OS::Sleep(int milliseconds) {
UNIMPLEMENTED();
}

View File

@ -179,20 +179,6 @@ void OS::Free(void* buf, const size_t length) {
}
#ifdef ENABLE_HEAP_PROTECTION
void OS::Protect(void* address, size_t size) {
UNIMPLEMENTED();
}
void OS::Unprotect(void* address, size_t size, bool is_executable) {
UNIMPLEMENTED();
}
#endif
void OS::Sleep(int milliseconds) {
unsigned int ms = static_cast<unsigned int>(milliseconds);
usleep(1000 * ms);

View File

@ -192,23 +192,6 @@ void OS::Free(void* address, const size_t size) {
}
#ifdef ENABLE_HEAP_PROTECTION
void OS::Protect(void* address, size_t size) {
// TODO(1240712): mprotect has a return value which is ignored here.
mprotect(address, size, PROT_READ);
}
void OS::Unprotect(void* address, size_t size, bool is_executable) {
// TODO(1240712): mprotect has a return value which is ignored here.
int prot = PROT_READ | PROT_WRITE | (is_executable ? PROT_EXEC : 0);
mprotect(address, size, prot);
}
#endif
void OS::Sleep(int milliseconds) {
useconds_t ms = static_cast<useconds_t>(milliseconds);
usleep(1000 * ms);

View File

@ -939,25 +939,6 @@ void OS::Free(void* address, const size_t size) {
}
#ifdef ENABLE_HEAP_PROTECTION
void OS::Protect(void* address, size_t size) {
// TODO(1240712): VirtualProtect has a return value which is ignored here.
DWORD old_protect;
VirtualProtect(address, size, PAGE_READONLY, &old_protect);
}
void OS::Unprotect(void* address, size_t size, bool is_executable) {
// TODO(1240712): VirtualProtect has a return value which is ignored here.
DWORD new_protect = is_executable ? PAGE_EXECUTE_READWRITE : PAGE_READWRITE;
DWORD old_protect;
VirtualProtect(address, size, new_protect, &old_protect);
}
#endif
void OS::Sleep(int milliseconds) {
::Sleep(milliseconds);
}

View File

@ -206,12 +206,6 @@ class OS {
// Get the Alignment guaranteed by Allocate().
static size_t AllocateAlignment();
#ifdef ENABLE_HEAP_PROTECTION
// Protect/unprotect a block of memory by marking it read-only/writable.
static void Protect(void* address, size_t size);
static void Unprotect(void* address, size_t size, bool is_executable);
#endif
// Returns an indication of whether a pointer is in a space that
// has been allocated by Allocate(). This method may conservatively
// always return false, but giving more accurate information may

View File

@ -378,35 +378,6 @@ bool MemoryAllocator::InInitialChunk(Address address) {
}
#ifdef ENABLE_HEAP_PROTECTION
void MemoryAllocator::Protect(Address start, size_t size) {
OS::Protect(start, size);
}
void MemoryAllocator::Unprotect(Address start,
size_t size,
Executability executable) {
OS::Unprotect(start, size, executable);
}
void MemoryAllocator::ProtectChunkFromPage(Page* page) {
int id = GetChunkId(page);
OS::Protect(chunks_[id].address(), chunks_[id].size());
}
void MemoryAllocator::UnprotectChunkFromPage(Page* page) {
int id = GetChunkId(page);
OS::Unprotect(chunks_[id].address(), chunks_[id].size(),
chunks_[id].owner()->executable() == EXECUTABLE);
}
#endif
// --------------------------------------------------------------------------
// PagedSpace

View File

@ -868,30 +868,6 @@ void PagedSpace::TearDown() {
}
#ifdef ENABLE_HEAP_PROTECTION
void PagedSpace::Protect() {
Page* page = first_page_;
while (page->is_valid()) {
Isolate::Current()->memory_allocator()->ProtectChunkFromPage(page);
page = Isolate::Current()->memory_allocator()->
FindLastPageInSameChunk(page)->next_page();
}
}
void PagedSpace::Unprotect() {
Page* page = first_page_;
while (page->is_valid()) {
Isolate::Current()->memory_allocator()->UnprotectChunkFromPage(page);
page = Isolate::Current()->memory_allocator()->
FindLastPageInSameChunk(page)->next_page();
}
}
#endif
void PagedSpace::MarkAllPagesClean() {
PageIterator it(this, PageIterator::ALL_PAGES);
while (it.has_next()) {
@ -1258,24 +1234,6 @@ void NewSpace::TearDown() {
}
#ifdef ENABLE_HEAP_PROTECTION
void NewSpace::Protect() {
heap()->isolate()->memory_allocator()->Protect(ToSpaceLow(), Capacity());
heap()->isolate()->memory_allocator()->Protect(FromSpaceLow(), Capacity());
}
void NewSpace::Unprotect() {
heap()->isolate()->memory_allocator()->Unprotect(ToSpaceLow(), Capacity(),
to_space_.executable());
heap()->isolate()->memory_allocator()->Unprotect(FromSpaceLow(), Capacity(),
from_space_.executable());
}
#endif
void NewSpace::Flip() {
SemiSpace tmp = from_space_;
from_space_ = to_space_;
@ -2809,31 +2767,6 @@ void LargeObjectSpace::TearDown() {
}
#ifdef ENABLE_HEAP_PROTECTION
void LargeObjectSpace::Protect() {
LargeObjectChunk* chunk = first_chunk_;
while (chunk != NULL) {
heap()->isolate()->memory_allocator()->Protect(chunk->address(),
chunk->size());
chunk = chunk->next();
}
}
void LargeObjectSpace::Unprotect() {
LargeObjectChunk* chunk = first_chunk_;
while (chunk != NULL) {
bool is_code = chunk->GetObject()->IsCode();
heap()->isolate()->memory_allocator()->Unprotect(chunk->address(),
chunk->size(), is_code ? EXECUTABLE : NOT_EXECUTABLE);
chunk = chunk->next();
}
}
#endif
MaybeObject* LargeObjectSpace::AllocateRawInternal(int requested_size,
int object_size,
Executability executable) {

View File

@ -380,12 +380,6 @@ class Space : public Malloced {
// (e.g. see LargeObjectSpace).
virtual intptr_t SizeOfObjects() { return Size(); }
#ifdef ENABLE_HEAP_PROTECTION
// Protect/unprotect the space by marking it read-only/writable.
virtual void Protect() = 0;
virtual void Unprotect() = 0;
#endif
#ifdef DEBUG
virtual void Print() = 0;
#endif
@ -641,17 +635,6 @@ class MemoryAllocator {
Page** last_page,
Page** last_page_in_use);
#ifdef ENABLE_HEAP_PROTECTION
// Protect/unprotect a block of memory by marking it read-only/writable.
inline void Protect(Address start, size_t size);
inline void Unprotect(Address start, size_t size,
Executability executable);
// Protect/unprotect a chunk given a page in the chunk.
inline void ProtectChunkFromPage(Page* page);
inline void UnprotectChunkFromPage(Page* page);
#endif
#ifdef DEBUG
// Reports statistic info of the space.
void ReportStatistics();
@ -1157,12 +1140,6 @@ class PagedSpace : public Space {
// Ensures that the capacity is at least 'capacity'. Returns false on failure.
bool EnsureCapacity(int capacity);
#ifdef ENABLE_HEAP_PROTECTION
// Protect/unprotect the space by marking it read-only/writable.
void Protect();
void Unprotect();
#endif
#ifdef DEBUG
// Print meta info and objects in this space.
virtual void Print();
@ -1392,12 +1369,6 @@ class SemiSpace : public Space {
bool Commit();
bool Uncommit();
#ifdef ENABLE_HEAP_PROTECTION
// Protect/unprotect the space by marking it read-only/writable.
virtual void Protect() {}
virtual void Unprotect() {}
#endif
#ifdef DEBUG
virtual void Print();
virtual void Verify();
@ -1628,12 +1599,6 @@ class NewSpace : public Space {
template <typename StringType>
inline void ShrinkStringAtAllocationBoundary(String* string, int len);
#ifdef ENABLE_HEAP_PROTECTION
// Protect/unprotect the space by marking it read-only/writable.
virtual void Protect();
virtual void Unprotect();
#endif
#ifdef DEBUG
// Verify the active semispace.
virtual void Verify();
@ -2296,12 +2261,6 @@ class LargeObjectSpace : public Space {
// may use some memory, leaving less for large objects.
virtual bool ReserveSpace(int bytes);
#ifdef ENABLE_HEAP_PROTECTION
// Protect/unprotect the space by marking it read-only/writable.
void Protect();
void Unprotect();
#endif
#ifdef DEBUG
virtual void Verify();
virtual void Print();

View File

@ -69,19 +69,6 @@ VMState::VMState(Isolate* isolate, StateTag tag)
#endif
isolate_->SetCurrentVMState(tag);
#ifdef ENABLE_HEAP_PROTECTION
if (FLAG_protect_heap) {
if (tag == EXTERNAL) {
// We are leaving V8.
ASSERT(previous_tag_ != EXTERNAL);
isolate_->heap()->Protect();
} else if (previous_tag_ = EXTERNAL) {
// We are entering V8.
isolate_->heap()->Unprotect();
}
}
#endif
}
@ -96,24 +83,7 @@ VMState::~VMState() {
}
#endif // ENABLE_LOGGING_AND_PROFILING
#ifdef ENABLE_HEAP_PROTECTION
StateTag tag = isolate_->current_vm_state();
#endif
isolate_->SetCurrentVMState(previous_tag_);
#ifdef ENABLE_HEAP_PROTECTION
if (FLAG_protect_heap) {
if (tag == EXTERNAL) {
// We are reentering V8.
ASSERT(previous_tag_ != EXTERNAL);
isolate_->heap()->Unprotect();
} else if (previous_tag_ == EXTERNAL) {
// We are leaving V8.
isolate_->heap()->Protect();
}
}
#endif // ENABLE_HEAP_PROTECTION
}
#endif // ENABLE_VMSTATE_TRACKING