diff --git a/src/flag-definitions.h b/src/flag-definitions.h
index 48ef1ac47e..fd4cb2ddf0 100644
--- a/src/flag-definitions.h
+++ b/src/flag-definitions.h
@@ -704,6 +704,7 @@ DEFINE_IMPLICATION(trace_detached_contexts, track_detached_contexts)
 #ifdef VERIFY_HEAP
 DEFINE_BOOL(verify_heap, false, "verify heap pointers before and after GC")
 #endif
+DEFINE_BOOL(move_object_start, false, "enable moving of object starts")
 DEFINE_BOOL(memory_reducer, true, "use memory reducer")
 DEFINE_BOOL(scavenge_reclaim_unmodified_objects, false,
             "remove unmodified and unreferenced objects")
diff --git a/src/heap/heap.cc b/src/heap/heap.cc
index 674b03b6da..0bd763bc76 100644
--- a/src/heap/heap.cc
+++ b/src/heap/heap.cc
@@ -3082,6 +3082,24 @@ void Heap::CreateFillerObjectAt(Address addr, int size) {
 }
 
 
+bool Heap::CanMoveObjectStart(HeapObject* object) {
+  if (!FLAG_move_object_start) return false;
+
+  Address address = object->address();
+
+  if (lo_space()->Contains(object)) return false;
+
+  Page* page = Page::FromAddress(address);
+  // Moving the object start is safe if ANY of the following holds:
+  // (1) the object is not in old space (those pages are not subject to
+  //     concurrent sweeping here), or
+  // (2) the page containing the object was already swept, or
+  // (3) concurrent sweeping of the page has completed. WasSwept() may still
+  //     be false in that case, as it is only set after all pages are swept.
+  return !InOldSpace(address) || page->WasSwept() || page->SweepingCompleted();
+}
+
+
 void Heap::AdjustLiveBytes(HeapObject* object, int by, InvocationMode mode) {
   if (incremental_marking()->IsMarking() &&
       Marking::IsBlack(Marking::MarkBitFrom(object->address()))) {
@@ -3094,6 +3112,55 @@ void Heap::AdjustLiveBytes(HeapObject* object, int by, InvocationMode mode) {
 }
 
 
+FixedArrayBase* Heap::LeftTrimFixedArray(FixedArrayBase* object,
+                                         int elements_to_trim) {
+  DCHECK(!object->IsFixedTypedArrayBase());
+  const int element_size = object->IsFixedArray() ? kPointerSize : kDoubleSize;
+  const int bytes_to_trim = elements_to_trim * element_size;
+  Map* map = object->map();
+
+  // For now this trick is only applied to objects in new and paged space.
+  // In large object space an object's start must coincide with the chunk
+  // start, so relocating the object start is not applicable there.
+  DCHECK(!lo_space()->Contains(object));
+  DCHECK(object->map() != fixed_cow_array_map());
+
+  STATIC_ASSERT(FixedArrayBase::kMapOffset == 0);
+  STATIC_ASSERT(FixedArrayBase::kLengthOffset == kPointerSize);
+  STATIC_ASSERT(FixedArrayBase::kHeaderSize == 2 * kPointerSize);
+
+  const int len = object->length();
+  DCHECK(elements_to_trim <= len);
+
+  // Compute the address at which the trimmed array will start.
+  Address new_start = object->address() + bytes_to_trim;
+
+  // Write a filler object over the trimmed-off prefix. In new space this
+  // write could technically be omitted, but debug-mode heap iteration must
+  // find a valid object at every address, so we play it safe and write it.
+  CreateFillerObjectAt(object->address(), bytes_to_trim);
+
+  // Initialize the header of the trimmed array. Left trimming is only done
+  // on pages that are not currently being swept concurrently (see
+  // CanMoveObjectStart), so these writes need no synchronization.
+  DCHECK(CanMoveObjectStart(object));
+  Object** former_start = HeapObject::RawField(object, 0);
+  int new_start_index = elements_to_trim * (element_size / kPointerSize);
+  former_start[new_start_index] = map;
+  former_start[new_start_index + 1] = Smi::FromInt(len - elements_to_trim);
+  FixedArrayBase* new_object =
+      FixedArrayBase::cast(HeapObject::FromAddress(new_start));
+
+  // Keep mark bits and live-byte counts consistent under incremental marking.
+  Marking::TransferMark(this, object->address(), new_start);
+  AdjustLiveBytes(new_object, -bytes_to_trim, Heap::CONCURRENT_TO_SWEEPER);
+
+  // Notify the heap profiler that the object's start (layout) has moved.
+  OnMoveEvent(new_object, object, new_object->Size());
+  return new_object;
+}
+
+
 // Force instantiation of templatized method.
 template void Heap::RightTrimFixedArray<Heap::SEQUENTIAL_TO_SWEEPER>(
     FixedArrayBase*, int);
diff --git a/src/heap/heap.h b/src/heap/heap.h
index 3919bdcc2c..0e5586e95e 100644
--- a/src/heap/heap.h
+++ b/src/heap/heap.h
@@ -835,9 +835,17 @@
   // when introducing gaps within pages.
   void CreateFillerObjectAt(Address addr, int size);
 
+  // Returns whether the start address of the given object may be relocated.
+  // Gated on --move_object_start; disallowed in large object space.
+  bool CanMoveObjectStart(HeapObject* object);
+
   // Maintain consistency of live bytes during incremental marking.
   void AdjustLiveBytes(HeapObject* object, int by, InvocationMode mode);
 
+  // Trim the given array from the left. Note that this relocates the object
+  // start and hence is only valid if there is only a single reference to it.
+  FixedArrayBase* LeftTrimFixedArray(FixedArrayBase* obj, int elements_to_trim);
+
   // Trim the given array from the right.
   template<Heap::InvocationMode mode>
   void RightTrimFixedArray(FixedArrayBase* obj, int elements_to_trim);
diff --git a/src/heap/mark-compact.cc b/src/heap/mark-compact.cc
index 81fc924050..8cd0c699d3 100644
--- a/src/heap/mark-compact.cc
+++ b/src/heap/mark-compact.cc
@@ -599,6 +599,43 @@ bool MarkCompactCollector::IsSweepingCompleted() {
 }
 
 
+void Marking::TransferMark(Heap* heap, Address old_start, Address new_start) {
+  // Only used when resizing an object; both addresses share one chunk.
+  DCHECK(MemoryChunk::FromAddress(old_start) ==
+         MemoryChunk::FromAddress(new_start));
+
+  if (!heap->incremental_marking()->IsMarking()) return;
+
+  // If the mark bit stays put there is nothing to transfer. The object's
+  // color is irrelevant in that case: its size is unchanged, so the live
+  // data count would be adjusted by zero anyway.
+  if (old_start == new_start) return;
+
+  MarkBit new_mark_bit = MarkBitFrom(new_start);
+  MarkBit old_mark_bit = MarkBitFrom(old_start);
+
+#ifdef DEBUG
+  ObjectColor old_color = Color(old_mark_bit);
+#endif
+
+  if (Marking::IsBlack(old_mark_bit)) {
+    Marking::BlackToWhite(old_mark_bit);
+    Marking::MarkBlack(new_mark_bit);
+    return;
+  } else if (Marking::IsGrey(old_mark_bit)) {
+    Marking::GreyToWhite(old_mark_bit);
+    heap->incremental_marking()->WhiteToGreyAndPush(
+        HeapObject::FromAddress(new_start), new_mark_bit);
+    heap->incremental_marking()->RestartIfNotMarking();
+  }
+
+#ifdef DEBUG
+  ObjectColor new_color = Color(new_mark_bit);
+  DCHECK(new_color == old_color);
+#endif
+}
+
+
 const char* AllocationSpaceName(AllocationSpace space) {
   switch (space) {
     case NEW_SPACE:
diff --git a/src/heap/mark-compact.h b/src/heap/mark-compact.h
index 322e8b8386..1ade4372f6 100644
--- a/src/heap/mark-compact.h
+++ b/src/heap/mark-compact.h
@@ -83,6 +83,13 @@
     markbit.Clear();
   }
 
+  // Clears both mark bits, turning a grey object white.
+  INLINE(static void GreyToWhite(MarkBit markbit)) {
+    DCHECK(IsGrey(markbit));
+    markbit.Clear();
+    markbit.Next().Clear();
+  }
+
   INLINE(static void BlackToGrey(MarkBit markbit)) {
     DCHECK(IsBlack(markbit));
     markbit.Next().Set();
@@ -113,6 +120,10 @@
     markbit.Next().Set();
   }
 
+  // Moves the mark of an object whose start address changed from old_start
+  // to new_start within the same memory chunk.
+  static void TransferMark(Heap* heap, Address old_start, Address new_start);
+
 #ifdef DEBUG
   enum ObjectColor {
     BLACK_OBJECT,