diff --git a/deps/v8/src/heap/heap-inl.h b/deps/v8/src/heap/heap-inl.h
index fdb1d7345b4d79..39110f6d58e17a 100644
--- a/deps/v8/src/heap/heap-inl.h
+++ b/deps/v8/src/heap/heap-inl.h
@@ -393,12 +393,35 @@ bool Heap::AllowedToBeMigrated(HeapObject* obj, AllocationSpace dst) {
   return false;
 }
 
-
 void Heap::CopyBlock(Address dst, Address src, int byte_size) {
   CopyWords(reinterpret_cast<Object**>(dst), reinterpret_cast<Object**>(src),
             static_cast<size_t>(byte_size / kPointerSize));
 }
 
+bool Heap::PurgeLeftTrimmedObject(Object** object) {
+  HeapObject* current = reinterpret_cast<HeapObject*>(*object);
+  const MapWord map_word = current->map_word();
+  if (current->IsFiller() && !map_word.IsForwardingAddress()) {
+#ifdef DEBUG
+    // We need to find a FixedArrayBase map after walking the fillers.
+    while (current->IsFiller()) {
+      Address next = reinterpret_cast<Address>(current);
+      if (current->map() == one_pointer_filler_map()) {
+        next += kPointerSize;
+      } else if (current->map() == two_pointer_filler_map()) {
+        next += 2 * kPointerSize;
+      } else {
+        next += current->Size();
+      }
+      current = reinterpret_cast<HeapObject*>(next);
+    }
+    DCHECK(current->IsFixedArrayBase());
+#endif  // DEBUG
+    *object = nullptr;
+    return true;
+  }
+  return false;
+}
 
 void Heap::MoveBlock(Address dst, Address src, int byte_size) {
   DCHECK(IsAligned(byte_size, kPointerSize));
diff --git a/deps/v8/src/heap/heap.h b/deps/v8/src/heap/heap.h
index 0afac311c4816e..529050c8bf51ce 100644
--- a/deps/v8/src/heap/heap.h
+++ b/deps/v8/src/heap/heap.h
@@ -590,6 +590,12 @@ class Heap {
   // jslimit_/real_jslimit_ variable in the StackGuard.
   void SetStackLimits();
 
+  // We cannot avoid stale handles to left-trimmed objects, but can only make
+  // sure all handles still needed are updated. Filter out a stale pointer
+  // and clear the slot to allow post processing of handles (needed because
+  // the sweeper might actually free the underlying page).
+  inline bool PurgeLeftTrimmedObject(Object** object);
+
   // Notifies the heap that is ok to start marking or other activities that
   // should not happen during deserialization.
   void NotifyDeserializationComplete();
diff --git a/deps/v8/src/heap/mark-compact.cc b/deps/v8/src/heap/mark-compact.cc
index e39ff83e9f381e..3a71578f713632 100644
--- a/deps/v8/src/heap/mark-compact.cc
+++ b/deps/v8/src/heap/mark-compact.cc
@@ -1650,31 +1650,7 @@ class RootMarkingVisitor : public ObjectVisitor {
 
     HeapObject* object = ShortCircuitConsString(p);
 
-    // We cannot avoid stale handles to left-trimmed objects, but can only make
-    // sure all handles still needed are updated. Filter out any stale pointers
-    // and clear the slot to allow post processing of handles (needed because
-    // the sweeper might actually free the underlying page).
-    if (object->IsFiller()) {
-#ifdef DEBUG
-      // We need to find a FixedArrayBase map after walking the fillers.
-      Heap* heap = collector_->heap();
-      HeapObject* current = object;
-      while (current->IsFiller()) {
-        Address next = reinterpret_cast<Address>(current);
-        if (current->map() == heap->one_pointer_filler_map()) {
-          next += kPointerSize;
-        } else if (current->map() == heap->two_pointer_filler_map()) {
-          next += 2 * kPointerSize;
-        } else {
-          next += current->Size();
-        }
-        current = reinterpret_cast<HeapObject*>(next);
-      }
-      DCHECK(current->IsFixedArrayBase());
-#endif  // DEBUG
-      *p = nullptr;
-      return;
-    }
+    if (collector_->heap()->PurgeLeftTrimmedObject(p)) return;
 
     MarkBit mark_bit = Marking::MarkBitFrom(object);
     if (Marking::IsBlackOrGrey(mark_bit)) return;
diff --git a/deps/v8/src/objects-inl.h b/deps/v8/src/objects-inl.h
index 3caf52bff4b086..3e91c2bac97051 100644
--- a/deps/v8/src/objects-inl.h
+++ b/deps/v8/src/objects-inl.h
@@ -1351,7 +1351,7 @@ Map* MapWord::ToMap() {
 }
 
 
-bool MapWord::IsForwardingAddress() {
+bool MapWord::IsForwardingAddress() const {
   return HAS_SMI_TAG(reinterpret_cast<Object*>(value_));
 }
 
diff --git a/deps/v8/src/objects.h b/deps/v8/src/objects.h
index 1c5743eb838918..5481b1834de762 100644
--- a/deps/v8/src/objects.h
+++ b/deps/v8/src/objects.h
@@ -1382,7 +1382,7 @@ class MapWord BASE_EMBEDDED {
   // True if this map word is a forwarding address for a scavenge
   // collection. Only valid during a scavenge collection (specifically,
   // when all map words are heap object pointers, i.e. not during a full GC).
-  inline bool IsForwardingAddress();
+  inline bool IsForwardingAddress() const;
 
   // Create a map word from a forwarding address.
   static inline MapWord FromForwardingAddress(HeapObject* object);
diff --git a/deps/v8/test/mjsunit/regress/regress-621869.js b/deps/v8/test/mjsunit/regress/regress-621869.js
new file mode 100644
index 00000000000000..ee1b58b0266032
--- /dev/null
+++ b/deps/v8/test/mjsunit/regress/regress-621869.js
@@ -0,0 +1,18 @@
+// Copyright 2016 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Flags: --expose-gc
+
+var o0 = [];
+var o1 = [];
+var cnt = 0;
+var only_scavenge = true;
+o1.__defineGetter__(0, function() {
+  if (cnt++ > 2) return;
+  o0.shift();
+  gc(only_scavenge);
+  o0.push((64));
+  o0.concat(o1);
+});
+o1[0];
\ No newline at end of file