deps: backport 8d6a228 from the v8 upstream
Original commit message:

    [heap] fix crash during the scavenge of ArrayBuffer

    Scavenger should not attempt to visit ArrayBuffer's storage; it is a
    user-supplied pointer that may have any alignment. Visiting it may
    result in a crash.

    BUG=
    R=jochen

    Review URL: https://codereview.chromium.org/1406133003

    Cr-Commit-Position: refs/heads/master@{#31611}

PR-URL: https://github.com/nodejs/node/pull/4259
Reviewed-By: Ali Ijaz Sheikh <ofrobots@google.com>
Reviewed-By: James M Snell <jasnell@gmail.com>
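
For context, the failure mode can be sketched with the public embedder API of this V8 line. The sketch below is illustrative and not part of the commit: `user_data` and `MakeExternalBuffer` are hypothetical names, and `v8::ArrayBuffer::New(isolate, data, byte_length)` is the externalized-backing-store overload this tree ships.

// Illustrative sketch only. An externalized ArrayBuffer wraps a
// user-supplied buffer; before this fix, a scavenge that promoted the
// buffer object could misread that raw pointer as a tagged heap pointer
// and crash while visiting it.
#include "include/v8.h"

static uint8_t user_data[8];  // arbitrary alignment, arbitrary contents

void MakeExternalBuffer(v8::Isolate* isolate) {
  v8::HandleScope scope(isolate);
  v8::Local<v8::ArrayBuffer> ab =
      v8::ArrayBuffer::New(isolate, user_data, sizeof(user_data));
  // Any new-space GC that promotes `ab` must skip its backing store.
  (void)ab;
}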
indutny authored and Michael Scovetta committed Apr 2, 2016
1 parent 05ad666 commit 2520ea6
Showing 3 changed files with 97 additions and 36 deletions.
104 changes: 68 additions & 36 deletions deps/v8/src/heap/heap.cc
@@ -1876,42 +1876,8 @@ Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor,
        // for pointers to from semispace instead of looking for pointers
        // to new space.
        DCHECK(!target->IsMap());
-        Address obj_address = target->address();
-
-        // We are not collecting slots on new space objects during mutation
-        // thus we have to scan for pointers to evacuation candidates when we
-        // promote objects. But we should not record any slots in non-black
-        // objects. Grey object's slots would be rescanned.
-        // White object might not survive until the end of collection
-        // it would be a violation of the invariant to record it's slots.
-        bool record_slots = false;
-        if (incremental_marking()->IsCompacting()) {
-          MarkBit mark_bit = Marking::MarkBitFrom(target);
-          record_slots = Marking::IsBlack(mark_bit);
-        }
-#if V8_DOUBLE_FIELDS_UNBOXING
-        LayoutDescriptorHelper helper(target->map());
-        bool has_only_tagged_fields = helper.all_fields_tagged();
-
-        if (!has_only_tagged_fields) {
-          for (int offset = 0; offset < size;) {
-            int end_of_region_offset;
-            if (helper.IsTagged(offset, size, &end_of_region_offset)) {
-              IterateAndMarkPointersToFromSpace(
-                  target, obj_address + offset,
-                  obj_address + end_of_region_offset, record_slots,
-                  &Scavenger::ScavengeObject);
-            }
-            offset = end_of_region_offset;
-          }
-        } else {
-#endif
-          IterateAndMarkPointersToFromSpace(target, obj_address,
-                                            obj_address + size, record_slots,
-                                            &Scavenger::ScavengeObject);
-#if V8_DOUBLE_FIELDS_UNBOXING
-        }
-#endif
+
+        IteratePointersToFromSpace(target, size, &Scavenger::ScavengeObject);
      }
    }

@@ -4438,6 +4404,72 @@ void Heap::IterateAndMarkPointersToFromSpace(HeapObject* object, Address start,
}


+void Heap::IteratePointersToFromSpace(HeapObject* target, int size,
+                                      ObjectSlotCallback callback) {
+  Address obj_address = target->address();
+
+  // We are not collecting slots on new space objects during mutation
+  // thus we have to scan for pointers to evacuation candidates when we
+  // promote objects. But we should not record any slots in non-black
+  // objects. Grey object's slots would be rescanned.
+  // White object might not survive until the end of collection
+  // it would be a violation of the invariant to record it's slots.
+  bool record_slots = false;
+  if (incremental_marking()->IsCompacting()) {
+    MarkBit mark_bit = Marking::MarkBitFrom(target);
+    record_slots = Marking::IsBlack(mark_bit);
+  }
+
+  // Do not scavenge JSArrayBuffer's contents
+  switch (target->ContentType()) {
+    case HeapObjectContents::kTaggedValues: {
+      IterateAndMarkPointersToFromSpace(target, obj_address, obj_address + size,
+                                        record_slots, callback);
+      break;
+    }
+    case HeapObjectContents::kMixedValues: {
+      if (target->IsFixedTypedArrayBase()) {
+        IterateAndMarkPointersToFromSpace(
+            target, obj_address + FixedTypedArrayBase::kBasePointerOffset,
+            obj_address + FixedTypedArrayBase::kHeaderSize, record_slots,
+            callback);
+      } else if (target->IsBytecodeArray()) {
+        IterateAndMarkPointersToFromSpace(
+            target, obj_address + BytecodeArray::kConstantPoolOffset,
+            obj_address + BytecodeArray::kHeaderSize, record_slots, callback);
+      } else if (target->IsJSArrayBuffer()) {
+        IterateAndMarkPointersToFromSpace(
+            target, obj_address,
+            obj_address + JSArrayBuffer::kByteLengthOffset + kPointerSize,
+            record_slots, callback);
+        IterateAndMarkPointersToFromSpace(
+            target, obj_address + JSArrayBuffer::kSize, obj_address + size,
+            record_slots, callback);
+#if V8_DOUBLE_FIELDS_UNBOXING
+      } else if (FLAG_unbox_double_fields) {
+        LayoutDescriptorHelper helper(target->map());
+        DCHECK(!helper.all_fields_tagged());
+
+        for (int offset = 0; offset < size;) {
+          int end_of_region_offset;
+          if (helper.IsTagged(offset, size, &end_of_region_offset)) {
+            IterateAndMarkPointersToFromSpace(
+                target, obj_address + offset,
+                obj_address + end_of_region_offset, record_slots, callback);
+          }
+          offset = end_of_region_offset;
+        }
+#endif
+      }
+      break;
+    }
+    case HeapObjectContents::kRawValues: {
+      break;
+    }
+  }
+}
+
+
void Heap::IterateRoots(ObjectVisitor* v, VisitMode mode) {
  IterateStrongRoots(v, mode);
  IterateWeakRoots(v, mode);
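
The JSArrayBuffer branch in the new function is the substance of the fix: it visits the tagged fields up to and including byte_length, skips the raw backing_store pointer, and resumes at kSize for any in-object fields that follow. A minimal sketch of that partition, with hypothetical offsets standing in for the real header constants:

// Illustrative sketch, not V8 source: which byte ranges of a JSArrayBuffer
// the scavenger may treat as tagged slots. The gap between the two ranges
// holds the user-supplied backing_store pointer and is never visited.
#include <utility>
#include <vector>

using Range = std::pair<int, int>;  // [begin, end) byte offsets in the object

std::vector<Range> TaggedRanges(int byte_length_offset, int header_size,
                                int object_size, int pointer_size) {
  return {
      {0, byte_length_offset + pointer_size},  // map ... byte_length
      {header_size, object_size},              // in-object fields past header
  };
}

For example, with hypothetical values byte_length_offset = 24, pointer_size = 8, header_size = 40, and object_size = 48, the visited ranges are [0, 32) and [40, 48); the eight bytes at [32, 40), the backing-store pointer, are skipped.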
3 changes: 3 additions & 0 deletions deps/v8/src/heap/heap.h
@@ -1237,6 +1237,9 @@ class Heap {

  // Iterate pointers to from semispace of new space found in memory interval
  // from start to end within |object|.
+  void IteratePointersToFromSpace(HeapObject* target, int size,
+                                  ObjectSlotCallback callback);
+
  void IterateAndMarkPointersToFromSpace(HeapObject* object, Address start,
                                         Address end, bool record_slots,
                                         ObjectSlotCallback callback);
26 changes: 26 additions & 0 deletions deps/v8/test/cctest/test-api.cc
@@ -14191,6 +14191,32 @@ THREADED_TEST(SkipArrayBufferBackingStoreDuringGC) {
}


+THREADED_TEST(SkipArrayBufferDuringScavenge) {
+  LocalContext env;
+  v8::Isolate* isolate = env->GetIsolate();
+  v8::HandleScope handle_scope(isolate);
+
+  // Make sure the pointer looks like a heap object
+  Local<v8::Object> tmp = v8::Object::New(isolate);
+  uint8_t* store_ptr =
+      reinterpret_cast<uint8_t*>(*reinterpret_cast<uintptr_t*>(*tmp));
+
+  // Make `store_ptr` point to from space
+  CcTest::heap()->CollectGarbage(i::NEW_SPACE);
+
+  // Create ArrayBuffer with pointer-that-cannot-be-visited in the backing store
+  Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, store_ptr, 8);
+
+  // Should not crash,
+  // i.e. backing store pointer should not be treated as a heap object pointer
+  CcTest::heap()->CollectGarbage(i::NEW_SPACE);  // in survivor space now
+  CcTest::heap()->CollectGarbage(i::NEW_SPACE);  // in old gen now
+
+  // Use `ab` to silence compiler warning
+  CHECK_EQ(ab->GetContents().Data(), store_ptr);
+}
+
+
THREADED_TEST(SharedUint8Array) {
  i::FLAG_harmony_sharedarraybuffer = true;
  TypedArrayTestHelper<uint8_t, v8::Uint8Array, i::FixedUint8Array,
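
A note on the test's pointer forging, since the double cast is easy to misread: a v8::Local is a pointer to a handle slot that holds the object's tagged address, so dereferencing twice yields a value shaped exactly like a heap pointer. After the first scavenge the object moves and `store_ptr` is left pointing into from space, which is precisely the pointer a buggy scavenger would chase. Pulled out as a hypothetical helper (same casts as the test):

#include <stdint.h>

#include "include/v8.h"

// Sketch of the test's trick: recover the raw tagged address behind a
// Local handle. The Local points at a slot holding the object's tagged
// address, so two dereferences produce a heap-pointer-like value.
uint8_t* ForgeHeapLikePointer(v8::Local<v8::Object> obj) {
  return reinterpret_cast<uint8_t*>(*reinterpret_cast<uintptr_t*>(*obj));
}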
