2 changes: 1 addition & 1 deletion lib/Common/Core/FinalizableObject.h
@@ -15,7 +15,7 @@ class FinalizableObject : public IRecyclerVisitedObject
         Mark(static_cast<Recycler*>(recycler));
     }
 
-    void Trace(IRecyclerHeapMarkingContext* markingContext) final
+    void Trace(IRecyclerHeapMarkingContext* markingContext)
     {
         AssertMsg(false, "Trace called on object that isn't implemented by the host");
     }
19 changes: 19 additions & 0 deletions lib/Common/Memory/LargeHeapBlock.cpp
@@ -553,6 +553,16 @@ LargeHeapBlock::AllocFreeListEntry(DECLSPEC_GUARD_OVERFLOW size_t size, ObjectIn
 #ifdef RECYCLER_WRITE_BARRIER
     header->hasWriteBarrier = (attributes & WithBarrierBit) == WithBarrierBit;
 #endif
+
+    if ((attributes & (FinalizeBit | TrackBit)) != 0)
+    {
+        // Make sure a valid vtable is installed as once the attributes have been set this allocation may be traced by background marking
+        allocObject = (char *)new (allocObject) DummyVTableObject();
+#if defined(_M_ARM32_OR_ARM64)
+        // On ARM, make sure the v-table write is performed before setting the attributes
+        MemoryBarrier();
+#endif
+    }
     header->SetAttributes(this->heapInfo->recycler->Cookie, (attributes & StoredObjectInfoBitMask));
     header->markOnOOMRescan = false;
     header->SetNext(this->heapInfo->recycler->Cookie, nullptr);
@@ -618,6 +628,15 @@ LargeHeapBlock::Alloc(DECLSPEC_GUARD_OVERFLOW size_t size, ObjectInfoBits attrib
 #ifdef RECYCLER_WRITE_BARRIER
     header->hasWriteBarrier = (attributes&WithBarrierBit) == WithBarrierBit;
 #endif
+    if ((attributes & (FinalizeBit | TrackBit)) != 0)
+    {
+        // Make sure a valid vtable is installed as once the attributes have been set this allocation may be traced by background marking
+        allocObject = (char *)new (allocObject) DummyVTableObject();
+#if defined(_M_ARM32_OR_ARM64)
+        // On ARM, make sure the v-table write is performed before setting the attributes
+        MemoryBarrier();
+#endif
+    }
     header->SetAttributes(recycler->Cookie, (attributes & StoredObjectInfoBitMask));
     HeaderList()[allocCount++] = header;
     finalizeCount += ((attributes & FinalizeBit) != 0);
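Both additions above follow the same pattern: give the raw allocation a valid vtable before the Finalize/Track attribute bits become visible to the concurrent background marker, and on ARM's weaker memory model fence between the two stores. Below is a minimal standalone sketch of that ordering — illustrative only, not ChakraCore code — using hypothetical names (Traceable, Slot, Publish) and C++ atomics in place of the recycler's attribute word and MemoryBarrier().

// Illustrative only -- not ChakraCore code. Shows the "vtable first, then attributes"
// publication order described by the comments in the diff above.
#include <atomic>
#include <cstddef>
#include <new>

struct Traceable
{
    virtual void Trace() {}          // safe no-op, playing the role of DummyVTableObject
    virtual ~Traceable() = default;
};

struct Slot
{
    alignas(Traceable) unsigned char storage[64]; // raw recycled memory (hypothetical)
    std::atomic<bool> traceable { false };        // stands in for the Finalize/Track attribute bits
};

void Publish(Slot& slot)
{
    // 1. Install a valid vtable in the raw memory via placement new.
    ::new (static_cast<void*>(slot.storage)) Traceable();

    // 2. Release ordering plays the role of MemoryBarrier() on ARM: the v-table write
    //    must become visible no later than the attribute write.
    slot.traceable.store(true, std::memory_order_release);
}

void BackgroundMark(Slot& slot)
{
    // The marker only calls through the vtable after it has observed the attribute.
    if (slot.traceable.load(std::memory_order_acquire))
    {
        reinterpret_cast<Traceable*>(slot.storage)->Trace();
    }
}

Without the ordering (or the MemoryBarrier() in the diff), a marker that already sees the attribute bits could call through whatever garbage occupies the object's first word. The diff fences only under _M_ARM32_OR_ARM64, presumably because the stronger store ordering on x86/x64 already keeps the two writes in order.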
55 changes: 30 additions & 25 deletions lib/Common/Memory/MarkContext.inl
@@ -205,42 +205,47 @@ void MarkContext::ProcessMark()
     while (!markStack.IsEmpty() || !preciseStack.IsEmpty())
 #endif
     {
+        // It is possible that when the stacks were split, only one of them had any chunks to process.
+        // If that is the case, one of the stacks might not be initialized, so we must check !IsEmpty before popping.
+        if (!markStack.IsEmpty())
+        {
 #if defined(_M_IX86) || defined(_M_X64)
-        MarkCandidate current, next;
+            MarkCandidate current, next;
 
-        while (markStack.Pop(&current))
-        {
-            // Process entries and prefetch as we go.
-            while (markStack.Pop(&next))
+            while (markStack.Pop(&current))
             {
-                // Prefetch the next entry so it's ready when we need it.
-                _mm_prefetch((char *)next.obj, _MM_HINT_T0);
+                // Process entries and prefetch as we go.
+                while (markStack.Pop(&next))
+                {
+                    // Prefetch the next entry so it's ready when we need it.
+                    _mm_prefetch((char *)next.obj, _MM_HINT_T0);
 
-                // Process the previously retrieved entry.
-                ScanObject<parallel, interior>(current.obj, current.byteCount);
+                    // Process the previously retrieved entry.
+                    ScanObject<parallel, interior>(current.obj, current.byteCount);
 
-                _mm_prefetch((char *)*(next.obj), _MM_HINT_T0);
+                    _mm_prefetch((char *)*(next.obj), _MM_HINT_T0);
 
-                current = next;
-            }
+                    current = next;
+                }
 
-            // The stack is empty, but we still have a previously retrieved entry; process it now.
-            ScanObject<parallel, interior>(current.obj, current.byteCount);
+                // The stack is empty, but we still have a previously retrieved entry; process it now.
+                ScanObject<parallel, interior>(current.obj, current.byteCount);
 
-            // Processing that entry may have generated more entries in the mark stack, so continue the loop.
-        }
+                // Processing that entry may have generated more entries in the mark stack, so continue the loop.
+            }
 #else
-        // _mm_prefetch intrinsic is specific to Intel platforms.
-        // CONSIDER: There does seem to be a compiler intrinsic for prefetch on ARM,
-        // however, the information on this is scarce, so for now just don't do prefetch on ARM.
-        MarkCandidate current;
+            // _mm_prefetch intrinsic is specific to Intel platforms.
+            // CONSIDER: There does seem to be a compiler intrinsic for prefetch on ARM,
+            // however, the information on this is scarce, so for now just don't do prefetch on ARM.
+            MarkCandidate current;
 
-        while (markStack.Pop(&current))
-        {
-            ScanObject<parallel, interior>(current.obj, current.byteCount);
-        }
+            while (markStack.Pop(&current))
+            {
+                ScanObject<parallel, interior>(current.obj, current.byteCount);
+            }
 #endif
 
+        }
 
         Assert(markStack.IsEmpty());
 
 #ifdef RECYCLER_VISITED_HOST
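Apart from the new !IsEmpty() guard and the re-indentation, the loop body is unchanged: it still pops one entry ahead so the next object's memory can be prefetched while the current one is scanned. Here is a sketch of that pop-ahead idiom in isolation — illustrative only, written against a plain std::vector rather than the recycler's PageStack; Entry, Scan, and Drain are hypothetical names.

// Illustrative only -- the pop-ahead prefetch idiom from ProcessMark, simplified.
#include <cstddef>
#include <vector>
#include <xmmintrin.h>   // _mm_prefetch, x86/x64 only

struct Entry { void* obj; std::size_t byteCount; };

// Placeholder for ScanObject<parallel, interior>: tracing an object may push more work.
void Scan(void*, std::size_t, std::vector<Entry>&) { /* trace outgoing pointers here */ }

void Drain(std::vector<Entry>& work)
{
    // Mirrors the new !markStack.IsEmpty() guard: never pop a stack that may be empty.
    while (!work.empty())
    {
        Entry current = work.back();
        work.pop_back();

        while (!work.empty())
        {
            // Pop the next entry and start pulling its memory into cache...
            Entry next = work.back();
            work.pop_back();
            _mm_prefetch(static_cast<char*>(next.obj), _MM_HINT_T0);

            // ...while the previously popped entry is scanned.
            Scan(current.obj, current.byteCount, work);
            current = next;
        }

        // The stack drained with one entry still in hand; scan it, then loop again in
        // case that scan pushed new entries.
        Scan(current.obj, current.byteCount, work);
    }
}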
15 changes: 15 additions & 0 deletions lib/Common/Memory/Recycler.cpp
@@ -7300,6 +7300,21 @@ Recycler::FillCheckPad(void * address, size_t size, size_t alignedAllocSize, boo
         addressToVerify = ((char*) address + size);
         sizeToVerify = (alignedAllocSize - size);
     }
+    else
+    {
+        // It could be the case that an uninitialized object already has a dummy vtable installed
+        // at the beginning of the address. If that is the case, we can't verify the fill pattern
+        // on that memory, since it's already been initialized.
+        // Note that FillPadNoCheck will skip over the first sizeof(FreeObject) bytes, which
+        // prevents overwriting of the vtable.
+        static_assert(sizeof(DummyVTableObject) == sizeof(void*), "Incorrect size for a DummyVTableObject - it must contain a single v-table pointer");
+        DummyVTableObject dummy;
+        if ((*(void**)(&dummy)) == *((void**)address))
+        {
+            addressToVerify = (char*)address + sizeof(DummyVTableObject);
+            sizeToVerify = alignedAllocSize - sizeof(DummyVTableObject);
+        }
+    }
 
     // Actually this is filling the non-pad to zero
     VerifyCheckFill(addressToVerify, sizeToVerify - sizeof(size_t));
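The added else branch decides whether the first pointer-sized word of the allocation already holds DummyVTableObject's v-table pointer by constructing a throwaway dummy on the stack and comparing first words. A hedged sketch of that idiom with a hypothetical polymorphic type follows; on the single-inheritance ABIs involved, the vptr typically occupies the first word of the object.

// Illustrative only -- testing the first word of an allocation for a known v-table
// pointer. Dummy and HasDummyVTable are hypothetical stand-ins.
#include <cstring>

struct Dummy { virtual void NoOp() {} };
static_assert(sizeof(Dummy) == sizeof(void*), "Dummy should be nothing but a vptr");

bool HasDummyVTable(const void* address)
{
    Dummy probe;                                     // throwaway instance whose vptr we can read
    void* expected;
    void* actual;
    std::memcpy(&expected, &probe, sizeof(void*));   // first word of the probe: its vptr
    std::memcpy(&actual, address, sizeof(void*));    // first word at the candidate address
    return expected == actual;
}

The sketch uses memcpy rather than the pointer casts in the diff purely to sidestep aliasing questions in a standalone example; the comparison itself is the same.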
14 changes: 4 additions & 10 deletions lib/Common/Memory/Recycler.inl
@@ -36,9 +36,10 @@ namespace Memory
     class DummyVTableObject : public FinalizableObject
     {
     public:
-        virtual void Finalize(bool isShutdown) {}
-        virtual void Dispose(bool isShutdown) {}
-        virtual void Mark(Recycler * recycler) {}
+        virtual void Finalize(bool isShutdown) final {}
+        virtual void Dispose(bool isShutdown) final {}
+        virtual void Mark(Recycler * recycler) final {}
+        virtual void Trace(IRecyclerHeapMarkingContext* markingContext) final {}
     };
 }

@@ -171,13 +172,6 @@ Recycler::AllocWithAttributesInlined(DECLSPEC_GUARD_OVERFLOW size_t size)
 #endif
 
 
-#pragma prefast(suppress:6313, "attributes is a template parameter and can be 0")
-    if (attributes & (FinalizeBit | TrackBit))
-    {
-        // Make sure a valid vtable is installed in case of OOM before the real vtable is set
-        memBlock = (char *)new (memBlock) DummyVTableObject();
-    }
-
 #ifdef RECYCLER_WRITE_BARRIER
     SwbVerboseTrace(this->GetRecyclerFlagsTable(), _u("Allocated SWB memory: 0x%p\n"), memBlock);
 
27 changes: 24 additions & 3 deletions lib/Common/Memory/SmallHeapBlockAllocator.h
@@ -129,6 +129,15 @@ SmallHeapBlockAllocator<TBlockType>::InlinedAllocImpl(Recycler * recycler, DECLS
 
         if (NeedSetAttributes(attributes))
         {
+            if ((attributes & (FinalizeBit | TrackBit)) != 0)
+            {
+                // Make sure a valid vtable is installed as once the attributes have been set this allocation may be traced by background marking
+                memBlock = (char *)new (memBlock) DummyVTableObject();
+#if defined(_M_ARM32_OR_ARM64)
+                // On ARM, make sure the v-table write is performed before setting the attributes
+                MemoryBarrier();
+#endif
+            }
             heapBlock->SetAttributes(memBlock, (attributes & StoredObjectInfoBitMask));
         }
 
@@ -138,6 +147,11 @@ SmallHeapBlockAllocator<TBlockType>::InlinedAllocImpl(Recycler * recycler, DECLS
     if (memBlock != nullptr && endAddress == nullptr)
     {
         // Free list allocation
+        freeObjectList = ((FreeObject *)memBlock)->GetNext();
+#ifdef RECYCLER_MEMORY_VERIFY
+        ((FreeObject *)memBlock)->DebugFillNext();
+#endif
+
         Assert(!this->IsBumpAllocMode());
         if (NeedSetAttributes(attributes))
         {
@@ -149,13 +163,20 @@ SmallHeapBlockAllocator<TBlockType>::InlinedAllocImpl(Recycler * recycler, DECLS
                 Assert(allocationHeapBlock != nullptr);
                 Assert(!allocationHeapBlock->IsLargeHeapBlock());
             }
+
+            if ((attributes & (FinalizeBit | TrackBit)) != 0)
+            {
+                // Make sure a valid vtable is installed as once the attributes have been set this allocation may be traced by background marking
+                memBlock = (char *)new (memBlock) DummyVTableObject();
+#if defined(_M_ARM32_OR_ARM64)
+                // On ARM, make sure the v-table write is performed before setting the attributes
+                MemoryBarrier();
+#endif
+            }
             allocationHeapBlock->SetAttributes(memBlock, (attributes & StoredObjectInfoBitMask));
         }
-        freeObjectList = ((FreeObject *)memBlock)->GetNext();
 
 #ifdef RECYCLER_MEMORY_VERIFY
-        ((FreeObject *)memBlock)->DebugFillNext();
-
         if (this->IsExplicitFreeObjectListAllocMode())
         {
             HeapBlock* heapBlock = recycler->FindHeapBlock(memBlock);
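The hoisted freeObjectList = ...GetNext() / DebugFillNext() lines matter because the free-list link and the newly installed v-table pointer occupy the same first word of the block: the link has to be consumed before placement-new overwrites it. A small sketch of that constraint, with hypothetical stand-ins (FreeCell, VTabled, PopAndPublish), follows.

// Illustrative only -- why the free-list link must be read before the dummy vtable is
// installed: both live in word 0 of the block.
#include <new>

struct FreeCell { FreeCell* next; };           // like FreeObject: the link lives in word 0
struct VTabled  { virtual void Trace() {} };   // like DummyVTableObject: the vptr lives in word 0

void* PopAndPublish(FreeCell*& freeList)
{
    FreeCell* cell = freeList;

    // 1. Consume the link while word 0 still holds the next pointer.
    freeList = cell->next;

    // 2. Only now overwrite word 0 with a vptr. In the opposite order, freeList would
    //    end up treating a v-table pointer as the next free cell.
    return ::new (static_cast<void*>(cell)) VTabled();
}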