Skip to content
This repository was archived by the owner on Dec 14, 2018. It is now read-only.

Add option to MemoryCacheOptions to specify maximum cache entries count #327

Closed
wants to merge 8 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ public enum EvictionReason
TokenExpired,

/// <summary>
/// GC, overflow
/// Overflow
/// </summary>
Capacity,
}
Expand Down
39 changes: 38 additions & 1 deletion src/Microsoft.Extensions.Caching.Memory/MemoryCache.cs
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@ namespace Microsoft.Extensions.Caching.Memory
public class MemoryCache : IMemoryCache
{
private readonly ConcurrentDictionary<object, CacheEntry> _entries;
private readonly SemaphoreSlim _compactionSemaphore = new SemaphoreSlim(initialCount: 1, maxCount: 1);
private bool _disposed;

// We store the delegates locally to prevent allocations
Expand All @@ -27,6 +28,7 @@ public class MemoryCache : IMemoryCache
private readonly Action<CacheEntry> _entryExpirationNotification;

private readonly ISystemClock _clock;
private readonly int? _entryCountLimit;

private TimeSpan _expirationScanFrequency;
private DateTimeOffset _lastExpirationScan;
Expand All @@ -49,6 +51,7 @@ public MemoryCache(IOptions<MemoryCacheOptions> optionsAccessor)

_clock = options.Clock ?? new SystemClock();
_expirationScanFrequency = options.ExpirationScanFrequency;
_entryCountLimit = options.EntryCountLimit;
_lastExpirationScan = _clock.UtcNow;
}

Expand Down Expand Up @@ -147,7 +150,18 @@ private void SetEntry(CacheEntry entry)

if (entryAdded)
{
entry.AttachTokens();
// Remove the entry and compact if the given maximum number of cache entries is exceeded
if (_entries.Count > _entryCountLimit)
{
entry.SetExpired(EvictionReason.Capacity);
RemoveEntry(entry);

TriggerOvercapacityCompaction();
}
else
{
entry.AttachTokens();
}
}
else
{
Expand Down Expand Up @@ -272,6 +286,29 @@ private static void ScanForExpiredItems(MemoryCache cache)
}
}

private void TriggerOvercapacityCompaction()
{
if (!_compactionSemaphore.Wait(0))
{
// Another compaction is running, exit immediately.
// Avoid overpurging when multiple overcapacity compactions are triggered concurrently.
return;
}

Task.Run(() =>
{
try
{
// Compact 10%
Compact(0.10);
}
finally
{
_compactionSemaphore.Release();
}
});
}

/// Remove at least the given percentage (0.10 for 10%) of the total entries (or estimated memory?), according to the following policy:
/// 1. Remove all expired items.
/// 2. Bucket by CacheItemPriority.
Expand Down
2 changes: 2 additions & 0 deletions src/Microsoft.Extensions.Caching.Memory/MemoryCacheOptions.cs
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,8 @@ public class MemoryCacheOptions : IOptions<MemoryCacheOptions>

public TimeSpan ExpirationScanFrequency { get; set; } = TimeSpan.FromMinutes(1);

        /// <summary>
        /// Gets or sets the maximum number of entries the cache may hold, or <c>null</c>
        /// (the default) for no limit. When adding an entry pushes the count above this
        /// limit, that entry is evicted with <see cref="EvictionReason.Capacity"/> and a
        /// background compaction of the cache is triggered.
        /// </summary>
        public int? EntryCountLimit { get; set; }
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

doc comment


MemoryCacheOptions IOptions<MemoryCacheOptions>.Value
{
get { return this; }
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -80,7 +80,42 @@ public static IServiceCollection AddDistributedMemoryCache(this IServiceCollecti
throw new ArgumentNullException(nameof(services));
}

services.TryAddSingleton<IDistributedCache>(new MemoryDistributedCache(new MemoryCache(new MemoryCacheOptions())));
return services.AddDistributedMemoryCache(_ => { });
}

/// <summary>
/// Adds a default implementation of <see cref="IDistributedCache"/> that stores items in memory
/// to the <see cref="IServiceCollection" />. Frameworks that require a distributed cache to work
/// can safely add this dependency as part of their dependency list to ensure that there is at least
/// one implementation available.
/// </summary>
/// <remarks>
/// <see cref="AddDistributedMemoryCache(IServiceCollection)"/> should only be used in single
/// server scenarios as this cache stores items in memory and doesn't expand across multiple machines.
/// For those scenarios it is recommended to use a proper distributed cache that can expand across
/// multiple machines.
/// </remarks>
/// <param name="services">The <see cref="IServiceCollection" /> to add services to.</param>
/// <param name="setupAction">
/// The <see cref="Action{MemoryCacheOptions}"/> to configure the <see cref="MemoryCacheOptions"/> that is used by the <see cref="MemoryDistributedCache"/>.
/// </param>
/// <returns>The <see cref="IServiceCollection"/> so that additional calls can be chained.</returns>
public static IServiceCollection AddDistributedMemoryCache(this IServiceCollection services, Action<MemoryCacheOptions> setupAction)
{
if (services == null)
{
throw new ArgumentNullException(nameof(services));
}

if (setupAction == null)
{
throw new ArgumentNullException(nameof(services));
}

var memoryCacheOptions = new MemoryCacheOptions();
setupAction(memoryCacheOptions);

services.TryAddSingleton<IDistributedCache>(new MemoryDistributedCache(new MemoryCache(memoryCacheOptions)));

return services;
}
Expand Down
Loading