fix: lock lru-cache to v6.0.0 (#170)
wschurman authored Mar 12, 2022
1 parent 02d2f24 commit 293868b
Showing 6 changed files with 31 additions and 109 deletions.
4 changes: 2 additions & 2 deletions package.json
@@ -22,7 +22,7 @@
"@types/ioredis": "^4.26.4",
"@types/jest": "^26.0.10",
"@types/jsbn": "^1.2.29",
"@types/lru-cache": "^7.4.0",
"@types/lru-cache": "^5.1.1",
"@types/node": "^14.6.0",
"@types/uuid": "^8.3.0",
"@typescript-eslint/eslint-plugin": "^4.14.0",
@@ -33,7 +33,7 @@
"eslint-plugin-tsdoc": "^0.2.11",
"jest": "^26.6.3",
"lerna": "^4.0.0",
"lru-cache": "^7.3.1",
"lru-cache": "^6.0.0",
"nullthrows": "^1.1.1",
"pg": "^8.6.0",
"prettier": "^2.3.2",
11 changes: 7 additions & 4 deletions packages/entity-cache-adapter-local-memory/README.md
@@ -3,23 +3,26 @@
Cross-request [LRU](https://github.com/isaacs/node-lru-cache) cache adapter for `@expo/entity`. Use
this cache with caution - it is nonstandard. The cache is shared between requests in the node process.

Note: This uses version 6.0.0 of `node-lru-cache` since it is the version best tuned for our use case (low TTL + LRU). Upgrading
to 7.x would cause high memory usage for the entity cache adapter use case, since 7.x allocates fixed-size data structures up front to tune for the non-TTL use case: https://github.com/isaacs/node-lru-cache/issues/208.
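
For illustration only (not part of this package's API): a minimal sketch of the v6 options this adapter relies on, with the rough v7 renames noted in comments. The entry cap and TTL mirror the defaults used in `GenericLocalMemoryCacher`.

```ts
import LRUCache from 'lru-cache'; // v6; assumes `esModuleInterop` for the default import

// lru-cache v6: a bounded map with a per-entry TTL, sized by entry count.
const cache = new LRUCache<string, string>({
  max: 10000, // maximum number of entries (v7 splits this into `max` plus an optional `maxSize`)
  maxAge: 10 * 1000, // TTL in milliseconds (renamed `ttl` in v7)
  length: () => 1, // per-entry "size" (renamed `sizeCalculation` in v7)
});

cache.set('a-key', 'a-value');
cache.get('a-key'); // 'a-value' until the TTL elapses, then undefined
cache.del('a-key'); // v6 method name; v7 renames this to `delete`
```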

[Documentation](https://expo.github.io/entity/modules/_expo_cache_adapter_local_memory.html)

## Why NOT use this cache

Because this is an in-memory cache, cross-box invalidation is not possible. Do not use this cache
Because this is an in-memory cache, cross-machine invalidation is not possible. Do not use this cache
if you have the following use cases:

- The objects stored have high mutability
- The objects stored are mutable
- Reading a stale object from the cache is not acceptable in your application
- Cross-box invalidation is not possible
- Cross-machine invalidation is not possible

## Typical use cases

If your application sees many requests fetching the same objects, you can save a trip to your cache
cluster and backing datastore by using this in-memory cache. Here are some good use cases (a configuration sketch follows this list):

- The objects stored are mostly immutable
- The objects stored are mostly immutable, and reading a stale object for a short TTL is acceptable
- You have a low TTL setting in your cache
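
For illustration, a minimal sketch of a low-TTL configuration using the `GenericLocalMemoryCacher.createLRUCache` helper changed in this commit. The option names come from the diff and tests below; the import path and the `MyEntityFields` type are assumptions, and the real adapter wiring is covered in the Usage section.

```ts
// Assumed re-export; adjust to however the package actually exposes GenericLocalMemoryCacher.
import { GenericLocalMemoryCacher } from '@expo/entity-cache-adapter-local-memory';

type MyEntityFields = { id: string; name: string }; // hypothetical entity shape

// Low TTL + bounded size: entries expire after 3 seconds and at most 10000 are kept.
const lruCache = GenericLocalMemoryCacher.createLRUCache<MyEntityFields>({
  ttlSeconds: 3,
  maxSize: 10000,
});
```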

## Usage
2 changes: 1 addition & 1 deletion packages/entity-cache-adapter-local-memory/package.json
@@ -30,7 +30,7 @@
"@expo/entity": "*"
},
"dependencies": {
"lru-cache": "^7.3.1"
"lru-cache": "^6.0.0"
},
"devDependencies": {
"@expo/entity": "^0.25.2"
@@ -7,76 +7,6 @@ export const DOES_NOT_EXIST_LOCAL_MEMORY_CACHE = Symbol('doesNotExist');
type LocalMemoryCacheValue<TFields> = Readonly<TFields> | typeof DOES_NOT_EXIST_LOCAL_MEMORY_CACHE;
export type LocalMemoryCache<TFields> = LRUCache<string, LocalMemoryCacheValue<TFields>>;

type LRUCacheOptionsV7<K, V> = {
/**
* the number of most recently used items to keep.
* note that we may store fewer items than this if maxSize is hit.
*/
max: number;

/**
* if you wish to track item size, you must provide a maxSize
* note that we still will only keep up to max *actual items*,
* so size tracking may cause fewer than max items to be stored.
* At the extreme, a single item of maxSize size will cause everything
* else in the cache to be dropped when it is added. Use with caution!
* Note also that size tracking can negatively impact performance,
* though for most cases, only minimally.
*/

maxSize?: number;

/**
* function to calculate size of items. useful if storing strings or
* buffers or other items where memory size depends on the object itself.
* also note that oversized items do NOT immediately get dropped from
* the cache, though they will cause faster turnover in the storage.
*/
sizeCalculation?: (value: V, key: K) => number;

/**
* function to call when the item is removed from the cache
* Note that using this can negatively impact performance.
*/
dispose?: (value: V, key: K) => void;

/**
* max time to live for items before they are considered stale
* note that stale items are NOT preemptively removed by default,
* and MAY live in the cache, contributing to its LRU max, long after
* they have expired.
* Also, as this cache is optimized for LRU/MRU operations, some of
* the staleness/TTL checks will reduce performance, as they will incur
* overhead by deleting items.
* Must be a positive integer in ms, defaults to 0, which means "no TTL"
*/
ttl?: number;

/**
* return stale items from cache.get() before disposing of them
* boolean, default false
*/
allowStale?: boolean;

/**
* update the age of items on cache.get(), renewing their TTL
* boolean, default false
*/
updateAgeOnGet?: boolean;

/**
* update the age of items on cache.has(), renewing their TTL
* boolean, default false
*/
updateAgeOnHas?: boolean;

/**
* update the "recently-used"-ness of items on cache.has()
* boolean, default false
*/
updateRecencyOnHas?: boolean;
};

export default class GenericLocalMemoryCacher<TFields> implements IEntityGenericCacher<TFields> {
constructor(private readonly localMemoryCache: LocalMemoryCache<TFields>) {}

@@ -86,21 +16,17 @@ export default class GenericLocalMemoryCacher<TFields> implements IEntityGenericCacher<TFields> {
const DEFAULT_LRU_CACHE_MAX_AGE_SECONDS = 10;
const DEFAULT_LRU_CACHE_SIZE = 10000;
const maxAgeSeconds = options.ttlSeconds ?? DEFAULT_LRU_CACHE_MAX_AGE_SECONDS;
const lruCacheOptions: LRUCacheOptionsV7<string, TFields> = {
return new LRUCache<string, LocalMemoryCacheValue<TFields>>({
max: options.maxSize ?? DEFAULT_LRU_CACHE_SIZE,
maxSize: options.maxSize ?? DEFAULT_LRU_CACHE_SIZE,
sizeCalculation: (value: LocalMemoryCacheValue<TFields>) =>
value === DOES_NOT_EXIST_LOCAL_MEMORY_CACHE ? 0 : 1,
ttl: maxAgeSeconds * 1000, // convert to ms
};
return new LRUCache<string, LocalMemoryCacheValue<TFields>>(lruCacheOptions as any);
length: (value) => (value === DOES_NOT_EXIST_LOCAL_MEMORY_CACHE ? 0 : 1),
maxAge: maxAgeSeconds * 1000, // convert to ms
});
}

static createNoOpCache<TFields>(): LocalMemoryCache<TFields> {
return new LRUCache<string, LocalMemoryCacheValue<TFields>>({
max: 1,
maxSize: 1,
sizeCalculation: () => 10, // make all things larger than max size
max: 0,
maxAge: -1,
});
}

@@ -142,7 +68,7 @@ export default class GenericLocalMemoryCacher<TFields> implements IEntityGenericCacher<TFields> {

public async invalidateManyAsync(keys: readonly string[]): Promise<void> {
for (const key of keys) {
this.localMemoryCache.delete(key);
this.localMemoryCache.del(key);
}
}

@@ -5,8 +5,7 @@ describe(GenericLocalMemoryCacher, () => {
it('creates a cache with default options', () => {
const cache = GenericLocalMemoryCacher.createLRUCache();
expect(cache.max).toBe(10000);
expect(cache.maxSize).toBe(10000);
expect(cache.ttl).toBe(10000);
expect(cache.maxAge).toBe(10000);
});

it('respects specified options', () => {
@@ -15,8 +14,7 @@
maxSize: 10,
});
expect(cache.max).toBe(10);
expect(cache.maxSize).toBe(10);
expect(cache.ttl).toBe(3000);
expect(cache.maxAge).toBe(3000);
});
});

29 changes: 12 additions & 17 deletions yarn.lock
@@ -508,35 +508,35 @@
minimist "^1.2.0"

"@expo/entity-cache-adapter-local-memory@file:packages/entity-cache-adapter-local-memory":
version "0.25.1"
version "0.25.2"
dependencies:
lru-cache "^7.3.1"
lru-cache "^6.0.0"

"@expo/entity-cache-adapter-redis@file:packages/entity-cache-adapter-redis":
version "0.25.1"
version "0.25.2"
dependencies:
ioredis "^4.27.3"

"@expo/entity-database-adapter-knex@file:packages/entity-database-adapter-knex":
version "0.25.1"
version "0.25.2"
dependencies:
knex "^1.0.2"

"@expo/entity-ip-address-field@file:packages/entity-ip-address-field":
version "0.25.1"
version "0.25.2"
dependencies:
ip-address "^8.1.0"

"@expo/entity-secondary-cache-local-memory@file:packages/entity-secondary-cache-local-memory":
version "0.25.1"
version "0.25.2"

"@expo/entity-secondary-cache-redis@file:packages/entity-secondary-cache-redis":
version "0.25.1"
version "0.25.2"
dependencies:
ioredis "^4.17.3"

"@expo/entity@file:packages/entity":
version "0.25.1"
version "0.25.2"
dependencies:
"@expo/results" "^1.0.0"
dataloader "^2.0.0"
@@ -2003,10 +2003,10 @@
resolved "https://registry.yarnpkg.com/@types/long/-/long-4.0.1.tgz#459c65fa1867dafe6a8f322c4c51695663cc55e9"
integrity sha512-5tXH6Bx/kNGd3MgffdmP4dy2Z+G4eaXw0SE81Tq3BNadtnMR5/ySMzX4SLEzHJzSmPNn4HIdpQsBvXMUykr58w==

"@types/lru-cache@^7.4.0":
version "7.4.0"
resolved "https://registry.yarnpkg.com/@types/lru-cache/-/lru-cache-7.4.0.tgz#74e50606962cb90448d928d6f0c7c872592fc84d"
integrity sha512-jZ/Tb2/3vXw4VYd9AImFC/n6XT3WywZNAxwY8Ox9eM87M9ta9G7KzCx4aKo2Zllvr02k40eY328cjOO3WuK5Kw==
"@types/lru-cache@^5.1.1":
version "5.1.1"
resolved "https://registry.yarnpkg.com/@types/lru-cache/-/lru-cache-5.1.1.tgz#c48c2e27b65d2a153b19bfc1a317e30872e01eef"
integrity sha512-ssE3Vlrys7sdIzs5LOxCzTVMsU7i9oa/IaW92wF32JFb3CVczqOkru2xspuKczHEbG3nvmPY7IFqVmGGHdNbYw==

"@types/mime@*":
version "2.0.1"
@@ -6578,11 +6578,6 @@ lru-cache@^6.0.0:
dependencies:
yallist "^4.0.0"

lru-cache@^7.3.1:
version "7.4.4"
resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-7.4.4.tgz#a3dabc394ec07e2285af52fd24d0d74b3ac71c29"
integrity sha512-2XbUJmlpIbmc9JvNNmtLzHlF31srxoDxuiQiwBHic7RZyHyltbTdzoO6maRqpdEhOOG5GD80EXvzAU0wR15ccg==

lunr@^2.3.9:
version "2.3.9"
resolved "https://registry.yarnpkg.com/lunr/-/lunr-2.3.9.tgz#18b123142832337dd6e964df1a5a7707b25d35e1"
