Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat: change redis cache adapter to use redis-like interface #200

Merged
merged 1 commit into from
Oct 6, 2022
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
14 changes: 12 additions & 2 deletions packages/entity-cache-adapter-redis/src/GenericRedisCacher.ts
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,6 @@ import {
transformFieldsToCacheObject,
IEntityGenericCacher,
} from '@expo/entity';
import { Redis } from 'ioredis';

import { redisTransformerMap } from './RedisCommon';
import wrapNativeRedisCallAsync from './errors/wrapNativeRedisCallAsync';
Expand All @@ -15,11 +14,22 @@ import wrapNativeRedisCallAsync from './errors/wrapNativeRedisCallAsync';
// The sentinel value is distinct from any (positively) cached value.
const DOES_NOT_EXIST_REDIS = '';

/**
 * Minimal transaction/pipeline surface required by this cache adapter.
 * Modeled on the chainable multi() object of ioredis — presumably any
 * redis-like client whose multi() matches this shape works; confirm
 * against the concrete client before substituting one.
 */
export interface IRedisTransaction {
// Queue a SET with an expiry; 'EX' is the literal seconds-unit token.
// Returns `this` so calls can be chained before exec().
set(key: string, value: string, secondsToken: 'EX', seconds: number): this;
// Execute all queued commands. Result shape is client-specific, hence Promise<any>.
exec(): Promise<any>;
}

/**
 * Minimal redis-like client interface consumed by this package
 * (satisfied by ioredis.Redis, among others). Only the commands the
 * cache adapter actually issues are declared here.
 */
export interface IRedis {
// Fetch multiple keys in one round trip; each missing key yields null.
mget(...args: [...keys: string[]]): Promise<(string | null)[]>;
// Start a transaction/pipeline of queued commands.
multi(): IRedisTransaction;
// Delete the given keys. Result is client-specific, hence Promise<any>.
del(...args: [...keys: string[]]): Promise<any>;
}

export interface GenericRedisCacheContext {
/**
* A redis-like client implementing the IRedis interface (e.g. an instance of ioredis.Redis)
*/
redisClient: Redis;
redisClient: IRedis;

/**
* TTL for caching database hits. Successive entity loads within this TTL
Expand Down
Original file line number Diff line number Diff line change
@@ -1,14 +1,13 @@
import { EntityCacheAdapter, EntityConfiguration, CacheLoadResult, mapKeys } from '@expo/entity';
import invariant from 'invariant';
import type { Redis } from 'ioredis';

import GenericRedisCacher from './GenericRedisCacher';
import GenericRedisCacher, { IRedis } from './GenericRedisCacher';

export interface RedisCacheAdapterContext {
/**
* A redis-like client implementing the IRedis interface (e.g. an instance of ioredis.Redis)
*/
redisClient: Redis;
redisClient: IRedis;

/**
* Create a key string for key parts (cache key prefix, versions, entity name, etc).
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -13,11 +13,12 @@ import { createRedisIntegrationTestEntityCompanionProvider } from '../testfixtur
class TestViewerContext extends ViewerContext {}

describe(GenericRedisCacher, () => {
const redisClient = new Redis(new URL(process.env['REDIS_URL']!).toString());
let redisCacheAdapterContext: RedisCacheAdapterContext;

beforeAll(() => {
redisCacheAdapterContext = {
redisClient: new Redis(new URL(process.env['REDIS_URL']!).toString()),
redisClient,
makeKeyFn(...parts: string[]): string {
const delimiter = ':';
const escapedParts = parts.map((part) =>
Expand All @@ -33,10 +34,10 @@ describe(GenericRedisCacher, () => {
});

beforeEach(async () => {
await redisCacheAdapterContext.redisClient.flushdb();
await redisClient.flushdb();
});
afterAll(async () => {
redisCacheAdapterContext.redisClient.disconnect();
redisClient.disconnect();
});

it('has correct caching and loading behavior', async () => {
Expand All @@ -62,7 +63,7 @@ describe(GenericRedisCacher, () => {
]);
await genericRedisCacher.cacheManyAsync(objectMap);

const cachedJSON = await redisCacheAdapterContext.redisClient.get(testKey);
const cachedJSON = await redisClient.get(testKey);
const cachedValue = JSON.parse(cachedJSON!);
expect(cachedValue).toMatchObject({
id: entity1Created.getID(),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -11,11 +11,12 @@ import { createRedisIntegrationTestEntityCompanionProvider } from '../testfixtur
class TestViewerContext extends ViewerContext {}

describe(RedisCacheAdapter, () => {
const redisClient = new Redis(new URL(process.env['REDIS_URL']!).toString());
let redisCacheAdapterContext: RedisCacheAdapterContext;

beforeAll(() => {
redisCacheAdapterContext = {
redisClient: new Redis(new URL(process.env['REDIS_URL']!).toString()),
redisClient,
makeKeyFn(...parts: string[]): string {
const delimiter = ':';
const escapedParts = parts.map((part) =>
Expand All @@ -31,10 +32,10 @@ describe(RedisCacheAdapter, () => {
});

beforeEach(async () => {
await redisCacheAdapterContext.redisClient.flushdb();
await redisClient.flushdb();
});
afterAll(async () => {
redisCacheAdapterContext.redisClient.disconnect();
redisClient.disconnect();
});

it('has correct caching behavior', async () => {
Expand All @@ -56,9 +57,7 @@ describe(RedisCacheAdapter, () => {
.enforcing()
.loadByIDAsync(entity1Created.getID());

const cachedJSON = await redisCacheAdapterContext.redisClient.get(
cacheKeyMaker('id', entity1.getID())
);
const cachedJSON = await redisClient.get(cacheKeyMaker('id', entity1.getID()));
const cachedValue = JSON.parse(cachedJSON!);
expect(cachedValue).toMatchObject({
id: entity1.getID(),
Expand All @@ -73,9 +72,7 @@ describe(RedisCacheAdapter, () => {
);
expect(entityNonExistentResult.ok).toBe(false);

const nonExistentCachedValue = await redisCacheAdapterContext.redisClient.get(
cacheKeyMaker('id', nonExistentId)
);
const nonExistentCachedValue = await redisClient.get(cacheKeyMaker('id', nonExistentId));
expect(nonExistentCachedValue).toEqual('');

// load again through entities framework to ensure it reads negative result
Expand All @@ -86,9 +83,7 @@ describe(RedisCacheAdapter, () => {

// invalidate from cache to ensure it invalidates correctly
await RedisTestEntity.loader(viewerContext).invalidateFieldsAsync(entity1.getAllFields());
const cachedValueNull = await redisCacheAdapterContext.redisClient.get(
cacheKeyMaker('id', entity1.getID())
);
const cachedValueNull = await redisClient.get(cacheKeyMaker('id', entity1.getID()));
expect(cachedValueNull).toBe(null);
});

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,11 +9,12 @@ import { createRedisIntegrationTestEntityCompanionProvider } from '../testfixtur
class TestViewerContext extends ViewerContext {}

describe(RedisCacheAdapter, () => {
const redisClient = new Redis(new URL(process.env['REDIS_URL']!).toString());
let redisCacheAdapterContext: RedisCacheAdapterContext;

beforeAll(() => {
redisCacheAdapterContext = {
redisClient: new Redis(new URL(process.env['REDIS_URL']!).toString()),
redisClient,
makeKeyFn(...parts: string[]): string {
const delimiter = ':';
const escapedParts = parts.map((part) =>
Expand All @@ -29,11 +30,11 @@ describe(RedisCacheAdapter, () => {
});

beforeEach(async () => {
await redisCacheAdapterContext.redisClient.flushdb();
await redisClient.flushdb();
});

it('throws when redis is disconnected', async () => {
redisCacheAdapterContext.redisClient.disconnect();
redisClient.disconnect();

const vc1 = new TestViewerContext(
createRedisIntegrationTestEntityCompanionProvider(redisCacheAdapterContext)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -97,6 +97,7 @@ async function dropPostgresTable(knex: Knex): Promise<void> {

describe('Entity cache inconsistency', () => {
let knexInstance: Knex;
const redisClient = new Redis(new URL(process.env['REDIS_URL']!).toString());
let redisCacheAdapterContext: RedisCacheAdapterContext;

beforeAll(() => {
Expand All @@ -111,7 +112,7 @@ describe('Entity cache inconsistency', () => {
},
});
redisCacheAdapterContext = {
redisClient: new Redis(new URL(process.env['REDIS_URL']!).toString()),
redisClient,
makeKeyFn(...parts: string[]): string {
const delimiter = ':';
const escapedParts = parts.map((part) =>
Expand All @@ -128,13 +129,13 @@ describe('Entity cache inconsistency', () => {

beforeEach(async () => {
await createOrTruncatePostgresTables(knexInstance);
await redisCacheAdapterContext.redisClient.flushdb();
await redisClient.flushdb();
});

afterAll(async () => {
await dropPostgresTable(knexInstance);
await knexInstance.destroy();
redisCacheAdapterContext.redisClient.disconnect();
redisClient.disconnect();
});

test('lots of updates in long-ish running transactions', async () => {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,7 @@ async function dropPostgresTable(knex: Knex): Promise<void> {

describe('EntityMutator.processEntityDeletionForInboundEdgesAsync', () => {
let knexInstance: Knex;
const redisClient = new Redis(new URL(process.env['REDIS_URL']!).toString());
let redisCacheAdapterContext: RedisCacheAdapterContext;

beforeAll(() => {
Expand All @@ -47,7 +48,7 @@ describe('EntityMutator.processEntityDeletionForInboundEdgesAsync', () => {
},
});
redisCacheAdapterContext = {
redisClient: new Redis(new URL(process.env['REDIS_URL']!).toString()),
redisClient,
makeKeyFn(...parts: string[]): string {
const delimiter = ':';
const escapedParts = parts.map((part) =>
Expand All @@ -64,13 +65,13 @@ describe('EntityMutator.processEntityDeletionForInboundEdgesAsync', () => {

beforeEach(async () => {
await createOrTruncatePostgresTables(knexInstance);
await redisCacheAdapterContext.redisClient.flushdb();
await redisClient.flushdb();
});

afterAll(async () => {
await dropPostgresTable(knexInstance);
await knexInstance.destroy();
redisCacheAdapterContext.redisClient.disconnect();
redisClient.disconnect();
});

describe('EntityEdgeDeletionBehavior.INVALIDATE_CACHE', () => {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -176,6 +176,7 @@ const makeEntityClasses = async (knex: Knex, edgeDeletionBehavior: EntityEdgeDel
};
describe('EntityMutator.processEntityDeletionForInboundEdgesAsync', () => {
let knexInstance: Knex;
const redisClient = new Redis(new URL(process.env['REDIS_URL']!).toString());
let redisCacheAdapterContext: RedisCacheAdapterContext;

beforeAll(() => {
Expand All @@ -190,7 +191,7 @@ describe('EntityMutator.processEntityDeletionForInboundEdgesAsync', () => {
},
});
redisCacheAdapterContext = {
redisClient: new Redis(new URL(process.env['REDIS_URL']!).toString()),
redisClient,
makeKeyFn(...parts: string[]): string {
const delimiter = ':';
const escapedParts = parts.map((part) =>
Expand All @@ -207,7 +208,7 @@ describe('EntityMutator.processEntityDeletionForInboundEdgesAsync', () => {

afterAll(async () => {
await knexInstance.destroy();
redisCacheAdapterContext.redisClient.disconnect();
redisClient.disconnect();
});

it.each([
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -52,11 +52,12 @@ class TestSecondaryRedisCacheLoader extends EntitySecondaryCacheLoader<
}

describe(RedisSecondaryEntityCache, () => {
const redisClient = new Redis(new URL(process.env['REDIS_URL']!).toString());
let redisCacheAdapterContext: RedisCacheAdapterContext;

beforeAll(() => {
redisCacheAdapterContext = {
redisClient: new Redis(new URL(process.env['REDIS_URL']!).toString()),
redisClient,
makeKeyFn(..._parts: string[]): string {
throw new Error('should not be used by this test');
},
Expand All @@ -68,10 +69,10 @@ describe(RedisSecondaryEntityCache, () => {
});

beforeEach(async () => {
await redisCacheAdapterContext.redisClient.flushdb();
await redisClient.flushdb();
});
afterAll(async () => {
redisCacheAdapterContext.redisClient.disconnect();
redisClient.disconnect();
});

it('Loads through secondary loader, caches, and invalidates', async () => {
Expand Down