diff --git a/README.md b/README.md
index ff4e714..2356b51 100644
--- a/README.md
+++ b/README.md
@@ -133,6 +133,58 @@ with the original keys `[ 2, 9, 6, 1 ]`:
 ]
 ```
 
+#### Batch Scheduling
+
+By default DataLoader will coalesce all individual loads which occur within a
+single frame of execution before calling your batch function with all requested
+keys. This ensures no additional latency while capturing many related requests
+into a single batch. In fact, this is the same behavior used in Facebook's
+original PHP implementation in 2010. See `enqueuePostPromiseJob` in the
+[source code][] for more details about how this works.
+
+However, sometimes this behavior is not desirable or optimal. Perhaps you
+expect requests to be spread out over a few subsequent ticks because of an
+existing use of `setTimeout`, or you just want manual control over dispatching
+regardless of the run loop. DataLoader allows a custom batch scheduler to be
+provided for these or any other behaviors.
+
+A custom scheduler is provided as `batchScheduleFn` in options. It must be a
+function which is passed a callback and is expected to call that callback in
+the immediate future to execute the batch request.
+
+As an example, here is a batch scheduler which collects all requests over a
+100ms window of time (and as a consequence, adds 100ms of latency):
+
+```js
+const myLoader = new DataLoader(myBatchFn, {
+  batchScheduleFn: callback => setTimeout(callback, 100)
+})
+```
+
+As another example, here is a manually dispatched batch scheduler:
+
+```js
+function createScheduler() {
+  let callbacks = []
+  return {
+    schedule(callback) {
+      callbacks.push(callback)
+    },
+    dispatch() {
+      callbacks.forEach(callback => callback())
+      callbacks = []
+    }
+  }
+}
+
+const { schedule, dispatch } = createScheduler()
+const myLoader = new DataLoader(myBatchFn, { batchScheduleFn: schedule })
+
+myLoader.load(1)
+myLoader.load(2)
+dispatch()
+```
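+
+Building on the two examples above, here is a sketch of a scheduler combining
+both behaviors: batches may be dispatched manually at any time, while a
+timeout acts as a fallback so that no batch waits longer than `ms`
+milliseconds (`myBatchFn` is the same hypothetical batch function as above):
+
+```js
+function createTimeoutScheduler(ms) {
+  let callbacks = []
+  function dispatch() {
+    // Swap the list out before calling so each callback runs exactly once,
+    // even if a pending timeout fires dispatch again later.
+    const toCall = callbacks
+    callbacks = []
+    toCall.forEach(callback => callback())
+  }
+  return {
+    schedule(callback) {
+      callbacks.push(callback)
+      // Fallback: flush automatically if dispatch() is never called.
+      setTimeout(dispatch, ms)
+    },
+    dispatch
+  }
+}
+
+const { schedule, dispatch } = createTimeoutScheduler(100)
+const myLoader = new DataLoader(myBatchFn, { batchScheduleFn: schedule })
+```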
+
 ## Caching
 
@@ -345,6 +397,7 @@ Create a new `DataLoader` given a batch loading function and options.
 | ---------- | ---- | ------- | ----------- |
 | *batch* | Boolean | `true` | Set to `false` to disable batching, invoking `batchLoadFn` with a single load key. This is equivalent to setting `maxBatchSize` to `1`.
 | *maxBatchSize* | Number | `Infinity` | Limits the number of items that get passed in to the `batchLoadFn`. May be set to `1` to disable batching.
+| *batchScheduleFn* | Function | See [Batch scheduling](#batch-scheduling) | A function to schedule the later execution of a batch. The function is expected to call the provided callback in the immediate future.
 | *cache* | Boolean | `true` | Set to `false` to disable memoization caching, creating a new Promise and new key in the `batchLoadFn` for every load of the same key. This is equivalent to setting `cacheMap` to `null`.
 | *cacheKeyFn* | Function | `key => key` | Produces cache key for a given load key. Useful when objects are keys and two objects should be considered equivalent.
 | *cacheMap* | Object | `new Map()` | Instance of [Map][] (or an object with a similar API) to be used as cache. May be set to `null` to disable caching.
@@ -599,3 +652,4 @@ Listed in alphabetical order
 [express]: http://expressjs.com/
 [babel/polyfill]: https://babeljs.io/docs/usage/polyfill/
 [lru_map]: https://github.com/rsms/js-lru
+[source code]: https://github.com/graphql/dataloader/blob/master/src/index.js
diff --git a/src/__tests__/abuse.test.js b/src/__tests__/abuse.test.js
index 95fa1f8..f2bb0dc 100644
--- a/src/__tests__/abuse.test.js
+++ b/src/__tests__/abuse.test.js
@@ -190,4 +190,11 @@ describe('Provides descriptive error messages for API abuse', () => {
       new DataLoader(async keys => keys, { cacheKeyFn: null })
     ).toThrow('cacheKeyFn must be a function: null');
   });
+
+  it('Requires a function for batchScheduleFn', () => {
+    expect(() =>
+      // $FlowExpectError
+      new DataLoader(async keys => keys, { batchScheduleFn: null })
+    ).toThrow('batchScheduleFn must be a function: null');
+  });
 });
diff --git a/src/__tests__/dataloader.test.js b/src/__tests__/dataloader.test.js
index 0aad4d6..47b2059 100644
--- a/src/__tests__/dataloader.test.js
+++ b/src/__tests__/dataloader.test.js
@@ -887,6 +887,54 @@ describe('Accepts options', () => {
 
 });
 
+describe('It allows custom schedulers', () => {
+
+  it('Supports manual dispatch', () => {
+    function createScheduler() {
+      let callbacks = [];
+      return {
+        schedule(callback) {
+          callbacks.push(callback);
+        },
+        dispatch() {
+          callbacks.forEach(callback => callback());
+          callbacks = [];
+        }
+      };
+    }
+
+    const { schedule, dispatch } = createScheduler();
+    const [ identityLoader, loadCalls ] = idLoader<string>({
+      batchScheduleFn: schedule
+    });
+
+    identityLoader.load('A');
+    identityLoader.load('B');
+    dispatch();
+    identityLoader.load('A');
+    identityLoader.load('C');
+    dispatch();
+    // Note: never dispatched!
+    identityLoader.load('D');
+
+    // 'A' was cached by the first dispatch, so the second batch only
+    // requests 'C', and 'D' never reaches the batch function at all.
+    expect(loadCalls).toEqual([ [ 'A', 'B' ], [ 'C' ] ]);
+  });
+
+  it('Custom batch scheduler is provided loader as this context', () => {
+    let that;
+    function batchScheduleFn(callback) {
+      that = this;
+      callback();
+    }
+
+    const [ identityLoader ] = idLoader<string>({ batchScheduleFn });
+
+    identityLoader.load('A');
+    expect(that).toBe(identityLoader);
+  });
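+
+  // An illustrative sketch (assumes Jest fake timers are available in this
+  // suite): a setTimeout-based scheduler, as documented in the README, holds
+  // the batch until the timer fires.
+  it('Supports a setTimeout-based scheduler', () => {
+    jest.useFakeTimers();
+
+    const [ identityLoader, loadCalls ] = idLoader<string>({
+      batchScheduleFn: callback => setTimeout(callback, 100)
+    });
+
+    identityLoader.load('A');
+    identityLoader.load('B');
+    expect(loadCalls).toEqual([]);
+
+    jest.advanceTimersByTime(100);
+    expect(loadCalls).toEqual([ [ 'A', 'B' ] ]);
+
+    jest.useRealTimers();
+  });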
+
+});
+
 describe('It is resilient to job queue ordering', () => {
 
   it('batches loads occurring within promises', async () => {
diff --git a/src/index.d.ts b/src/index.d.ts
index b76a1aa..6e29ef6 100644
--- a/src/index.d.ts
+++ b/src/index.d.ts
@@ -90,6 +90,13 @@ declare namespace DataLoader {
      */
     maxBatchSize?: number;
 
+    /**
+     * Default: see https://github.com/graphql/dataloader#batch-scheduling.
+     * A function to schedule the later execution of a batch. The function is
+     * expected to call the provided callback in the immediate future.
+     */
+    batchScheduleFn?: (callback: () => void) => void;
+
     /**
      * Default `true`. Set to `false` to disable memoization caching, creating a
      * new Promise and new key in the `batchLoadFn` for every load of the same
diff --git a/src/index.js b/src/index.js
index 55ee8e1..969488d 100644
--- a/src/index.js
+++ b/src/index.js
@@ -17,6 +17,7 @@ export type BatchLoadFn<K, V> =
 export type Options<K, V, C = K> = {
   batch?: boolean;
   maxBatchSize?: number;
+  batchScheduleFn?: (callback: () => void) => void;
   cache?: boolean;
   cacheKeyFn?: (key: K) => C;
   cacheMap?: CacheMap<C, Promise<V>> | null;
@@ -53,6 +54,7 @@
     }
     this._batchLoadFn = batchLoadFn;
     this._maxBatchSize = getValidMaxBatchSize(options);
+    this._batchScheduleFn = getValidBatchScheduleFn(options);
     this._cacheKeyFn = getValidCacheKeyFn(options);
     this._cacheMap = getValidCacheMap(options);
     this._batch = null;
@@ -61,6 +63,7 @@
   // Private
   _batchLoadFn: BatchLoadFn<K, V>;
   _maxBatchSize: number;
+  _batchScheduleFn: (() => void) => void;
   _cacheKeyFn: K => C;
   _cacheMap: CacheMap<C, Promise<V>> | null;
   _batch: Batch<K, V> | null;
@@ -271,7 +274,7 @@ function getCurrentBatch<K, V>(loader: DataLoader<K, V, any>): Batch<K, V> {
   loader._batch = newBatch;
 
   // Then schedule a task to dispatch this batch of requests.
-  enqueuePostPromiseJob(() => dispatchBatch(loader, newBatch));
+  loader._batchScheduleFn(() => dispatchBatch(loader, newBatch));
 
   return newBatch;
 }
@@ -381,6 +384,22 @@ function getValidMaxBatchSize(options: ?Options<any, any, any>): number {
   return maxBatchSize;
 }
 
+// Private: given the DataLoader's options, produce a batch schedule function.
+function getValidBatchScheduleFn(
+  options: ?Options<any, any, any>
+): (() => void) => void {
+  var batchScheduleFn = options && options.batchScheduleFn;
+  if (batchScheduleFn === undefined) {
+    return enqueuePostPromiseJob;
+  }
+  if (typeof batchScheduleFn !== 'function') {
+    throw new TypeError(
+      `batchScheduleFn must be a function: ${(batchScheduleFn: any)}`
+    );
+  }
+  return batchScheduleFn;
+}
+
 // Private: given the DataLoader's options, produce a cache key function.
 function getValidCacheKeyFn<K, C>(options: ?Options<K, any, C>): (K => C) {
   var cacheKeyFn = options && options.cacheKeyFn;
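Taken together, a minimal usage sketch of the new option (`myBatchFn` is a
hypothetical batch function; the `TypeError` text matches the validation added
in `getValidBatchScheduleFn` above):

```js
const DataLoader = require('dataloader')

// Hypothetical batch function, for illustration only.
async function myBatchFn(keys) {
  return keys.map(key => ({ id: key }))
}

// Collect loads for 10ms instead of using the default
// enqueuePostPromiseJob-based scheduling.
const loader = new DataLoader(myBatchFn, {
  batchScheduleFn: callback => setTimeout(callback, 10)
})

Promise.all([ loader.load(1), loader.load(2) ]).then(results => {
  console.log(results) // [ { id: 1 }, { id: 2 } ] from a single batch call
})

// Anything other than a function (or undefined) is rejected up front:
//   new DataLoader(myBatchFn, { batchScheduleFn: null })
//   // TypeError: batchScheduleFn must be a function: null
```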