diff --git a/index.ts b/index.ts
index 0c7885aa..0c25f34a 100644
--- a/index.ts
+++ b/index.ts
@@ -4,7 +4,7 @@ export { RedisGraph, GraphConfigInfo } from './modules/redisgraph'
 export { RedisGears, RGGetExecutionParameters, RGPyExecuteParameters } from './modules/redisgears'
 export { RedisBloom, BFInsertParameters, BFResponse, BFReserveParameter } from './modules/redisbloom';
 export { RedisBloomTopK, TOPKIncrbyItems, TOPKResponse } from './modules/redisbloom-topk';
-export { RedisBloomCuckoo, CFInsertParameters, CFResponse } from './modules/redisbloom-cuckoo';
+export { RedisBloomCuckoo, CFInsertParameters, CFResponse, CFReserveParameters } from './modules/redisbloom-cuckoo';
 export { RedisBloomCMK, CMKIncrbyItems } from './modules/redisbloom-cmk';
 export { RedisTimeSeries as RTS, RedisTimeSeries, TSCreateOptions, TSLabel, TSAddOptions, TSKeySet, TSIncrbyDecrbyOptions, TSOptions, TSCreateRule, TSAggregationType,
diff --git a/modules/redis.ts b/modules/redis.ts
index fa1e42a0..49618da1 100644
--- a/modules/redis.ts
+++ b/modules/redis.ts
@@ -112,6 +112,7 @@ export type RedisBloomCMKMixin = {
  * The Redis 'All in One' RedisBloomCuckoo module functions
  */
 export type RedisBloomCuckooMixin = {
+    bloom_cuckoo_module_reserve: typeof RedisBloomCuckoo.prototype.reserve,
     bloom_cuckoo_module_add: typeof RedisBloomCuckoo.prototype.add,
     bloom_cuckoo_module_addnx: typeof RedisBloomCuckoo.prototype.addnx,
     bloom_cuckoo_module_insert: typeof RedisBloomCuckoo.prototype.insert,
diff --git a/modules/redisbloom-cuckoo.ts b/modules/redisbloom-cuckoo.ts
index d7dd2261..f94cb677 100644
--- a/modules/redisbloom-cuckoo.ts
+++ b/modules/redisbloom-cuckoo.ts
@@ -14,6 +14,23 @@ export class RedisBloomCuckoo extends Module {
         super(RedisBloomCuckoo.name, options, moduleOptions)
     }
 
+    /**
+     * Creating an empty Cuckoo filter with a given initial capacity.
+     * @param key The key under which the filter is to be found
+     * @param capacity The number of entries you intend to add to the filter. Performance will begin to degrade after adding more items than this number. The actual degradation will depend on how far the limit has been exceeded.
+     * @param options The additional optional parameters
+     */
+    async reserve(key: string, capacity: number, options?: CFReserveParameters): Promise<'OK'> {
+        let args = [key, capacity];
+        if(options && options.bucketSize)
+            args = args.concat(['BUCKETSIZE', options.bucketSize])
+        if(options && options.maxIterations)
+            args = args.concat(['MAXITERATIONS', options.maxIterations])
+        if(options && options.expansion)
+            args = args.concat(['EXPANSION', options.expansion])
+        return await this.sendCommand('CF.RESERVE', args);
+    }
+
     /**
      * Adding an item to the cuckoo filter, creating the filter if it does not exist.
      * @param key The name of the filter
@@ -132,4 +149,16 @@ export type CFInsertParameters = {
  * @param 1 Stands for 'true'
  * @param 0 Stands for 'false'
  */
-export type CFResponse = '1' | '0';
\ No newline at end of file
+export type CFResponse = '1' | '0';
+
+/**
+ * The additional optional parameters of the 'CF.RESERVE' command
+ * @param bucketSize Number of items in each bucket. A higher bucket size value improves the fill rate but also causes a higher error rate and slightly slower performance.
+ * @param maxIterations Number of attempts to swap items between buckets before declaring the filter full and creating an additional filter. A low value is better for performance and a higher value is better for the filter fill rate.
+ * @param expansion When a new filter is created, its size is the size of the current filter multiplied by expansion. Expansion is rounded to the next 2^n number.
+ */
+export type CFReserveParameters = {
+    bucketSize?: number,
+    maxIterations?: number,
+    expansion?: number
+}
\ No newline at end of file
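For orientation, here is a minimal usage sketch of the new reserve() method. It is illustrative only: it assumes a RedisBloomCuckoo client that has already been built and connected the same way the tests below do, and the key name, capacity and option values are made up.

import { RedisBloomCuckoo, CFReserveParameters } from '../modules/redisbloom-cuckoo';

// Reserve an empty Cuckoo filter sized for roughly 1000 items, then add a first item.
async function createCuckooFilter(client: RedisBloomCuckoo): Promise<void> {
    const options: CFReserveParameters = {
        bucketSize: 2,      // items per bucket (BUCKETSIZE)
        maxIterations: 20,  // swap attempts before the filter is considered full (MAXITERATIONS)
        expansion: 1        // growth factor for additional filters (EXPANSION)
    };
    // Sends: CF.RESERVE mycuckoo 1000 BUCKETSIZE 2 MAXITERATIONS 20 EXPANSION 1
    const reply = await client.reserve('mycuckoo', 1000, options);
    console.log(reply); // 'OK'
    await client.add('mycuckoo', 'item');
}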
diff --git a/tests/redisbloom-cuckoo.ts b/tests/redisbloom-cuckoo.ts
index ea089425..5eee7b6f 100644
--- a/tests/redisbloom-cuckoo.ts
+++ b/tests/redisbloom-cuckoo.ts
@@ -3,12 +3,11 @@ import { expect } from 'chai'
 import { RedisBloomCuckoo } from '../modules/redisbloom-cuckoo';
 import { Redis } from '../modules/redis';
 const key1 = 'key1cuckoo'
-const key2 = 'key2cuckoo'
-const key3 = 'key3cuckoo'
+const key2 = '1'
+const key3 = 'cuckoo'
+const chunks: {iterator: number, data: string}[] = [];
 let client: RedisBloomCuckoo;
 let redis: Redis;
-let dataIterator: number;
-let data: string;

 describe('RedisBloom Cuckoo filter testing', async function() {
     before(async () => {
@@ -28,8 +27,16 @@ describe('RedisBloom Cuckoo filter testing', async function() {
         await redis.disconnect();
     })

+    it('reserve function', async () => {
+        const response = await client.reserve(key2, 100, {
+            bucketSize: 1
+        });
+        expect(response).to.equal('OK', 'The response of the \'CF.RESERVE\' command');
+    })
     it('add function', async () => {
-        const response = await client.add(key1, 'item');
+        let response = await client.add(key1, 'item');
+        expect(response).to.equal(1, 'The response of the CF.ADD command');
+        response = await client.add(key2, 'X');
         expect(response).to.equal(1, 'The response of the CF.ADD command');
     });
     it('addnx function', async () => {
@@ -52,19 +59,25 @@ describe('RedisBloom Cuckoo filter testing', async function() {
         const response = await client.count(key1, 'item1');
         expect(response).to.equal(1, 'The response of the CF.COUNT command');
     });
-    it.skip('scandump function', async () => {
-        await client.add(key2, 'item');
-        await client.redis.del(key2);
-        const response = await client.scandump(key1, 123)
-        console.log(response)
-        dataIterator = parseInt(response[0])
-        expect(dataIterator).to.equal(1, 'The chunk data iterator');
-        data = response[1];
-        expect(data).to.not.equal('', 'The chunk data')
+    it('scandump function', async () => {
+        let iter = 0;
+        let response = await client.scandump(key2, iter)
+        let data = response[1]
+        chunks.push({iterator: iter, data: data})
+        iter = parseInt(response[0])
+        while(iter != 0){
+            response = await client.scandump(key2, iter)
+            iter = parseInt(response[0])
+            data = response[1]
+            chunks.push({iterator: iter, data: data})
+        }
+        console.log(chunks)
+        expect(chunks.length).gt(0, `The count of chunks of key ${key2}`)
     });
     it.skip('loadchunk function', async () => {
-        const response = await client.loadchunk(key2, dataIterator, data);
-        console.log(response)
+        const chunk = chunks[1];
+        const res = await client.loadchunk(key2, chunk.iterator, chunk.data.replace(/�/g, 'fffd'));
+        expect(res).to.equal('OK', `The response of load chunk with iterator ${chunk.iterator}`)
     });
     it('info function', async () => {
         const response = await client.info(key1);
diff --git a/tests/redisbloom.ts b/tests/redisbloom.ts
index 8869f307..c45ed96a 100644
--- a/tests/redisbloom.ts
+++ b/tests/redisbloom.ts
@@ -5,11 +5,9 @@ import { Redis } from '../modules/redis';
 let client: RedisBloom;
 let redis: Redis;
 const key1 = 'key1bloom';
-const key2 = 'key2bloom';
+const key2 = '1';
 const item1 = 'item1';
-const responses = []
-let dataIterator: number;
-let data: string;
+const chunks: {iterator: number, data: string}[] = [];

 describe('RedisBloom Module testing', async function() {
     before(async () => {
@@ -30,7 +28,7 @@ describe('RedisBloom Module testing', async function() {
     })

     it('reserve function', async () => {
-        const response = await client.reserve(key2, 0.1, 1);
+        const response = await client.reserve(key2, 0.01, 100);
         expect(response).to.equal('OK', 'The response of the \'BF.RESERVE\' command');
     })
     it('add function', async () => {
@@ -58,38 +56,23 @@ describe('RedisBloom Module testing', async function() {
         expect(response[0]).to.equal('Capacity', 'The first item of the information')
         expect(response[1]).to.equal(100, 'The value of the \'Capacity\' item')
     });
-    it.skip('scandump function', async () => {
-        //responses = [];
-        let response = await client.scandump(key2, 0)
-        console.log(response)
-        dataIterator = parseInt(response[0])
-        expect(dataIterator).to.equal(1, 'The chunk data iterator');
-        while(parseInt(response[0]) > 0){
-            responses.push(response);
-            response = await client.scandump(key2, dataIterator)
-            dataIterator = parseInt(response[0])
-            console.log(response)
+    it('scandump function', async () => {
+        let iter = 0;
+        let response = await client.scandump(key2, iter)
+        let data = response[1]
+        chunks.push({iterator: iter, data: data})
+        iter = parseInt(response[0])
+        while(iter != 0){
+            response = await client.scandump(key2, iter)
+            iter = parseInt(response[0])
+            data = response[1]
+            chunks.push({iterator: iter, data: data})
         }
-        //const buffer = Buffer.from(response[1], 'hex');
-        //console.log(buffer.toString())
-        //data = buffer.toString('hex')//Buffer.from(response[1], 'utf16');//Buffer.from(response[1]).toString();
-        console.log(data)
-        expect(data).to.not.equal('', 'The chunk data')
+        expect(chunks.length).gt(0, `The count of chunks of key ${key2}`)
     });
-    it.skip('loadchunk function', async () => {
-        await client.redis.del(key2);
-        for(const res of responses) {
-            console.log(`\n=== ${res[0]} ===`)
-            console.log(Buffer.from(res[1], 'ascii').toString('hex'))
-            console.log(Buffer.from(res[1], 'ascii').toString('ascii'))
-            console.log(Buffer.from(res[1], 'ascii').toString('base64'))
-            console.log(Buffer.from(res[1], 'ascii').toString('binary'))
-            console.log(Buffer.from(res[1], 'ascii').toString('utf-8'))
-            console.log(Buffer.from(res[1], 'ascii').toString('utf8'))
-            console.log(Buffer.from(res[1], 'ascii').toString('utf16le'))
-            console.log(await client.loadchunk(key2, res[0], Buffer.from(res[1], 'ascii').toString('utf8')))
-        }
-        //const response = await client.loadchunk(key2, dataIterator, data)
-        //console.log(response)
+    it('loadchunk function', async () => {
+        const chunk = chunks[1];
+        const res = await client.loadchunk(key2, chunk.iterator, chunk.data);
+        expect(res).to.equal('OK', `The response of load chunk with iterator ${chunk.iterator}`)
     });
 });
\ No newline at end of file
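A note on the rewritten scandump/loadchunk tests: per the RedisBloom documentation, each data chunk should be paired with the iterator value returned alongside it, and dumping stops once the returned iterator is 0 again. The sketch below shows that round trip for the Cuckoo module; it is illustrative only, assumes a connected RedisBloomCuckoo client like the one in the tests above, and uses invented key names.

import { RedisBloomCuckoo } from '../modules/redisbloom-cuckoo';

// Dump a filter with CF.SCANDUMP and replay the chunks into another key with CF.LOADCHUNK.
async function copyCuckooFilter(client: RedisBloomCuckoo, source: string, target: string): Promise<void> {
    const chunks: {iterator: number, data: string}[] = [];
    let iterator = 0;
    do {
        // CF.SCANDUMP returns [nextIterator, data]; the first call must pass iterator 0.
        const response = await client.scandump(source, iterator);
        iterator = parseInt(response[0]);
        if(iterator !== 0)
            chunks.push({iterator: iterator, data: response[1]});
    } while(iterator !== 0);

    for(const chunk of chunks) {
        // Each chunk is restored with the iterator that was returned together with its data.
        await client.loadchunk(target, chunk.iterator, chunk.data);
    }
}

Keep in mind that the chunk payload is raw binary, which is why the Cuckoo loadchunk test above is still marked it.skip: round-tripping the data through a JavaScript string can corrupt it.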