From d50e9759dfca033de70628e2d9f5981803fc38d6 Mon Sep 17 00:00:00 2001 From: shacharPash <s.pashchur@gmail.com> Date: Thu, 29 Sep 2022 20:05:07 +0300 Subject: [PATCH 1/2] Update Tdigest Commands --- src/NRedisStack/ResponseParser.cs | 19 +- .../Tdigest/DataTypes/TdigestInformation.cs | 7 +- src/NRedisStack/Tdigest/Literals/Commands.cs | 4 + src/NRedisStack/Tdigest/TdigestCommands.cs | 305 +++++++++++------- .../NRedisStack.Tests/Tdigest/TdigestTests.cs | 236 ++++++++++++-- 5 files changed, 419 insertions(+), 152 deletions(-) diff --git a/src/NRedisStack/ResponseParser.cs b/src/NRedisStack/ResponseParser.cs index 5e9f87d4..5c9be0e6 100644 --- a/src/NRedisStack/ResponseParser.cs +++ b/src/NRedisStack/ResponseParser.cs @@ -368,11 +368,11 @@ public static IReadOnlyList<TimeSeriesRule> ToRuleArray(this RedisResult result) public static TdigestInformation ToTdigestInfo(this RedisResult result) //TODO: Think about a different implementation, because if the output of CMS.INFO changes or even just the names of the labels then the parsing will not work { - long compression, capacity, mergedNodes, unmergedNodes, totalCompressions; - double mergedWeight, unmergedWeight; + long compression, capacity, mergedNodes, unmergedNodes, totalCompressions, memoryUsage; + double mergedWeight, unmergedWeight, sumWeight; - compression = capacity = mergedNodes = unmergedNodes = totalCompressions = -1; - mergedWeight = unmergedWeight = -1.0; + compression = capacity = mergedNodes = unmergedNodes = totalCompressions = memoryUsage = -1; + mergedWeight = unmergedWeight = sumWeight = -1.0; RedisResult[] redisResults = result.ToArray(); @@ -395,20 +395,25 @@ public static IReadOnlyList<TimeSeriesRule> ToRuleArray(this RedisResult result) unmergedNodes = (long)redisResults[i]; break; case "Merged weight": - mergedWeight = (double)redisResults[i]; break; case "Unmerged weight": unmergedWeight = (double)redisResults[i]; break; + case "Sum weights": + sumWeight = (double)redisResults[i]; + break; case "Total compressions": totalCompressions = (long)redisResults[i]; break; + case "Memory usage": + memoryUsage = (long)redisResults[i]; + break; } } return new TdigestInformation(compression, capacity, mergedNodes, unmergedNodes, - mergedWeight, unmergedWeight, totalCompressions); + mergedWeight, unmergedWeight, sumWeight, totalCompressions, memoryUsage); } public static TimeSeriesInformation ToTimeSeriesInfo(this RedisResult result) @@ -531,7 +536,7 @@ public static IReadOnlyList<string> ToStringArray(this RedisResult result) Array.ForEach(redisResults, str => list.Add((string)str)); return list; } - + public static long?[] ToNullableLongArray(this RedisResult result) { if (result.IsNull) diff --git a/src/NRedisStack/Tdigest/DataTypes/TdigestInformation.cs b/src/NRedisStack/Tdigest/DataTypes/TdigestInformation.cs index fdd39b06..e57fa3b4 100644 --- a/src/NRedisStack/Tdigest/DataTypes/TdigestInformation.cs +++ b/src/NRedisStack/Tdigest/DataTypes/TdigestInformation.cs @@ -12,13 +12,14 @@ public class TdigestInformation public long UnmergedNodes { get; private set; } public double MergedWeight { get; private set; } public double UnmergedWeight { get; private set; } - + public double SumWeights { get; private set; } public long TotalCompressions { get; private set; } + public long MemoryUsage { get; private set; } internal TdigestInformation(long compression, long capacity, long mergedNodes, long unmergedNodes, double mergedWeight, - double unmergedWeight, long totalCompressions) + double unmergedWeight, double sumWeights, long 
totalCompressions, long memoryUsage) { Compression = compression; @@ -27,7 +28,9 @@ internal TdigestInformation(long compression, long capacity, long mergedNodes, UnmergedNodes = unmergedNodes; MergedWeight = mergedWeight; UnmergedWeight = unmergedWeight; + SumWeights = sumWeights; TotalCompressions = totalCompressions; + MemoryUsage = memoryUsage; } } } \ No newline at end of file diff --git a/src/NRedisStack/Tdigest/Literals/Commands.cs b/src/NRedisStack/Tdigest/Literals/Commands.cs index c9ef8167..66be06eb 100644 --- a/src/NRedisStack/Tdigest/Literals/Commands.cs +++ b/src/NRedisStack/Tdigest/Literals/Commands.cs @@ -13,5 +13,9 @@ internal class TDIGEST public const string CDF = "TDIGEST.CDF"; public const string TRIMMED_MEAN = "TDIGEST.TRIMMED_MEAN"; public const string INFO = "TDIGEST.INFO"; + public const string RANK = "TDIGEST.RANK"; + public const string REVRANK = "TDIGEST.REVRANK"; + public const string BYRANK = "TDIGEST.BYRANK"; + public const string BYREVRANK = "TDIGEST.BYREVRANK"; } } \ No newline at end of file diff --git a/src/NRedisStack/Tdigest/TdigestCommands.cs b/src/NRedisStack/Tdigest/TdigestCommands.cs index ebe413cd..bc975889 100644 --- a/src/NRedisStack/Tdigest/TdigestCommands.cs +++ b/src/NRedisStack/Tdigest/TdigestCommands.cs @@ -20,7 +20,7 @@ public TdigestCommands(IDatabase db) /// <param name="weight">The weight of this observation.</param> /// <returns><see langword="true"/> if executed correctly, error otherwise</returns> /// <remarks><seealso href="https://redis.io/commands/tdigest.add"/></remarks> - public bool Add(RedisKey key, double item, double weight) + public bool Add(RedisKey key, double item, long weight) { if (weight < 0) throw new ArgumentOutOfRangeException(nameof(weight)); @@ -35,7 +35,7 @@ public bool Add(RedisKey key, double item, double weight) /// <param name="weight">The weight of this observation.</param> /// <returns><see langword="true"/> if executed correctly, error otherwise</returns> /// <remarks><seealso href="https://redis.io/commands/tdigest.add"/></remarks> - public async Task<bool> AddAsync(RedisKey key, double item, double weight) + public async Task<bool> AddAsync(RedisKey key, double item, int weight) { if (weight < 0) throw new ArgumentOutOfRangeException(nameof(weight)); @@ -50,7 +50,7 @@ public async Task<bool> AddAsync(RedisKey key, double item, double weight) /// <param name="valueWeight">Tuple of the value of the observation and The weight of this observation.</param> /// <returns><see langword="true"/> if executed correctly, error otherwise</returns> /// <remarks><seealso href="https://redis.io/commands/tdigest.add"/></remarks> - public bool Add(RedisKey key, params Tuple<double, double>[] valueWeight) + public bool Add(RedisKey key, params Tuple<double, long>[] valueWeight) { if (valueWeight.Length < 1) throw new ArgumentOutOfRangeException(nameof(valueWeight)); @@ -73,7 +73,7 @@ public bool Add(RedisKey key, params Tuple<double, double>[] valueWeight) /// <param name="valueWeight">Tuple of the value of the observation and The weight of this observation.</param> /// <returns><see langword="true"/> if executed correctly, error otherwise</returns> /// <remarks><seealso href="https://redis.io/commands/tdigest.add"/></remarks> - public async Task<bool> AddAsync(RedisKey key, params Tuple<double, double>[] valueWeight) + public async Task<bool> AddAsync(RedisKey key, params Tuple<double, long>[] valueWeight) { if (valueWeight.Length < 1) throw new ArgumentOutOfRangeException(nameof(valueWeight)); @@ -93,25 +93,28 @@ public 
async Task<bool> AddAsync(RedisKey key, params Tuple<double, double>[] va /// Estimate the fraction of all observations added which are <= value. /// </summary> /// <param name="key">The name of the sketch.</param> - /// <param name="value">upper limit of observation value.</param> + /// <param name="values">upper limit of observation value.</param> /// <returns>double-reply - estimation of the fraction of all observations added which are <= value</returns> /// <remarks><seealso href="https://redis.io/commands/tdigest.cdf"/></remarks> - public double CDF(RedisKey key, double value) + public double[] CDF(RedisKey key, params double[] values) { - return _db.Execute(TDIGEST.CDF, key, value).ToDouble(); + var args = new List<object>(values.Length +1) { key }; + foreach(var value in values) args.Add(value); + return _db.Execute(TDIGEST.CDF, args).ToDoubleArray(); } /// <summary> /// Estimate the fraction of all observations added which are <= value. /// </summary> /// <param name="key">The name of the sketch.</param> - /// <param name="value">upper limit of observation value.</param> + /// <param name="values">upper limit of observation value.</param> /// <returns>double-reply - estimation of the fraction of all observations added which are <= value</returns> /// <remarks><seealso href="https://redis.io/commands/tdigest.cdf"/></remarks> - public async Task<double> CDFAsync(RedisKey key, double value) + public async Task<double[]> CDFAsync(RedisKey key, params double[] values) { - var result = await _db.ExecuteAsync(TDIGEST.CDF, key, value); - return result.ToDouble(); + var args = new List<object>(values.Length +1) { key }; + foreach(var value in values) args.Add(value); + return (await _db.ExecuteAsync(TDIGEST.CDF, args)).ToDoubleArray(); } /// <summary> @@ -169,7 +172,8 @@ public async Task<TdigestInformation> InfoAsync(RedisKey key) /// <remarks><seealso href="https://redis.io/commands/tdigest.max"/></remarks> public double Max(RedisKey key) { - return _db.Execute(TDIGEST.MAX, key).ToDouble(); + var result = _db.Execute(TDIGEST.MAX, key); + return result.ToDouble(); } /// <summary> @@ -180,7 +184,8 @@ public double Max(RedisKey key) /// <remarks><seealso href="https://redis.io/commands/tdigest.max"/></remarks> public async Task<double> MaxAsync(RedisKey key) { - return (await _db.ExecuteAsync(TDIGEST.MAX, key)).ToDouble(); + var result = await _db.ExecuteAsync(TDIGEST.MAX, key); + return result.ToDouble(); } /// <summary> @@ -206,111 +211,76 @@ public async Task<double> MinAsync(RedisKey key) } /// <summary> - /// Get the minimum observation value from the sketch. - /// </summary> - /// <param name="destinationKey">TSketch to copy observation values to (a t-digest data structure).</param> - /// <param name="sourceKey">Sketch to copy observation values from (a t-digest data structure).</param> - /// <returns><see langword="true"/> if executed correctly, error otherwise</returns> - /// <remarks><seealso href="https://redis.io/commands/tdigest.merge"/></remarks> - public bool Merge(RedisKey destinationKey, RedisKey sourceKey) - { - return _db.Execute(TDIGEST.MERGE, destinationKey, sourceKey).OKtoBoolean(); - } - - /// <summary> - /// Get the minimum observation value from the sketch. 
- /// </summary> - /// <param name="destinationKey">TSketch to copy observation values to (a t-digest data structure).</param> - /// <param name="sourceKey">Sketch to copy observation values from (a t-digest data structure).</param> - /// <returns><see langword="true"/> if executed correctly, error otherwise</returns> - /// <remarks><seealso href="https://redis.io/commands/tdigest.merge"/></remarks> - public async Task<bool> MergeAsync(RedisKey destinationKey, RedisKey sourceKey) - { - var result = await _db.ExecuteAsync(TDIGEST.MERGE, destinationKey, sourceKey); - return result.OKtoBoolean(); - } - - /// <summary> - /// Get the minimum observation value from the sketch. + /// Merges all of the values from 'from' keys to 'destination-key' sketch /// </summary> /// <param name="destinationKey">TSketch to copy observation values to (a t-digest data structure).</param> + /// <param name="compression">The compression parameter.</param> + /// <param name="overide">If destination already exists, it is overwritten.</param> /// <param name="sourceKeys">Sketch to copy observation values from (a t-digest data structure).</param> /// <returns><see langword="true"/> if executed correctly, error otherwise</returns> /// <remarks><seealso href="https://redis.io/commands/tdigest.merge"/></remarks> - public bool Merge(RedisKey destinationKey, params RedisKey[] sourceKeys) + public bool Merge(RedisKey destinationKey, long compression = default(long), bool overide = false, params RedisKey[] sourceKeys) { if (sourceKeys.Length < 1) throw new ArgumentOutOfRangeException(nameof(sourceKeys)); - var args = sourceKeys.ToList(); - args.Insert(0, destinationKey); - - return _db.Execute(TDIGEST.MERGE, args).OKtoBoolean(); - } + int numkeys = sourceKeys.Length; + var args = new List<object>() { destinationKey, numkeys}; + foreach(var key in sourceKeys) + { + args.Add(key); + } - /// <summary> - /// Get the minimum observation value from the sketch. - /// </summary> - /// <param name="destinationKey">TSketch to copy observation values to (a t-digest data structure).</param> - /// <param name="sourceKeys">Sketch to copy observation values from (a t-digest data structure).</param> - /// <returns><see langword="true"/> if executed correctly, error otherwise</returns> - /// <remarks><seealso href="https://redis.io/commands/tdigest.merge"/></remarks> - public async Task<bool> MergeAsync(RedisKey destinationKey, params RedisKey[] sourceKeys) - { - if (sourceKeys.Length < 1) throw new ArgumentOutOfRangeException(nameof(sourceKeys)); + if (compression != default(long)) + { + args.Add("COMPRESSION"); + args.Add(compression); + } - var args = sourceKeys.ToList(); - args.Insert(0, destinationKey); + if(overide) + { + args.Add("OVERRIDE"); + } - var result = await _db.ExecuteAsync(TDIGEST.MERGE, args); - return result.OKtoBoolean(); + return _db.Execute(TDIGEST.MERGE, args).OKtoBoolean(); } /// <summary> - /// Merges all of the values from 'from' keys to 'destination-key' sketch. 
+ /// Merges all of the values from 'from' keys to 'destination-key' sketch /// </summary> /// <param name="destinationKey">TSketch to copy observation values to (a t-digest data structure).</param> - /// <param name="numkeys">Number of sketch(es) to copy observation values from.</param> /// <param name="compression">The compression parameter.</param> + /// <param name="overide">If destination already exists, it is overwritten.</param> /// <param name="sourceKeys">Sketch to copy observation values from (a t-digest data structure).</param> /// <returns><see langword="true"/> if executed correctly, error otherwise</returns> - /// <remarks><seealso href="https://redis.io/commands/tdigest.mergestore"/></remarks> - public bool MergeStore(RedisKey destinationKey, long numkeys, long compression = 100, params RedisKey[] sourceKeys) + /// <remarks><seealso href="https://redis.io/commands/tdigest.merge"/></remarks> + public async Task<bool> MergeAsync(RedisKey destinationKey, long compression = default(long), bool overide = false, params RedisKey[] sourceKeys) { if (sourceKeys.Length < 1) throw new ArgumentOutOfRangeException(nameof(sourceKeys)); - var args = new List<object> { destinationKey, numkeys }; - foreach (var key in sourceKeys) args.Add(key); - args.Add(TdigestArgs.COMPRESSION); - args.Add(compression); - - return _db.Execute(TDIGEST.MERGESTORE, args).OKtoBoolean(); - } + int numkeys = sourceKeys.Length; + var args = new List<object>() { destinationKey, numkeys}; + foreach(var key in sourceKeys) + { + args.Add(key); + } - /// <summary> - /// Merges all of the values from 'from' keys to 'destination-key' sketch. - /// </summary> - /// <param name="destinationKey">TSketch to copy observation values to (a t-digest data structure).</param> - /// <param name="numkeys">Number of sketch(es) to copy observation values from.</param> - /// <param name="compression">The compression parameter.</param> - /// <param name="sourceKeys">Sketch to copy observation values from (a t-digest data structure).</param> - /// <returns><see langword="true"/> if executed correctly, error otherwise</returns> - /// <remarks><seealso href="https://redis.io/commands/tdigest.mergestore"/></remarks> - public async Task<bool> MergeStoreAsync(RedisKey destinationKey, long numkeys, long compression = 100, params RedisKey[] sourceKeys) - { - if (sourceKeys.Length < 1) throw new ArgumentOutOfRangeException(nameof(sourceKeys)); + if (compression != default(long)) + { + args.Add("COMPRESSION"); + args.Add(compression); + } - var args = new List<object> { destinationKey, numkeys }; - foreach (var key in sourceKeys) args.Add(key); - args.Add(TdigestArgs.COMPRESSION); - args.Add(compression); + if(overide) + { + args.Add("OVERRIDE"); + } - var result = await _db.ExecuteAsync(TDIGEST.MERGESTORE, args); - return result.OKtoBoolean(); + return (await _db.ExecuteAsync(TDIGEST.MERGE, args)).OKtoBoolean(); } /// <summary> /// Returns estimates of one or more cutoffs such that a specified fraction of the observations - ///added to this t-digest would be less than or equal to each of the specified cutoffs. + /// added to this t-digest would be less than or equal to each of the specified cutoffs. 
/// </summary> /// <param name="key">The name of the sketch (a t-digest data structure).</param> /// <param name="quantile">The desired fraction (between 0 and 1 inclusively).</param> @@ -344,13 +314,145 @@ public async Task<double[]> QuantileAsync(RedisKey key, params double[] quantile return (await _db.ExecuteAsync(TDIGEST.QUANTILE, args)).ToDoubleArray(); } + /// <summary> + /// Retrieve the estimated rank of value (the number of observations in the sketch + /// that are smaller than value + half the number of observations that are equal to value). + /// </summary> + /// <param name="key">The name of the sketch (a t-digest data structure).</param> + /// <param name="values">input value, for which the rank will be determined.</param> + /// <returns>an array of results populated with rank_1, rank_2, ..., rank_N.</returns> + /// <remarks><seealso href="https://redis.io/commands/tdigest.rank"/></remarks> + public long[] Rank(RedisKey key, params long[] values) + { + if (values.Length < 1) throw new ArgumentOutOfRangeException(nameof(values)); + + var args = new List<object>(values.Length + 1) { key }; + foreach (var v in values) args.Add(v); + return _db.Execute(TDIGEST.RANK, args).ToLongArray(); + } + + /// <summary> + /// Retrieve the estimated rank of value (the number of observations in the sketch + /// that are smaller than value + half the number of observations that are equal to value). + /// </summary> + /// <param name="key">The name of the sketch (a t-digest data structure).</param> + /// <param name="values">input value, for which the rank will be determined.</param> + /// <returns>an array of results populated with rank_1, rank_2, ..., rank_N.</returns> + /// <remarks><seealso href="https://redis.io/commands/tdigest.rank"/></remarks> + public async Task<long[]> RankAsync(RedisKey key, params long[] values) + { + if (values.Length < 1) throw new ArgumentOutOfRangeException(nameof(values)); + + var args = new List<object>(values.Length + 1) { key }; + foreach (var v in values) args.Add(v); + return (await _db.ExecuteAsync(TDIGEST.RANK, args)).ToLongArray(); + } + + /// <summary> + /// Retrieve the estimated rank of value (the number of observations in the sketch + /// that are larger than value + half the number of observations that are equal to value). + /// </summary> + /// <param name="key">The name of the sketch (a t-digest data structure).</param> + /// <param name="values">input value, for which the rank will be determined.</param> + /// <returns>an array of results populated with rank_1, rank_2, ..., rank_N.</returns> + /// <remarks><seealso href="https://redis.io/commands/tdigest.revrank"/></remarks> + public long[] RevRank(RedisKey key, params long[] values) + { + if (values.Length < 1) throw new ArgumentOutOfRangeException(nameof(values)); + + var args = new List<object>(values.Length + 1) { key }; + foreach (var v in values) args.Add(v); + return _db.Execute(TDIGEST.REVRANK, args).ToLongArray(); + } + + /// <summary> + /// Retrieve the estimated rank of value (the number of observations in the sketch + /// that are larger than value + half the number of observations that are equal to value). 
+ /// </summary> + /// <param name="key">The name of the sketch (a t-digest data structure).</param> + /// <param name="values">input value, for which the rank will be determined.</param> + /// <returns>an array of results populated with rank_1, rank_2, ..., rank_N.</returns> + /// <remarks><seealso href="https://redis.io/commands/tdigest.revrank"/></remarks> + public async Task<long[]> RevRankAsync(RedisKey key, params long[] values) + { + if (values.Length < 1) throw new ArgumentOutOfRangeException(nameof(values)); + + var args = new List<object>(values.Length + 1) { key }; + foreach (var v in values) args.Add(v); + return ( await _db.ExecuteAsync(TDIGEST.REVRANK, args)).ToLongArray(); + } + + /// <summary> + /// Retrieve an estimation of the value with the given the rank. + /// </summary> + /// <param name="key">The name of the sketch (a t-digest data structure).</param> + /// <param name="ranks">input rank, for which the value will be determined.</param> + /// <returns>an array of results populated with value_1, value_2, ..., value_N.</returns> + /// <remarks><seealso href="https://redis.io/commands/tdigest.byrank"/></remarks> + public long[] ByRank(RedisKey key, params long[] ranks) + { + if (ranks.Length < 1) throw new ArgumentOutOfRangeException(nameof(ranks)); + + var args = new List<object>(ranks.Length + 1) { key }; + foreach (var v in ranks) args.Add(v); + return _db.Execute(TDIGEST.BYRANK, args).ToLongArray(); + } + + /// <summary> + /// Retrieve an estimation of the value with the given the rank. + /// </summary> + /// <param name="key">The name of the sketch (a t-digest data structure).</param> + /// <param name="ranks">input rank, for which the value will be determined.</param> + /// <returns>an array of results populated with value_1, value_2, ..., value_N.</returns> + /// <remarks><seealso href="https://redis.io/commands/tdigest.byrank"/></remarks> + public async Task<long[]> ByRankAsync(RedisKey key, params long[] ranks) + { + if (ranks.Length < 1) throw new ArgumentOutOfRangeException(nameof(ranks)); + + var args = new List<object>(ranks.Length + 1) { key }; + foreach (var v in ranks) args.Add(v); + return (await _db.ExecuteAsync(TDIGEST.BYRANK, args)).ToLongArray(); + } + + /// <summary> + /// Retrieve an estimation of the value with the given the reverse rank. + /// </summary> + /// <param name="key">The name of the sketch (a t-digest data structure).</param> + /// <param name="ranks">input reverse rank, for which the value will be determined.</param> + /// <returns>an array of results populated with value_1, value_2, ..., value_N.</returns> + /// <remarks><seealso href="https://redis.io/commands/tdigest.byrevrank"/></remarks> + public long[] ByRevRank(RedisKey key, params long[] ranks) + { + if (ranks.Length < 1) throw new ArgumentOutOfRangeException(nameof(ranks)); + + var args = new List<object>(ranks.Length + 1) { key }; + foreach (var v in ranks) args.Add(v); + return _db.Execute(TDIGEST.BYREVRANK, args).ToLongArray(); + } + + /// <summary> + /// Retrieve an estimation of the value with the given the reverse rank. 
+ /// </summary> + /// <param name="key">The name of the sketch (a t-digest data structure).</param> + /// <param name="ranks">input reverse rank, for which the value will be determined.</param> + /// <returns>an array of results populated with value_1, value_2, ..., value_N.</returns> + /// <remarks><seealso href="https://redis.io/commands/tdigest.byrevrank"/></remarks> + public async Task<long[]> ByRevRankAsync(RedisKey key, params long[] ranks) + { + if (ranks.Length < 1) throw new ArgumentOutOfRangeException(nameof(ranks)); + + var args = new List<object>(ranks.Length + 1) { key }; + foreach (var v in ranks) args.Add(v); + return ( await _db.ExecuteAsync(TDIGEST.BYREVRANK, args)).ToLongArray(); + } + /// <summary> /// Reset the sketch - empty the sketch and re-initialize it /// </summary> /// <param name="key">The name of the sketch (a t-digest data structure).</param> /// <returns><see langword="true"/> if executed correctly, error otherwise.</returns> /// <remarks><seealso href="https://redis.io/commands/tdigest.reset"/></remarks> - public bool Reset(RedisKey key, params double[] quantile) + public bool Reset(RedisKey key) { return _db.Execute(TDIGEST.RESET, key).OKtoBoolean(); } @@ -361,7 +463,7 @@ public bool Reset(RedisKey key, params double[] quantile) /// <param name="key">The name of the sketch (a t-digest data structure).</param> /// <returns><see langword="true"/> if executed correctly, error otherwise.</returns> /// <remarks><seealso href="https://redis.io/commands/tdigest.reset"/></remarks> - public async Task<bool> ResetAsync(RedisKey key, params double[] quantile) + public async Task<bool> ResetAsync(RedisKey key) { return (await _db.ExecuteAsync(TDIGEST.RESET, key)).OKtoBoolean(); } @@ -372,8 +474,8 @@ public async Task<bool> ResetAsync(RedisKey key, params double[] quantile) /// <param name="key">The name of the sketch (a t-digest data structure).</param> /// <param name="lowCutQuantile">Exclude observation values lower than this quantile.</param> /// <param name="highCutQuantile">Exclude observation values higher than this quantile.</param> - /// <returns>estimation of the mean value. Will return DBL_MAX if the sketch is empty.</returns> - /// <remarks><seealso href="https://redis.io/commands/tdigest.reset"/></remarks> + /// <returns>estimation of the mean value. Will return NaN if the sketch is empty.</returns> + /// <remarks><seealso href="https://redis.io/commands/tdigest.trimmed_mean"/></remarks> public double TrimmedMean(RedisKey key, double lowCutQuantile, double highCutQuantile) { return _db.Execute(TDIGEST.TRIMMED_MEAN, key, lowCutQuantile, highCutQuantile).ToDouble(); @@ -385,24 +487,11 @@ public double TrimmedMean(RedisKey key, double lowCutQuantile, double highCutQua /// <param name="key">The name of the sketch (a t-digest data structure).</param> /// <param name="lowCutQuantile">Exclude observation values lower than this quantile.</param> /// <param name="highCutQuantile">Exclude observation values higher than this quantile.</param> - /// <returns>estimation of the mean value. Will return DBL_MAX if the sketch is empty.</returns> - /// <remarks><seealso href="https://redis.io/commands/tdigest.reset"/></remarks> + /// <returns>estimation of the mean value. 
Will return NaN if the sketch is empty.</returns> + /// <remarks><seealso href="https://redis.io/commands/tdigest.trimmed_mean"/></remarks> public async Task<double> TrimmedMeanAsync(RedisKey key, double lowCutQuantile, double highCutQuantile) { return (await _db.ExecuteAsync(TDIGEST.TRIMMED_MEAN, key, lowCutQuantile, highCutQuantile)).ToDouble(); } - - - - - - - - - - - - - } } diff --git a/tests/NRedisStack.Tests/Tdigest/TdigestTests.cs b/tests/NRedisStack.Tests/Tdigest/TdigestTests.cs index f79cf474..89ade283 100644 --- a/tests/NRedisStack.Tests/Tdigest/TdigestTests.cs +++ b/tests/NRedisStack.Tests/Tdigest/TdigestTests.cs @@ -90,6 +90,159 @@ public async Task TestCreateAndInfoAsync() } } + [Fact] + public void TestRank() + { + IDatabase db = redisFixture.Redis.GetDatabase(); + db.Execute("FLUSHALL"); + var tdigest = db.TDIGEST(); + + Assert.True(tdigest.Create("t-digest", 500)); + var tuples = new Tuple<double, long>[20]; + for (int i = 0; i < 20; i++) + { + tuples[i] = new(i, 1); + } + Assert.True(tdigest.Add("t-digest", tuples)); + Assert.Equal(-1, tdigest.Rank("t-digest", -1)[0]); + Assert.Equal(1, tdigest.Rank("t-digest", 0)[0]); + Assert.Equal(11, tdigest.Rank("t-digest", 10)[0]); + Assert.Equal(new long[3] { -1, 20, 10 }, tdigest.Rank("t-digest", -20, 20, 9)); + } + + [Fact] + public async Task TestRankAsync() + { + IDatabase db = redisFixture.Redis.GetDatabase(); + db.Execute("FLUSHALL"); + var tdigest = db.TDIGEST(); + + Assert.True(tdigest.Create("t-digest", 500)); + var tuples = new Tuple<double, long>[20]; + for (int i = 0; i < 20; i++) + { + tuples[i] = new(i, 1); + } + Assert.True(tdigest.Add("t-digest", tuples)); + Assert.Equal(-1, (await tdigest.RankAsync("t-digest", -1))[0]); + Assert.Equal(1, (await tdigest.RankAsync("t-digest", 0))[0]); + Assert.Equal(11, (await tdigest.RankAsync("t-digest", 10))[0]); + Assert.Equal(new long[3] { -1, 20, 10 }, await tdigest.RankAsync("t-digest", -20, 20, 9)); + } + + [Fact] + public void TestRevRank() + { + IDatabase db = redisFixture.Redis.GetDatabase(); + db.Execute("FLUSHALL"); + var tdigest = db.TDIGEST(); + + Assert.True(tdigest.Create("t-digest", 500)); + var tuples = new Tuple<double, long>[20]; + for (int i = 0; i < 20; i++) + { + tuples[i] = new(i, 1); + } + + Assert.True(tdigest.Add("t-digest", tuples)); + Assert.Equal(-1, tdigest.RevRank("t-digest", 20)[0]); + Assert.Equal(20, tdigest.RevRank("t-digest", 0)[0]); + Assert.Equal(new long[3] { -1, 20, 10 }, tdigest.RevRank("t-digest", 21, 0, 10)); + } + + [Fact] + public async Task TestRevRankAsync() + { + IDatabase db = redisFixture.Redis.GetDatabase(); + db.Execute("FLUSHALL"); + var tdigest = db.TDIGEST(); + + Assert.True(tdigest.Create("t-digest", 500)); + var tuples = new Tuple<double, long>[20]; + for (int i = 0; i < 20; i++) + { + tuples[i] = new(i, 1); + } + + Assert.True(tdigest.Add("t-digest", tuples)); + Assert.Equal(-1, (await tdigest.RevRankAsync("t-digest", 20))[0]); + Assert.Equal(20, (await tdigest.RevRankAsync("t-digest", 0))[0]); + Assert.Equal(new long[3] { -1, 20, 10 }, await tdigest.RevRankAsync("t-digest", 21, 0, 10)); + } + + // TODO: fix those tests: + // [Fact] + // public void TestByRanks() + // { + // IDatabase db = redisFixture.Redis.GetDatabase(); + // db.Execute("FLUSHALL"); + // var tdigest = db.TDIGEST(); + + // Assert.True(tdigest.Create("t-digest", 500)); + // var tuples = new Tuple<double, long>[20]; + // for (int i = 0; i < 20; i++) + // { + // tuples[i] = new(i, 1); + // } + // Assert.True(tdigest.Add("t-digest", tuples)); + // 
Assert.Equal(1, tdigest.ByRank("t-digest", 0)[0]); + // Assert.Equal(10, tdigest.ByRank("t-digest", 9)[0]); + // } + + // [Fact] + // public async Task TestByRanksAsync() + // { + // IDatabase db = redisFixture.Redis.GetDatabase(); + // db.Execute("FLUSHALL"); + // var tdigest = db.TDIGEST(); + + // Assert.True(tdigest.Create("t-digest", 500)); + // var tuples = new Tuple<double, long>[20]; + // for (int i = 0; i < 20; i++) + // { + // tuples[i] = new(i, 1); + // } + // Assert.True(tdigest.Add("t-digest", tuples)); + // Assert.Equal(1, (await tdigest.ByRankAsync("t-digest", 0))[0]); + // Assert.Equal(10, (await tdigest.ByRankAsync("t-digest", 9))[0]); + // } + + // [Fact] + // public void TestByRevRanks() + // { + // IDatabase db = redisFixture.Redis.GetDatabase(); + // db.Execute("FLUSHALL"); + // var tdigest = db.TDIGEST(); + + // Assert.True(tdigest.Create("t-digest", 500)); + // var tuples = new Tuple<double, long>[20]; + // for (int i = 0; i < 20; i++) + // { + // tuples[i] = new(i, 1); + // } + // Assert.True(tdigest.Add("t-digest", tuples)); + // Assert.Equal(10, tdigest.ByRevRank("t-digest", 0)[0]); + // Assert.Equal(2, tdigest.ByRevRank("t-digest", 9)[0]); + // } + + // [Fact] + // public async Task TestByRevRanksAsync() + // { + // IDatabase db = redisFixture.Redis.GetDatabase(); + // db.Execute("FLUSHALL"); + // var tdigest = db.TDIGEST(); + + // Assert.True(tdigest.Create("t-digest", 500)); + // var tuples = new Tuple<double, long>[20]; + // for (int i = 0; i < 20; i++) + // { + // tuples[i] = new(i, 1); + // } + // Assert.True(tdigest.Add("t-digest", tuples)); + // Assert.Equal(10, (await tdigest.ByRevRankAsync("t-digest", 0))[0]); + // Assert.Equal(2, (await tdigest.ByRevRankAsync("t-digest", 9))[0]); + // } + [Fact] public void TestReset() { @@ -164,7 +317,6 @@ public async Task TestAddAsync() AssertMergedUnmergedNodes(tdigest, "tdadd", 0, 5); } - [Fact] public void TestMerge() { @@ -175,13 +327,13 @@ public void TestMerge() tdigest.Create("td2", 100); tdigest.Create("td4m", 100); - Assert.True(tdigest.Merge("td2", "td4m")); + Assert.True(tdigest.Merge("td2", sourceKeys: "td4m")); AssertMergedUnmergedNodes(tdigest, "td2", 0, 0); tdigest.Add("td2", DefinedValueWeight(1, 1), DefinedValueWeight(1, 1), DefinedValueWeight(1, 1)); tdigest.Add("td4m", DefinedValueWeight(1, 100), DefinedValueWeight(1, 100)); - Assert.True(tdigest.Merge("td2", "td4m")); + Assert.True(tdigest.Merge("td2", sourceKeys: "td4m")); AssertMergedUnmergedNodes(tdigest, "td2", 3, 2); } @@ -196,54 +348,52 @@ public async Task TestMergeAsync() await tdigest.CreateAsync("td2", 100); await tdigest.CreateAsync("td4m", 100); - Assert.True(await tdigest.MergeAsync("td2", "td4m")); + Assert.True(await tdigest.MergeAsync("td2", sourceKeys: "td4m")); AssertMergedUnmergedNodes(tdigest, "td2", 0, 0); await tdigest.AddAsync("td2", DefinedValueWeight(1, 1), DefinedValueWeight(1, 1), DefinedValueWeight(1, 1)); await tdigest.AddAsync("td4m", DefinedValueWeight(1, 100), DefinedValueWeight(1, 100)); - Assert.True(await tdigest.MergeAsync("td2", "td4m")); + Assert.True(await tdigest.MergeAsync("td2", sourceKeys: "td4m")); AssertMergedUnmergedNodes(tdigest, "td2", 3, 2); } [Fact] - public void TestMergeStore() + public void MergeMultiAndParams() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); var tdigest = db.TDIGEST(); - tdigest.Create("from1", 100); tdigest.Create("from2", 200); - tdigest.Add("from1", 1, 1); - tdigest.Add("from2", 1, 10); + tdigest.Add("from1", 1d, 1); + tdigest.Add("from2", 1d, 10); 
- Assert.True(tdigest.MergeStore("to", 2, 100, "from1", "from2")); + Assert.True(tdigest.Merge("to", 2, sourceKeys: new RedisKey[] { "from1", "from2" })); AssertTotalWeight(tdigest, "to", 11d); - Assert.True(tdigest.MergeStore("to50", 2, 50, "from1", "from2")); - Assert.Equal(50, tdigest.Info("to50").Compression); + Assert.True(tdigest.Merge("to", 50, true, "from1", "from2")); + Assert.Equal(50, tdigest.Info("to").Compression); } [Fact] - public async Task TestMergeStoreAsync() + public async Task MergeMultiAndParamsAsync() { IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); var tdigest = db.TDIGEST(); + tdigest.Create("from1", 100); + tdigest.Create("from2", 200); - await tdigest.CreateAsync("from1", 100); - await tdigest.CreateAsync("from2", 200); - - await tdigest.AddAsync("from1", 1, 1); - await tdigest.AddAsync("from2", 1, 10); + tdigest.Add("from1", 1d, 1); + tdigest.Add("from2", 1d, 10); - Assert.True(await tdigest.MergeStoreAsync("to", 2, 100, "from1", "from2")); + Assert.True(await tdigest.MergeAsync("to", 2, sourceKeys: new RedisKey[] { "from1", "from2" })); AssertTotalWeight(tdigest, "to", 11d); - Assert.True(await tdigest.MergeStoreAsync("to50", 2, 50, "from1", "from2")); - Assert.Equal(50, (await tdigest.InfoAsync("to50")).Compression); + Assert.True(await tdigest.MergeAsync("to", 50, true, "from1", "from2")); + Assert.Equal(50, tdigest.Info("to").Compression); } [Fact] @@ -254,11 +404,14 @@ public void TestCDF() var tdigest = db.TDIGEST(); tdigest.Create("tdcdf", 100); - Assert.Equal(double.NaN, tdigest.CDF("tdcdf", 50)); + foreach(var item in tdigest.CDF("tdcdf", 50)) + { + Assert.Equal(double.NaN, item); + } tdigest.Add("tdcdf", DefinedValueWeight(1, 1), DefinedValueWeight(1, 1), DefinedValueWeight(1, 1)); tdigest.Add("tdcdf", DefinedValueWeight(100, 1), DefinedValueWeight(100, 1)); - Assert.Equal(0.6, tdigest.CDF("tdcdf", 50)); + Assert.Equal(new double[]{0.6}, tdigest.CDF("tdcdf", 50)); } [Fact] @@ -269,11 +422,14 @@ public async Task TestCDFAsync() var tdigest = db.TDIGEST(); await tdigest.CreateAsync("tdcdf", 100); - Assert.Equal(double.NaN, await tdigest.CDFAsync("tdcdf", 50)); + foreach (var item in (await tdigest.CDFAsync("tdcdf", 50))) + { + Assert.Equal(double.NaN, item); + } await tdigest.AddAsync("tdcdf", DefinedValueWeight(1, 1), DefinedValueWeight(1, 1), DefinedValueWeight(1, 1)); await tdigest.AddAsync("tdcdf", DefinedValueWeight(100, 1), DefinedValueWeight(100, 1)); - Assert.Equal(0.6, await tdigest.CDFAsync("tdcdf", 50)); + Assert.Equal(new double[]{0.6}, await tdigest.CDFAsync("tdcdf", 50)); } [Fact] @@ -316,8 +472,8 @@ public void TestMinAndMax() var tdigest = db.TDIGEST(); tdigest.Create(key, 100); - Assert.Equal(double.MaxValue, tdigest.Min(key)); - Assert.Equal(-double.MaxValue, tdigest.Max(key)); + Assert.Equal(double.NaN, tdigest.Min(key)); + Assert.Equal(double.NaN, tdigest.Max(key)); tdigest.Add(key, DefinedValueWeight(2, 1)); tdigest.Add(key, DefinedValueWeight(5, 1)); @@ -333,8 +489,8 @@ public async Task TestMinAndMaxAsync() var tdigest = db.TDIGEST(); await tdigest.CreateAsync(key, 100); - Assert.Equal(double.MaxValue, await tdigest.MinAsync(key)); - Assert.Equal(-double.MaxValue, await tdigest.MaxAsync(key)); + Assert.Equal(double.NaN, await tdigest.MinAsync(key)); + Assert.Equal(double.NaN, await tdigest.MaxAsync(key)); await tdigest.AddAsync(key, DefinedValueWeight(2, 1)); await tdigest.AddAsync(key, DefinedValueWeight(5, 1)); @@ -353,7 +509,7 @@ public void TestTrimmedMean() for (int i = 0; i < 20; i++) { - 
tdigest.Add(key, new Tuple<double, double>(i, 1)); + tdigest.Add(key, new Tuple<double, long>(i, 1)); } Assert.Equal(9.5, tdigest.TrimmedMean(key, 0.1, 0.9)); @@ -373,7 +529,7 @@ public async Task TestTrimmedMeanAsync() for (int i = 0; i < 20; i++) { - await tdigest.AddAsync(key, new Tuple<double, double>(i, 1)); + await tdigest.AddAsync(key, new Tuple<double, long>(i, 1)); } Assert.Equal(9.5, await tdigest.TrimmedMeanAsync(key, 0.1, 0.9)); @@ -418,15 +574,25 @@ public void TestModulePrefixs1() } - static Tuple<double, double> RandomValueWeight() + static Tuple<double, long> RandomValueWeight() { Random random = new Random(); - return new Tuple<double, double>(random.NextDouble() * 10000, random.NextDouble() * 500 + 1); + return new Tuple<double, long>(random.NextDouble() * 10000, random.NextInt64() + 1); + } + + static Tuple<double, long>[] RandomValueWeightArray(int count) + { + var arr = new Tuple<double, long>[count]; + for (int i = 0; i < count; i++) + { + arr[i] = RandomValueWeight(); + } + return arr; } - static Tuple<double, double> DefinedValueWeight(double value, double weight) + static Tuple<double, long> DefinedValueWeight(double value, long weight) { - return new Tuple<double, double>(value, weight); + return new Tuple<double, long>(value, weight); } } From f78c168f3397faf56716841c92f6e89e33e4c7e7 Mon Sep 17 00:00:00 2001 From: shacharPash <s.pashchur@gmail.com> Date: Sun, 2 Oct 2022 12:46:05 +0300 Subject: [PATCH 2/2] Fix Rank Commands Tests --- src/NRedisStack/Tdigest/TdigestCommands.cs | 16 +- .../NRedisStack.Tests/Tdigest/TdigestTests.cs | 155 +++++++++--------- 2 files changed, 89 insertions(+), 82 deletions(-) diff --git a/src/NRedisStack/Tdigest/TdigestCommands.cs b/src/NRedisStack/Tdigest/TdigestCommands.cs index bc975889..3e373973 100644 --- a/src/NRedisStack/Tdigest/TdigestCommands.cs +++ b/src/NRedisStack/Tdigest/TdigestCommands.cs @@ -389,13 +389,13 @@ public async Task<long[]> RevRankAsync(RedisKey key, params long[] values) /// <param name="ranks">input rank, for which the value will be determined.</param> /// <returns>an array of results populated with value_1, value_2, ..., value_N.</returns> /// <remarks><seealso href="https://redis.io/commands/tdigest.byrank"/></remarks> - public long[] ByRank(RedisKey key, params long[] ranks) + public double[] ByRank(RedisKey key, params long[] ranks) { if (ranks.Length < 1) throw new ArgumentOutOfRangeException(nameof(ranks)); var args = new List<object>(ranks.Length + 1) { key }; foreach (var v in ranks) args.Add(v); - return _db.Execute(TDIGEST.BYRANK, args).ToLongArray(); + return _db.Execute(TDIGEST.BYRANK, args).ToDoubleArray(); } /// <summary> @@ -405,13 +405,13 @@ public long[] ByRank(RedisKey key, params long[] ranks) /// <param name="ranks">input rank, for which the value will be determined.</param> /// <returns>an array of results populated with value_1, value_2, ..., value_N.</returns> /// <remarks><seealso href="https://redis.io/commands/tdigest.byrank"/></remarks> - public async Task<long[]> ByRankAsync(RedisKey key, params long[] ranks) + public async Task<double[]> ByRankAsync(RedisKey key, params long[] ranks) { if (ranks.Length < 1) throw new ArgumentOutOfRangeException(nameof(ranks)); var args = new List<object>(ranks.Length + 1) { key }; foreach (var v in ranks) args.Add(v); - return (await _db.ExecuteAsync(TDIGEST.BYRANK, args)).ToLongArray(); + return (await _db.ExecuteAsync(TDIGEST.BYRANK, args)).ToDoubleArray(); } /// <summary> @@ -421,13 +421,13 @@ public async Task<long[]> ByRankAsync(RedisKey key, 
params long[] ranks) /// <param name="ranks">input reverse rank, for which the value will be determined.</param> /// <returns>an array of results populated with value_1, value_2, ..., value_N.</returns> /// <remarks><seealso href="https://redis.io/commands/tdigest.byrevrank"/></remarks> - public long[] ByRevRank(RedisKey key, params long[] ranks) + public double[] ByRevRank(RedisKey key, params long[] ranks) { if (ranks.Length < 1) throw new ArgumentOutOfRangeException(nameof(ranks)); var args = new List<object>(ranks.Length + 1) { key }; foreach (var v in ranks) args.Add(v); - return _db.Execute(TDIGEST.BYREVRANK, args).ToLongArray(); + return _db.Execute(TDIGEST.BYREVRANK, args).ToDoubleArray(); } /// <summary> @@ -437,13 +437,13 @@ public long[] ByRevRank(RedisKey key, params long[] ranks) /// <param name="ranks">input reverse rank, for which the value will be determined.</param> /// <returns>an array of results populated with value_1, value_2, ..., value_N.</returns> /// <remarks><seealso href="https://redis.io/commands/tdigest.byrevrank"/></remarks> - public async Task<long[]> ByRevRankAsync(RedisKey key, params long[] ranks) + public async Task<double[]> ByRevRankAsync(RedisKey key, params long[] ranks) { if (ranks.Length < 1) throw new ArgumentOutOfRangeException(nameof(ranks)); var args = new List<object>(ranks.Length + 1) { key }; foreach (var v in ranks) args.Add(v); - return ( await _db.ExecuteAsync(TDIGEST.BYREVRANK, args)).ToLongArray(); + return ( await _db.ExecuteAsync(TDIGEST.BYREVRANK, args)).ToDoubleArray(); } /// <summary> diff --git a/tests/NRedisStack.Tests/Tdigest/TdigestTests.cs b/tests/NRedisStack.Tests/Tdigest/TdigestTests.cs index 89ade283..9201c204 100644 --- a/tests/NRedisStack.Tests/Tdigest/TdigestTests.cs +++ b/tests/NRedisStack.Tests/Tdigest/TdigestTests.cs @@ -171,77 +171,84 @@ public async Task TestRevRankAsync() } // TODO: fix those tests: - // [Fact] - // public void TestByRanks() - // { - // IDatabase db = redisFixture.Redis.GetDatabase(); - // db.Execute("FLUSHALL"); - // var tdigest = db.TDIGEST(); - - // Assert.True(tdigest.Create("t-digest", 500)); - // var tuples = new Tuple<double, long>[20]; - // for (int i = 0; i < 20; i++) - // { - // tuples[i] = new(i, 1); - // } - // Assert.True(tdigest.Add("t-digest", tuples)); - // Assert.Equal(1, tdigest.ByRank("t-digest", 0)[0]); - // Assert.Equal(10, tdigest.ByRank("t-digest", 9)[0]); - // } - - // [Fact] - // public async Task TestByRanksAsync() - // { - // IDatabase db = redisFixture.Redis.GetDatabase(); - // db.Execute("FLUSHALL"); - // var tdigest = db.TDIGEST(); - - // Assert.True(tdigest.Create("t-digest", 500)); - // var tuples = new Tuple<double, long>[20]; - // for (int i = 0; i < 20; i++) - // { - // tuples[i] = new(i, 1); - // } - // Assert.True(tdigest.Add("t-digest", tuples)); - // Assert.Equal(1, (await tdigest.ByRankAsync("t-digest", 0))[0]); - // Assert.Equal(10, (await tdigest.ByRankAsync("t-digest", 9))[0]); - // } - - // [Fact] - // public void TestByRevRanks() - // { - // IDatabase db = redisFixture.Redis.GetDatabase(); - // db.Execute("FLUSHALL"); - // var tdigest = db.TDIGEST(); - - // Assert.True(tdigest.Create("t-digest", 500)); - // var tuples = new Tuple<double, long>[20]; - // for (int i = 0; i < 20; i++) - // { - // tuples[i] = new(i, 1); - // } - // Assert.True(tdigest.Add("t-digest", tuples)); - // Assert.Equal(10, tdigest.ByRevRank("t-digest", 0)[0]); - // Assert.Equal(2, tdigest.ByRevRank("t-digest", 9)[0]); - // } - - // [Fact] - // public async Task TestByRevRanksAsync() 
- // { - // IDatabase db = redisFixture.Redis.GetDatabase(); - // db.Execute("FLUSHALL"); - // var tdigest = db.TDIGEST(); - - // Assert.True(tdigest.Create("t-digest", 500)); - // var tuples = new Tuple<double, long>[20]; - // for (int i = 0; i < 20; i++) - // { - // tuples[i] = new(i, 1); - // } - // Assert.True(tdigest.Add("t-digest", tuples)); - // Assert.Equal(10, (await tdigest.ByRevRankAsync("t-digest", 0))[0]); - // Assert.Equal(2, (await tdigest.ByRevRankAsync("t-digest", 9))[0]); - // } + [Fact] + public void TestByRank() + { + IDatabase db = redisFixture.Redis.GetDatabase(); + db.Execute("FLUSHALL"); + var tdigest = db.TDIGEST(); + + Assert.True(tdigest.Create("t-digest", 500)); + var tuples = new Tuple<double, long>[10]; + for (int i = 1; i <= 10; i++) + { + tuples[i - 1] = new(i, 1); + } + Assert.True(tdigest.Add("t-digest", tuples)); + Assert.Equal(1, tdigest.ByRank("t-digest", 0)[0]); + Assert.Equal(10, tdigest.ByRank("t-digest", 9)[0]); + Assert.True(double.IsInfinity(tdigest.ByRank("t-digest", 100)[0])); + //Assert.Throws<RedisServerException>(() => tdigest.ByRank("t-digest", -1)[0]); + } + + [Fact] + public async Task TestByRankAsync() + { + IDatabase db = redisFixture.Redis.GetDatabase(); + db.Execute("FLUSHALL"); + var tdigest = db.TDIGEST(); + + Assert.True(tdigest.Create("t-digest", 500)); + var tuples = new Tuple<double, long>[10]; + for (int i = 1; i <= 10; i++) + { + tuples[i - 1] = new(i, 1); + } + Assert.True(tdigest.Add("t-digest", tuples)); + Assert.Equal(1, (await tdigest.ByRankAsync("t-digest", 0))[0]); + Assert.Equal(10, (await tdigest.ByRankAsync("t-digest", 9))[0]); + Assert.True(double.IsInfinity((await tdigest.ByRankAsync("t-digest", 100))[0])); + } + + [Fact] + public void TestByRevRank() + { + IDatabase db = redisFixture.Redis.GetDatabase(); + db.Execute("FLUSHALL"); + var tdigest = db.TDIGEST(); + + Assert.True(tdigest.Create("t-digest", 500)); + var tuples = new Tuple<double, long>[10]; + for (int i = 1; i <= 10; i++) + { + tuples[i - 1] = new(i, 1); + } + Assert.True(tdigest.Add("t-digest", tuples)); + Assert.Equal(10, tdigest.ByRevRank("t-digest", 0)[0]); + Assert.Equal(2, tdigest.ByRevRank("t-digest", 9)[0]); + Assert.True(double.IsInfinity(-tdigest.ByRevRank("t-digest", 100)[0])); + //Assert.Throws<RedisServerException>(() => tdigest.ByRank("t-digest", -1)[0]); + } + + [Fact] + public async Task TestByRevRankAsync() + { + IDatabase db = redisFixture.Redis.GetDatabase(); + db.Execute("FLUSHALL"); + var tdigest = db.TDIGEST(); + + Assert.True(tdigest.Create("t-digest", 500)); + var tuples = new Tuple<double, long>[10]; + for (int i = 1; i <= 10; i++) + { + tuples[i - 1] = new(i, 1); + } + Assert.True(tdigest.Add("t-digest", tuples)); + Assert.Equal(10, (await tdigest.ByRevRankAsync("t-digest", 0))[0]); + Assert.Equal(2, (await tdigest.ByRevRankAsync("t-digest", 9))[0]); + Assert.True(double.IsInfinity(-(await tdigest.ByRevRankAsync("t-digest", 100))[0])); + } + [Fact] public void TestReset() @@ -404,14 +411,14 @@ public void TestCDF() var tdigest = db.TDIGEST(); tdigest.Create("tdcdf", 100); - foreach(var item in tdigest.CDF("tdcdf", 50)) + foreach (var item in tdigest.CDF("tdcdf", 50)) { Assert.Equal(double.NaN, item); } tdigest.Add("tdcdf", DefinedValueWeight(1, 1), DefinedValueWeight(1, 1), DefinedValueWeight(1, 1)); tdigest.Add("tdcdf", DefinedValueWeight(100, 1), DefinedValueWeight(100, 1)); - Assert.Equal(new double[]{0.6}, tdigest.CDF("tdcdf", 50)); + Assert.Equal(new double[] { 0.6 }, tdigest.CDF("tdcdf", 50)); } [Fact] @@ -429,7 +436,7 @@ 
public async Task TestCDFAsync() await tdigest.AddAsync("tdcdf", DefinedValueWeight(1, 1), DefinedValueWeight(1, 1), DefinedValueWeight(1, 1)); await tdigest.AddAsync("tdcdf", DefinedValueWeight(100, 1), DefinedValueWeight(100, 1)); - Assert.Equal(new double[]{0.6}, await tdigest.CDFAsync("tdcdf", 50)); + Assert.Equal(new double[] { 0.6 }, await tdigest.CDFAsync("tdcdf", 50)); } [Fact]
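
For reference, a minimal C# sketch of how the rank-family commands added in this patch (TDIGEST.RANK, REVRANK, BYRANK, BYREVRANK) would be called. The connection setup and namespaces (StackExchange.Redis, NRedisStack.RedisStackCommands) are assumed boilerplate and are not part of the diff; the TDIGEST() calls mirror the tests above, including the Add overload that now takes Tuple<double, long> value/weight pairs.

```csharp
using System;
using NRedisStack;
using NRedisStack.RedisStackCommands;
using StackExchange.Redis;

// Assumes a local Redis Stack instance; the key name is illustrative.
var db = ConnectionMultiplexer.Connect("localhost").GetDatabase();
var tdigest = db.TDIGEST();

tdigest.Create("t-digest", 500);

// Observations 1..10, each with weight 1 (weights are now long, not double).
var observations = new Tuple<double, long>[10];
for (int i = 1; i <= 10; i++)
{
    observations[i - 1] = new(i, 1);
}
tdigest.Add("t-digest", observations);

// Estimated rank of one or more values (observations <= value).
long[] ranks = tdigest.Rank("t-digest", 5, 10);

// Reverse rank (observations >= value).
long[] revRanks = tdigest.RevRank("t-digest", 5, 10);

// Value estimates for given ranks; after PATCH 2/2 these return double[].
double[] byRank = tdigest.ByRank("t-digest", 0, 9);
double[] byRevRank = tdigest.ByRevRank("t-digest", 0, 9);
```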
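
The MERGESTORE overloads are folded into Merge, which now takes an optional compression value and an override flag ahead of the params source keys. A hedged sketch of the new call shapes, using the same assumed setup as above (key names are illustrative):

```csharp
using NRedisStack;
using NRedisStack.RedisStackCommands;
using StackExchange.Redis;

var db = ConnectionMultiplexer.Connect("localhost").GetDatabase();
var tdigest = db.TDIGEST();

tdigest.Create("from1", 100);
tdigest.Create("from2", 200);
tdigest.Add("from1", 1d, 1);
tdigest.Add("from2", 1d, 10);

// Plain merge into a destination key; compression and override keep their defaults.
tdigest.Merge("to", sourceKeys: new RedisKey[] { "from1", "from2" });

// Merge with an explicit COMPRESSION and OVERRIDE of the existing destination.
tdigest.Merge("to", 50, true, "from1", "from2");
Console.WriteLine(tdigest.Info("to").Compression); // 50
```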
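
CDF now accepts several cutoff values at once and returns an array, INFO exposes the new "Sum weights" and "Memory usage" fields, and MIN, MAX, and CDF report NaN on an empty sketch rather than +/-DBL_MAX. A short sketch under the same assumptions:

```csharp
using System;
using NRedisStack;
using NRedisStack.RedisStackCommands;
using StackExchange.Redis;

var db = ConnectionMultiplexer.Connect("localhost").GetDatabase();
var tdigest = db.TDIGEST();

tdigest.Create("tdcdf", 100);

// Empty sketch: these report NaN.
Console.WriteLine(tdigest.Min("tdcdf"));        // NaN
Console.WriteLine(tdigest.CDF("tdcdf", 50)[0]); // NaN

tdigest.Add("tdcdf", new Tuple<double, long>(1, 3), new Tuple<double, long>(100, 2));

// One CDF estimate per cutoff.
double[] fractions = tdigest.CDF("tdcdf", 25, 50, 75);

// Quantile and the trimmed mean are unchanged apart from the corrected doc comments.
double[] quantiles = tdigest.Quantile("tdcdf", 0.5);
double trimmed = tdigest.TrimmedMean("tdcdf", 0.1, 0.9);

// INFO now also carries the summed weights and memory usage of the sketch.
var info = tdigest.Info("tdcdf");
Console.WriteLine($"{info.SumWeights} {info.MemoryUsage}");
```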