diff --git a/src/NRedisStack/ResponseParser.cs b/src/NRedisStack/ResponseParser.cs index 5c9be0e6..6f161b0e 100644 --- a/src/NRedisStack/ResponseParser.cs +++ b/src/NRedisStack/ResponseParser.cs @@ -485,6 +485,33 @@ public static TimeSeriesInformation ToTimeSeriesInfo(this RedisResult result) lastTimestamp, retentionTime, chunkCount, chunkSize, labels, sourceKey, rules, duplicatePolicy, keySelfName, chunks); } + public static Dictionary<string, RedisValue> ToFtInfoAsDictionary(this RedisResult value) + { + var res = (RedisResult[])value; + var info = new Dictionary<string, RedisValue>(); + for (int i = 0; i < res.Length; i += 2) + { + var val = res[i + 1]; + if (val.Type != ResultType.MultiBulk) + { + info.Add((string)res[i], (RedisValue)val); + } + } + return info; + } + + public static Dictionary<string, string> ToConfigDictionary(this RedisResult value) + { + var res = (RedisResult[])value; + var dict = new Dictionary<string, string>(); + foreach (var pair in res) + { + var arr = (RedisResult[])pair; + dict.Add(arr[0].ToString(), arr[1].ToString()); + } + return dict; + } + public static IReadOnlyList<TimeSeriesChunck> ToTimeSeriesChunkArray(this RedisResult result) { RedisResult[] redisResults = (RedisResult[])result; diff --git a/src/NRedisStack/Search/AggregationRequest.cs b/src/NRedisStack/Search/AggregationRequest.cs new file mode 100644 index 00000000..ed781bcd --- /dev/null +++ b/src/NRedisStack/Search/AggregationRequest.cs @@ -0,0 +1,217 @@ +using System; +using System.Collections.Generic; +using System.Text; +// using NRediSearch.Aggregation.Reducers; +using StackExchange.Redis; + +namespace NRedisStack.Search.Aggregation +{ + public class AggregationRequest + { + private List<object> args = new List<object>(); // Check if Readonly + private bool isWithCursor = false; + + public AggregationRequest(string query) + { + args.Add(query); + } + + public AggregationRequest() : this("*") { } + + // public AggregationRequest load(params string[] fields) + // { + // return load(FieldName.Convert(fields)); + // } + + public AggregationRequest Load(params FieldName[] fields) + { + args.Add("LOAD"); + int loadCountIndex = args.Count(); + args.Add(null); + int loadCount = 0; + foreach (FieldName fn in fields) + { + loadCount += fn.AddCommandArguments(args); + } + args.Insert(loadCountIndex, loadCount.ToString()); + return this; + } + + public AggregationRequest LoadAll() + { + args.Add("LOAD"); + args.Add("*"); + return this; + } + + public AggregationRequest Limit(int offset, int count) + { + Limit limit = new Limit(offset, count); + limit.SerializeRedisArgs(args); + return this; + } + + public AggregationRequest Limit(int count) + { + return Limit(0, count); + } + + public AggregationRequest SortBy(params SortedField[] Fields) + { + args.Add("SORTBY"); + args.Add(Fields.Length * 2); + foreach (SortedField field in Fields) + { + args.Add(field.FieldName); + args.Add(field.Order); + } + + return this; + } + + public AggregationRequest SortBy(int max, params SortedField[] Fields) + { + SortBy(Fields); + if (max > 0) + { + args.Add("MAX"); + args.Add(max); + } + return this; + } + + public AggregationRequest SortByAsc(string field) + { + return SortBy(SortedField.Asc(field)); + } + + public AggregationRequest SortByDesc(string field) + { + return SortBy(SortedField.Desc(field)); + } + + public AggregationRequest Apply(string projection, string alias) + { + args.Add("APPLY"); + args.Add(projection); + args.Add("AS"); + args.Add(alias); + return this; + } + + public AggregationRequest GroupBy(IList<string> fields, IList<Reducer> reducers) + { + // string[] fieldsArr = new string[fields.size()]; + Group g = new Group(fields); + foreach (Reducer r in reducers) + { + g.Reduce(r); + } + GroupBy(g); + return this; + } + + public AggregationRequest GroupBy(string field, params Reducer[] reducers) + { + return GroupBy(new string[] { field }, reducers); + } + + public AggregationRequest GroupBy(Group group) + { + args.Add("GROUPBY"); + group.SerializeRedisArgs(args); + return this; + } + + public AggregationRequest Filter(string expression) + { + args.Add("FILTER"); + args.Add(expression); + return this; + } + + public AggregationRequest Cursor(int count, long maxIdle) + { + isWithCursor = true; + if (count > 0) + { + args.Add("WITHCURSOR"); + args.Add("COUNT"); + args.Add(count); + if (maxIdle < long.MaxValue && maxIdle >= 0) + { + args.Add("MAXIDLE"); + args.Add(maxIdle); + } + } + return this; + } + + public AggregationRequest Verbatim() + { + args.Add("VERBATIM"); + return this; + } + + public AggregationRequest Timeout(long timeout) + { + if (timeout >= 0) + { + args.Add("TIMEOUT"); + args.Add(timeout); + } + return this; + } + + public AggregationRequest Params(Dictionary<string, object> nameValue) + { + if (nameValue.Count >= 1) + { + args.Add("PARAMS"); + args.Add(nameValue.Count * 2); + foreach (var entry in nameValue) + { + args.Add(entry.Key); + args.Add(entry.Value); + } + } + + return this; + } + + public AggregationRequest Dialect(int dialect) + { + args.Add("DIALECT"); + args.Add(dialect); + return this; + } + + public List<object> GetArgs() + { + return args; + } + + public void SerializeRedisArgs(List<object> redisArgs) + { + foreach (var s in GetArgs()) + { + redisArgs.Add(s); + } + } + + // public string getArgsstring() + // { + // StringBuilder sj = new StringBuilder(" "); + // foreach (var s in GetArgs()) + // { + // sj.Add(s.ToString()); + // } + // return sj.tostring(); + // } + + public bool IsWithCursor() + { + return isWithCursor; + } + } +} diff --git a/src/NRedisStack/Search/AggregationResult.cs b/src/NRedisStack/Search/AggregationResult.cs new file mode 100644 index 00000000..03545895 --- /dev/null +++ b/src/NRedisStack/Search/AggregationResult.cs @@ -0,0 +1,48 @@ +using System.Collections.Generic; +using StackExchange.Redis; + +namespace NRedisStack.Search.Aggregation +{ + public sealed class AggregationResult + { + public long TotalResults { get; } + private readonly Dictionary<string, RedisValue>[] _results; + public long CursorId { get; } + + + internal AggregationResult(RedisResult result, long cursorId = -1) + { + var arr = (RedisResult[])result; + + // the first element is always the number of results + TotalResults = (long)arr[0]; + + _results = new Dictionary<string, RedisValue>[arr.Length - 1]; + for (int i = 1; i < arr.Length; i++) + { + var raw = (RedisResult[])arr[i]; + var cur = new Dictionary<string, RedisValue>(); + for (int j = 0; j < raw.Length;) + { + var key = (string)raw[j++]; + var val = raw[j++]; + if (val.Type != ResultType.MultiBulk) + cur.Add(key, (RedisValue)val); + } + _results[i - 1] = cur; + } + + CursorId = cursorId; + } + public IReadOnlyList<Dictionary<string, RedisValue>> GetResults() => _results; + + public Dictionary<string, RedisValue> this[int index] + => index >= _results.Length ? null : _results[index]; + + public Row GetRow(int index) + { + if (index >= _results.Length) return default; + return new Row(_results[index]); + } + } +} \ No newline at end of file diff --git a/src/NRedisStack/Search/DataTypes/InfoResult.cs b/src/NRedisStack/Search/DataTypes/InfoResult.cs new file mode 100644 index 00000000..1040f440 --- /dev/null +++ b/src/NRedisStack/Search/DataTypes/InfoResult.cs @@ -0,0 +1,189 @@ +using System.Collections.Generic; +using StackExchange.Redis; + +namespace NRedisStack.Search.DataTypes +{ + public class InfoResult + { + private readonly Dictionary<string, RedisResult> _all = new Dictionary<string, RedisResult>(); + + public string IndexName => GetString("index_name"); + public Dictionary<string, RedisResult> IndexOption => GetRedisResultDictionary("index_options"); + public Dictionary<string, RedisResult[]> IndexDefinition => GetRedisResultsDictionary("index_definition"); + + // public Dictionary<string, RedisResult[]> Attributes => GetRedisResultsDictionary("attributes"); // TODO: check if this is correct + public Dictionary<string, RedisResult>[] Attributes => GetRedisResultDictionaryArray("attributes"); // TODO: check if this is correct + + + public long NumDocs => GetLong("num_docs"); + + public string MaxDocId => GetString("max_doc_id"); + + public long NumTerms => GetLong("num_terms"); + + public long NumRecords => GetLong("num_records"); + + public double InvertedSzMebibytes => GetDouble("inverted_sz_mb"); + + public double VectorIndexSzMebibytes => GetDouble("vector_index_sz_mb"); // TODO: check if double or long + + public double TotalInvertedIndexBlocks => GetDouble("total_inverted_index_blocks"); + + // public double InvertedCapOvh => GetDouble("inverted_cap_ovh"); + + public double OffsetVectorsSzMebibytes => GetDouble("offset_vectors_sz_mb"); + + public double DocTableSizeMebibytes => GetDouble("doc_table_size_mb"); + + public double SortableValueSizeMebibytes => GetDouble("sortable_value_size_mb"); + + public double KeyTableSizeMebibytes => GetDouble("key_table_size_mb"); + + // public double SkipIndexSizeMebibytes => GetDouble("skip_index_size_mb"); + + // public double ScoreIndexSizeMebibytes => GetDouble("score_index_size_mb"); + + public double RecordsPerDocAvg => GetDouble("records_per_doc_avg"); + + public double BytesPerRecordAvg => GetDouble("bytes_per_record_avg"); + + public double OffsetsPerTermAvg => GetDouble("offsets_per_term_avg"); + + public double OffsetBitsPerRecordAvg => GetDouble("offset_bits_per_record_avg"); + + public long HashIndexingFailures => GetLong("hash_indexing_failures"); + + public double TotalIndexingTime => GetDouble("total_indexing_time"); + + public long Indexing => GetLong("indexing"); + + public double PercentIndexed => GetDouble("percent_indexed"); + + public long NumberOfUses => GetLong("number_of_uses"); + + + public Dictionary<string, RedisResult> GcStats => GetRedisResultDictionary("gc_stats"); + + public Dictionary<string, RedisResult> CursorStats => GetRedisResultDictionary("cursor_stats"); + + public InfoResult(RedisResult result) + { + var results = (RedisResult[])result; + + for (var i = 0; i < results.Length; i += 2) + { + var key = (string)results[i]; + var value = results[i + 1]; + + _all.Add(key, value); + } + } + + private string GetString(string key) => _all.TryGetValue(key, out var value) ? (string)value : default; + + private long GetLong(string key) => _all.TryGetValue(key, out var value) ? (long)value : default; + + private double GetDouble(string key) + { + if (_all.TryGetValue(key, out var value)) + { + if ((string)value == "-nan") + { + return default; + } + else + { + return (double)value; + } + } + else + { + return default; + } + } + + private Dictionary<string, RedisResult> GetRedisResultDictionary(string key) + { + if (_all.TryGetValue(key, out var value)) + { + var values = (RedisResult[])value; + var result = new Dictionary<string, RedisResult>(); + + for (var ii = 0; ii < values.Length; ii += 2) + { + result.Add((string)values[ii], values[ii + 1]); + } + + return result; + } + else + { + return default; + } + } + + private Dictionary<string, RedisResult[]> GetRedisResultsDictionary(string key) + { + if (_all.TryGetValue(key, out var value)) + { + var result = new Dictionary<string, RedisResult[]>(); + + foreach (RedisResult[] fv in (RedisResult[])value) + { + result.Add((string)fv[0], fv); + } + + return result; + } + else + { + return default; + } + } + + private Dictionary<string, RedisResult>[] GetRedisResultDictionaryArray(string key) + { + if (_all.TryGetValue(key, out var value)) + { + var values = (RedisResult[])value; + var result = new Dictionary<string, RedisResult>[values.Length]; + for (int i = 0; i < values.Length; i++) + { + var fv = (RedisResult[])values[i]; + var dict = new Dictionary<string, RedisResult>(); + for (int j = 0; j < fv.Length; j += 2) + { + dict.Add((string)fv[j], fv[j + 1]); + } + result[i] = dict; + } + return result; + } + + else + { + return default; + } + } + // private Dictionary<string, RedisResult>[] GetRedisResultsDictionaryTry(string key) + // { + // if (_all.TryGetValue(key, out var value)) + // { + // var result = new List<Dictionary<string, RedisResult>>(); + + // int i = 0; + // foreach (RedisResult[] fv in (RedisResult[])value) + // { + // var res = GetRedisResultDictionary((string)fv[i++]); + // result.Add(res); + // } + + // return result.ToArray(); + // } + // else + // { + // return default; + // } + // } + } +} \ No newline at end of file diff --git a/src/NRedisStack/Search/DataTypes/SearchInformation.cs b/src/NRedisStack/Search/DataTypes/SearchInformation.cs new file mode 100644 index 00000000..26c6cb63 --- /dev/null +++ b/src/NRedisStack/Search/DataTypes/SearchInformation.cs @@ -0,0 +1,34 @@ +// namespace NRedisStack.Search.DataTypes +// { +// /// <summary> +// /// This class represents the response for SEARCH.INFO command. +// /// This object has Read-only properties and cannot be generated outside a SEARCH.INFO response. +// /// </summary> +// public class SearchInformation +// { +// // TODO: work on it with someone from Search team +// // public string IndexName { get; private set; } +// // public string[] IndexOptions { get; private set; } +// // public long IndexDefinition { get; private set; } +// // public long UnmergedNodes { get; private set; } +// // public double MergedWeight { get; private set; } +// // public double UnmergedWeight { get; private set; } + +// // public long TotalCompressions { get; private set; } + + +// // internal SearchInformation(long compression, long capacity, long mergedNodes, +// // long unmergedNodes, double mergedWeight, +// // double unmergedWeight, long totalCompressions) + +// // { +// // Compression = compression; +// // Capacity = capacity; +// // MergedNodes = mergedNodes; +// // UnmergedNodes = unmergedNodes; +// // MergedWeight = mergedWeight; +// // UnmergedWeight = unmergedWeight; +// // TotalCompressions = totalCompressions; +// // } +// } +// } \ No newline at end of file diff --git a/src/NRedisStack/Search/Document.cs b/src/NRedisStack/Search/Document.cs new file mode 100644 index 00000000..96880700 --- /dev/null +++ b/src/NRedisStack/Search/Document.cs @@ -0,0 +1,93 @@ +using System.Collections.Generic; +using StackExchange.Redis; + +namespace NRedisStack.Search +{ + /// <summary> + /// Document represents a single indexed document or entity in the engine + /// </summary> + public class Document + { + public string Id { get; } + public double Score { get; set;} + public byte[] Payload { get; } + public string[] ScoreExplained { get; private set; } // TODO: check if this is needed (Jedis does not have it) + internal readonly Dictionary<string, RedisValue> _properties; + public Document(string id, double score, byte[] payload) : this(id, null, score, payload) { } + public Document(string id) : this(id, null, 1.0, null) { } + + public Document(string id, Dictionary<string, RedisValue> fields, double score = 1.0) : this(id, fields, score, null) { } + + public Document(string id, Dictionary<string, RedisValue> fields, double score, byte[] payload) + { + Id = id; + _properties = fields ?? new Dictionary<string, RedisValue>(); + Score = score; + Payload = payload; + } + + public IEnumerable<KeyValuePair<string, RedisValue>> GetProperties() => _properties; + + public static Document Load(string id, double score, byte[] payload, RedisValue[] fields) + { + Document ret = new Document(id, score, payload); + if (fields != null) + { + for (int i = 0; i < fields.Length; i += 2) + { + string fieldName = (string)fields[i]; + if (fieldName == "$") { + ret["json"] = fields[i + 1]; + } + else { + ret[fieldName] = fields[i + 1]; + } + } + } + return ret; + } + + public static Document Load(string id, double score, byte[] payload, RedisValue[] fields, string[] scoreExplained) + { + Document ret = Document.Load(id, score, payload, fields); + if (scoreExplained != null) + { + ret.ScoreExplained = scoreExplained; + } + return ret; + } + + public RedisValue this[string key] + { + get { return _properties.TryGetValue(key, out var val) ? val : default(RedisValue); } + internal set { _properties[key] = value; } + } + + public bool HasProperty(string key) => _properties.ContainsKey(key); + + internal static Document Parse(string docId, RedisResult result) + { + if (result == null || result.IsNull) return null; + var arr = (RedisResult[])result; + var doc = new Document(docId); + + for(int i = 0; i < arr.Length; ) + { + doc[(string)arr[i++]] = (RedisValue)arr[i++]; + } + return doc; + } + + public Document Set(string field, RedisValue value) + { + this[field] = value; + return this; + } + + public Document SetScore(double score) + { + Score = score; + return this; + } + } +} \ No newline at end of file diff --git a/src/NRedisStack/Search/Extensions/IndexDataTypeExtensions.cs b/src/NRedisStack/Search/Extensions/IndexDataTypeExtensions.cs new file mode 100644 index 00000000..13d59c3c --- /dev/null +++ b/src/NRedisStack/Search/Extensions/IndexDataTypeExtensions.cs @@ -0,0 +1,22 @@ +using System; +using NRedisStack.Literals.Enums; + +namespace NRedisStack.Extensions +{ + internal static class IndexIndexDataType + { + public static string AsArg(this IndexDataType dataType) => dataType switch + { + IndexDataType.Hash => "HASH", + IndexDataType.Json => "JSON", + _ => throw new ArgumentOutOfRangeException(nameof(dataType), "Invalid Index DataType"), + }; + + public static IndexDataType AsDataType(string dataType) => dataType switch + { + "HASH" => IndexDataType.Hash, + "JSON" => IndexDataType.Json, + _ => throw new ArgumentOutOfRangeException(nameof(dataType), $"Invalid Index DataType '{dataType}'"), + }; + } +} \ No newline at end of file diff --git a/src/NRedisStack/Search/FT.CREATE/FTCreateParams.cs b/src/NRedisStack/Search/FT.CREATE/FTCreateParams.cs new file mode 100644 index 00000000..f5573675 --- /dev/null +++ b/src/NRedisStack/Search/FT.CREATE/FTCreateParams.cs @@ -0,0 +1,315 @@ +using NRedisStack.Extensions; +using NRedisStack.Literals; +using NRedisStack.Literals.Enums; +namespace NRedisStack.Search.FT.CREATE +{ + public class FTCreateParams + { + private IndexDataType dataType; + private List<string> prefixes; + private string filter; + private string language; + private string languageField; + private double score; + private string scoreField; + private byte[] payloadField; + private bool maxTextFields; + private bool noOffsets; + private long temporary; + private bool noHL; + private bool noFields; + private bool noFreqs; + private List<string> stopwords; + private bool skipInitialScan; + + public FTCreateParams() + { + } + + public static FTCreateParams CreateParams() + { + return new FTCreateParams(); + } + + /// <summary> + /// Currently supports HASH (default) and JSON. To index JSON, you must have the RedisJSON module + /// installed. + /// </summary> + public FTCreateParams On(IndexDataType dataType) + { + this.dataType = dataType; + return this; + } + + /// <summary> + /// Tells the index which keys it should index. You can add several prefixes to index. + /// </summary> + public FTCreateParams Prefix(params string[] prefixes) + { + if (this.prefixes == null) + { + this.prefixes = new List<string>(prefixes.Length); + } + this.prefixes.AddRange(prefixes); + return this; + } + + /// <summary> + /// This method can be chained to add multiple prefixes. + /// @see FTCreateParams#prefix(java.lang.params string[]) + /// </summary> + public FTCreateParams AddPrefix(string prefix) + { + if (this.prefixes == null) + { + this.prefixes = new List<string>(); + } + this.prefixes.Add(prefix); + return this; + } + + /// <summary> + /// A filter expression with the full RediSearch aggregation expression language. + /// </summary> + public FTCreateParams Filter(string filter) + { + this.filter = filter; + return this; + } + + /// <summary> + /// default language for documents in the index. + /// </summary> + public FTCreateParams Language(string defaultLanguage) + { + this.language = defaultLanguage; + return this; + } + + /// <summary> + /// Document attribute set as the document language. + /// </summary> + public FTCreateParams LanguageField(string languageAttribute) + { + this.languageField = languageAttribute; + return this; + } + + /// <summary> + /// Default score for documents in the index. + /// </summary> + public FTCreateParams Score(double defaultScore) + { + this.score = defaultScore; + return this; + } + + /// <summary> + /// Document attribute that you use as the document rank based on the user ranking. + /// Ranking must be between 0.0 and 1.0. + /// </summary> + public FTCreateParams ScoreField(string scoreField) + { + this.scoreField = scoreField; + return this; + } + + /// <summary> + /// Document attribute that you use as a binary safe payload string to the document that can be + /// evaluated at query time by a custom scoring function or retrieved to the client. + /// </summary> + public FTCreateParams PayloadField(byte[] payloadAttribute) + { + Array.Copy(this.payloadField, payloadAttribute, payloadAttribute.Length); + return this; + } + + /// <summary> + /// Forces RediSearch to encode indexes as if there were more than 32 text attributes. + /// </summary> + public FTCreateParams MaxTextFields() + { + this.maxTextFields = true; + return this; + } + + /// <summary> + /// Does not store term offsets for documents. It saves memory, but does not allow exact searches + /// or highlighting. + /// </summary> + public FTCreateParams NoOffsets() + { + this.noOffsets = true; + return this; + } + + /// <summary> + /// Creates a lightweight temporary index that expires after a specified period of inactivity. + /// </summary> + public FTCreateParams Temporary(long seconds) + { + this.temporary = seconds; + return this; + } + + /// <summary> + /// Conserves storage space and memory by disabling highlighting support. + /// </summary> + public FTCreateParams NoHL() + { + this.noHL = true; + return this; + } + + /// <summary> + /// @see FTCreateParams#noHL() + /// </summary> + public FTCreateParams NoHighlights() + { + return NoHL(); + } + + /// <summary> + /// Does not store attribute bits for each term. It saves memory, but it does not allow filtering + /// by specific attributes. + /// </summary> + public FTCreateParams NoFields() + { + this.noFields = true; + return this; + } + + /// <summary> + /// Avoids saving the term frequencies in the index. It saves memory, but does not allow sorting + /// based on the frequencies of a given term within the document. + /// </summary> + public FTCreateParams NoFreqs() + { + this.noFreqs = true; + return this; + } + + /// <summary> + /// Sets the index with a custom stopword list, to be ignored during indexing and search time. + /// </summary> + public FTCreateParams topwords(params string[] stopwords) + { + this.stopwords = stopwords.ToList(); + return this; + } + + /// <summary> + /// The index does not have stopwords, not even the default ones. + /// </summary> + public FTCreateParams NoStopwords() + { + this.stopwords = new List<string> { }; + return this; + } + + /// <summary> + /// Does not scan and index. + /// </summary> + public FTCreateParams SkipInitialScan() + { + this.skipInitialScan = true; + return this; + } + + public void AddParams(List<object> args) + { + + if (dataType != default(IndexDataType)) + { + args.Add("ON"); + args.Add(dataType.AsArg()); + } + + if (prefixes != null) + { + args.Add(SearchArgs.PREFIX); + args.Add(prefixes.Count); + foreach (var prefix in prefixes) + if (prefix != null) + args.Add(prefix); + } + + if (filter != null) + { + args.Add(SearchArgs.FILTER); + args.Add(filter); + } + + if (language != null) + { + args.Add(SearchArgs.LANGUAGE); + args.Add(language); + } + if (languageField != null) + { + args.Add(SearchArgs.LANGUAGE_FIELD); + args.Add(languageField); + } + + if (score != default(double)) + { + args.Add(SearchArgs.SCORE); + args.Add(score); + } + if (scoreField != null) + { + args.Add(SearchArgs.SCORE_FIELD); + args.Add(scoreField); + } + + if (payloadField != null) + { + args.Add(SearchArgs.PAYLOAD_FIELD); + args.Add(payloadField); + } + + if (maxTextFields) + { + args.Add(SearchArgs.MAXTEXTFIELDS); + } + //[TEMPORARY seconds] seposed to be here + if (noOffsets) + { + args.Add(SearchArgs.NOOFFSETS); + } + + if (temporary != default(long)) + { + args.Add(SearchArgs.TEMPORARY); + args.Add(temporary); + } + + if (noHL) + { + args.Add(SearchArgs.NOHL); + } + + if (noFields) + { + args.Add(SearchArgs.NOFIELDS); + } + + if (noFreqs) + { + args.Add(SearchArgs.NOFREQS); + } + + if (stopwords != null) + { + args.Add(SearchArgs.STOPWORDS); + args.Add(stopwords.Count); + stopwords.ForEach(w => args.Add(w)); + } + + if (skipInitialScan) + { + args.Add(SearchArgs.SKIPINITIALSCAN); + } + } + } +} \ No newline at end of file diff --git a/src/NRedisStack/Search/FieldName.cs b/src/NRedisStack/Search/FieldName.cs new file mode 100644 index 00000000..e277a30f --- /dev/null +++ b/src/NRedisStack/Search/FieldName.cs @@ -0,0 +1,53 @@ +using System.Collections.Generic; +using System.Text; + +namespace NRedisStack.Search +{ + public class FieldName + { + private readonly string fieldName; + private string alias; + + public FieldName(string name) : this(name, null) { } + + public FieldName(string name, string attribute) + { + this.fieldName = name; + this.alias = attribute; + } + + public int AddCommandArguments(List<object> args) + { + args.Add(fieldName); + if (alias == null) + { + return 1; + } + + args.Add("AS"); + args.Add(alias); + return 3; + } + + public static FieldName Of(string name) + { + return new FieldName(name); + } + + public FieldName As(string attribute) + { + this.alias = attribute; + return this; + } + + public static FieldName[] convert(params string[] names) + { + if (names == null) return null; + FieldName[] fields = new FieldName[names.Length]; + for (int i = 0; i < names.Length; i++) + fields[i] = FieldName.Of(names[i]); + + return fields; + } + } +} \ No newline at end of file diff --git a/src/NRedisStack/Search/Group.cs b/src/NRedisStack/Search/Group.cs new file mode 100644 index 00000000..36bca902 --- /dev/null +++ b/src/NRedisStack/Search/Group.cs @@ -0,0 +1,54 @@ +using System.Collections.Generic; + +namespace NRedisStack.Search.Aggregation +{ + public class Group + { + + private readonly IList<Reducer> _reducers = new List<Reducer>(); + private readonly IList<string> _fields; + private Limit _limit = new Limit(0, 0); + + public Group(params string[] fields) => _fields = fields; + public Group(IList<string> fields) => _fields = fields; + + internal Group Limit(Limit limit) + { + _limit = limit; + return this; + } + + internal Group Reduce(Reducer r) + { + _reducers.Add(r); + return this; + } + + internal void SerializeRedisArgs(List<object> args) + { + args.Add(_fields.Count); + foreach (var field in _fields) + args.Add(field); + foreach (var r in _reducers) + { + args.Add("REDUCE"); + args.Add(r.Name); + r.SerializeRedisArgs(args); + var alias = r.Alias; + if (!string.IsNullOrEmpty(alias)) + { + args.Add("AS"); + args.Add(alias); + } + } + _limit.SerializeRedisArgs(args); + } + + public List<object> getArgs() + { + List<object> args = new List<object>(); + SerializeRedisArgs(args); + return args; + } + } +} \ No newline at end of file diff --git a/src/NRedisStack/Search/Limit.cs b/src/NRedisStack/Search/Limit.cs new file mode 100644 index 00000000..84297c0e --- /dev/null +++ b/src/NRedisStack/Search/Limit.cs @@ -0,0 +1,33 @@ +using System.Collections.Generic; + +namespace NRedisStack.Search.Aggregation +{ + internal readonly struct Limit + { + public static Limit NO_LIMIT = new Limit(0, 0); + private readonly int _offset, _count; + + public Limit(int offset, int count) + { + _offset = offset; + _count = count; + } + +// public void addArgs(List<String> args) { +// if (count == 0) { +// return; +// } +// args.add("LIMIT"); +// args.add(Integer.toString(offset)); +// args.add(Integer.toString(count)); +// } + + internal void SerializeRedisArgs(List<object> args) + { + if (_count == 0) return; + args.Add("LIMIT"); + args.Add(_offset); + args.Add(_count); + } + } +} diff --git a/src/NRedisStack/Search/Literals/AttributeOptions.cs b/src/NRedisStack/Search/Literals/AttributeOptions.cs new file mode 100644 index 00000000..60a161f9 --- /dev/null +++ b/src/NRedisStack/Search/Literals/AttributeOptions.cs @@ -0,0 +1,12 @@ +namespace NRedisStack.Literals +{ + internal class AttributeOptions + { + public const string SORTABLE = "SORTABLE"; + public const string UNF = "UNF"; + public const string NOSTEM = "NOSTEM"; + public const string NOINDEX = "NOINDEX"; + + //TODO: add all options + } +} \ No newline at end of file diff --git a/src/NRedisStack/Search/Literals/CommandArgs.cs b/src/NRedisStack/Search/Literals/CommandArgs.cs index 48e36a47..eddb7134 100644 --- a/src/NRedisStack/Search/Literals/CommandArgs.cs +++ b/src/NRedisStack/Search/Literals/CommandArgs.cs @@ -2,6 +2,22 @@ namespace NRedisStack.Literals { internal class SearchArgs { - + public const string ON_HASH = "ON HASH"; + public const string JSON = "JSON"; + public const string PREFIX = "PREFIX"; + public const string FILTER = "FILTER"; + public const string LANGUAGE = "LANGUAGE"; + public const string LANGUAGE_FIELD = "LANGUAGE_FIELD"; + public const string SCORE = "SCORE"; + public const string SCORE_FIELD = "SCORE_FIELD"; + public const string PAYLOAD_FIELD = "PAYLOAD_FIELD"; + public const string MAXTEXTFIELDS = "MAXTEXTFIELDS"; + public const string TEMPORARY = "TEMPORARY"; + public const string NOOFFSETS = "NOOFFSETS"; + public const string NOHL = "NOHL"; + public const string NOFIELDS = "NOFIELDS"; + public const string NOFREQS = "NOFREQS"; + public const string STOPWORDS = "STOPWORDS"; + public const string SKIPINITIALSCAN = "SKIPINITIALSCAN"; } } \ No newline at end of file diff --git a/src/NRedisStack/Search/Literals/Commands.cs b/src/NRedisStack/Search/Literals/Commands.cs index 505ad479..4c57bb4f 100644 --- a/src/NRedisStack/Search/Literals/Commands.cs +++ b/src/NRedisStack/Search/Literals/Commands.cs @@ -8,10 +8,12 @@ internal class FT public const string ALIASDEL = "FT.ALIASDEL"; public const string ALIASUPDATE = "FT.ALIASUPDATE"; public const string ALTER = "FT.ALTER"; + public const string CONFIG = "FT.CONFIG"; public const string CONFIG_GET = "FT.CONFIG GET"; public const string CONFIG_HELP = "FT.CONFIG HELP"; public const string CONFIG_SET = "FT.CONFIG SET"; public const string CREATE = "FT.CREATE"; + public const string CURSOR = "FT.CURSOR"; public const string CURSOR_DEL = "FT.CURSOR DEL"; public const string CURSOR_READ = "FT.CURSOR READ"; public const string DICTADD = "FT.DICTADD"; diff --git a/src/NRedisStack/Search/Literals/Enums/IndexDataType.cs b/src/NRedisStack/Search/Literals/Enums/IndexDataType.cs new file mode 100644 index 00000000..e4b9ae34 --- /dev/null +++ b/src/NRedisStack/Search/Literals/Enums/IndexDataType.cs @@ -0,0 +1,8 @@ +namespace NRedisStack.Literals.Enums +{ + public enum IndexDataType + { + Json, + Hash, + } +} \ No newline at end of file diff --git a/src/NRedisStack/Search/Query.cs b/src/NRedisStack/Search/Query.cs new file mode 100644 index 00000000..a145eb39 --- /dev/null +++ b/src/NRedisStack/Search/Query.cs @@ -0,0 +1,680 @@ +using System.Collections.Generic; +using System.Globalization; +using NRedisStack.Search; +using StackExchange.Redis; + +namespace NRedisStack.Search +{ + /// <summary> + /// Query represents query parameters and filters to load results from the engine + /// </summary> + public sealed class Query + { + /// <summary> + /// Filter represents a filtering rules in a query + /// </summary> + public abstract class Filter + { + public string Property { get; } + + internal abstract void SerializeRedisArgs(List<object> args); + + internal Filter(string property) + { + Property = property; + } + } + + /// <summary> + /// NumericFilter wraps a range filter on a numeric field. It can be inclusive or exclusive + /// </summary> + public class NumericFilter : Filter + { + private readonly double min, max; + private readonly bool exclusiveMin, exclusiveMax; + + public NumericFilter(string property, double min, bool exclusiveMin, double max, bool exclusiveMax) : base(property) + { + this.min = min; + this.max = max; + this.exclusiveMax = exclusiveMax; + this.exclusiveMin = exclusiveMin; + } + + public NumericFilter(string property, double min, double max) : this(property, min, false, max, false) { } + + internal override void SerializeRedisArgs(List<object> args) + { + static RedisValue FormatNum(double num, bool exclude) //TODO: understand this: + { + if (!exclude || double.IsInfinity(num)) + { + return (RedisValue)num; // can use directly + } + // need to add leading bracket + return "(" + num.ToString("G17", NumberFormatInfo.InvariantInfo); + } + args.Add("FILTER"); + args.Add(Property); + args.Add(FormatNum(min, exclusiveMin)); + args.Add(FormatNum(max, exclusiveMax)); + } + } + + /// <summary> + /// GeoFilter encapsulates a radius filter on a geographical indexed fields + /// </summary> + public class GeoFilter : Filter + { + public static readonly string KILOMETERS = "km"; + public static readonly string METERS = "m"; + public static readonly string FEET = "ft"; + public static readonly string MILES = "mi"; + private readonly double lon, lat, radius; + private readonly string unit; // TODO: think about implementing this as an enum + + public GeoFilter(string property, double lon, double lat, double radius, string unit) : base(property) + { + this.lon = lon; + this.lat = lat; + this.radius = radius; + this.unit = unit; + } + + internal override void SerializeRedisArgs(List<object> args) + { + args.Add("GEOFILTER"); + args.Add(Property); + args.Add(lon); + args.Add(lat); + args.Add(radius); + args.Add(unit); + } + } + + internal readonly struct Paging + { + public int Offset { get; } + public int Count { get; } + + public Paging(int offset, int count) + { + Offset = offset; + Count = count; + } + } + + public readonly struct HighlightTags + { + public HighlightTags(string open, string close) + { + Open = open; + Close = close; + } + public string Open { get; } + public string Close { get; } + } + + /// <summary> + /// The query's filter list. We only support AND operation on all those filters + /// </summary> + internal readonly List<Filter> _filters = new List<Filter>(); + + /// <summary> + /// The textual part of the query + /// </summary> + public string QueryString { get; } + + /// <summary> + /// The sorting parameters + /// </summary> + internal Paging _paging = new Paging(0, 10); + + /// <summary> + /// Set the query to verbatim mode, disabling stemming and query expansion + /// </summary> + public bool Verbatim { get; set; } + /// <summary> + /// Set the query not to return the contents of documents, and rather just return the ids + /// </summary> + public bool NoContent { get; set; } + /// <summary> + /// Set the query not to filter for stopwords. In general this should not be used + /// </summary> + public bool NoStopwords { get; set; } + /// <summary> + /// Set the query to return a factored score for each results. This is useful to merge results from multiple queries. + /// </summary> + public bool WithScores { get; set; } + /// <summary> + /// Set the query to return object payloads, if any were given + /// </summary> + public bool WithPayloads { get; set; } + + /// <summary> + /// Set the query language, for stemming purposes; see http://redisearch.io for documentation on languages and stemming + /// </summary> + public string Language { get; set; } + + internal string[] _fields = null; + internal string[] _keys = null; + internal string[] _returnFields = null; + internal FieldName[] _returnFieldsNames = null; + internal string[] _highlightFields = null; + internal string[] _summarizeFields = null; + internal HighlightTags? _highlightTags = null; + internal string _summarizeSeparator = null; + internal int _summarizeNumFragments = -1, _summarizeFragmentLen = -1; + + /// <summary> + /// Set the query payload to be evaluated by the scoring function + /// </summary> + public byte[] Payload { get; set; } + + // TODO: Check if I need to add here WITHSORTKEYS + + /// <summary> + /// Set the query parameter to sort by + /// </summary> + public string SortBy { get; set; } + + /// <summary> + /// Set the query parameter to sort by ASC by default + /// </summary> + public bool SortAscending { get; set; } = true; + + // highlight and summarize + internal bool _wantsHighlight = false, _wantsSummarize = false; + + /// <summary> + /// Set the query scoring. see https://oss.redislabs.com/redisearch/Scoring.html for documentation + /// </summary> + public string Scorer { get; set; } + public bool ExplainScore { get; set; } // TODO: Check if this is needed because Jedis doesn't have it + + private Dictionary<String, Object> _params = null; + private int _dialect = 0; + private int _slop = -1; + private long _timeout = -1; + private bool _inOrder = false; + private string _expander = null; + + public Query() : this("*") { } + + /// <summary> + /// Create a new index + /// </summary> + /// <param name="queryString">The query string to use for this query.</param> + public Query(string queryString) + { + QueryString = queryString; + } + + internal void SerializeRedisArgs(List<object> args) + { + args.Add(QueryString); + + if (Verbatim) + { + args.Add("VERBATIM"); + } + if (NoContent) + { + args.Add("NOCONTENT"); + } + if (NoStopwords) + { + args.Add("NOSTOPWORDS"); + } + if (WithScores) + { + args.Add("WITHSCORES"); + } + if (WithPayloads) + { + args.Add("WITHPAYLOADS"); + } + if (Language != null) + { + args.Add("LANGUAGE"); + args.Add(Language); + } + + if (Scorer != null) + { + args.Add("SCORER"); + args.Add(Scorer); + + if (ExplainScore) + { + args.Add("EXPLAINSCORE"); // TODO: Check Why Jedis doesn't have it + } + } + + if (_fields?.Length > 0) + { + args.Add("INFIELDS"); + args.Add(_fields.Length); + args.AddRange(_fields); + } + + if (SortBy != null) + { + args.Add("SORTBY"); + args.Add(SortBy); + args.Add((SortAscending ? "ASC" : "DESC")); + } + if (Payload != null) + { + args.Add("PAYLOAD"); + args.Add(Payload); + } + + if (_paging.Offset != 0 || _paging.Count != 10) + { + args.Add("LIMIT"); + args.Add(_paging.Offset); + args.Add(_paging.Count); + } + + if (_filters?.Count > 0) + { + foreach (var f in _filters) + { + f.SerializeRedisArgs(args); + } + } + + if (_wantsHighlight) + { + args.Add("HIGHLIGHT"); + if (_highlightFields != null) + { + args.Add("FIELDS"); + args.Add(_highlightFields.Length); + foreach (var s in _highlightFields) + { + args.Add(s); + } + } + if (_highlightTags != null) + { + args.Add("TAGS"); + var tags = _highlightTags.GetValueOrDefault(); + args.Add(tags.Open); + args.Add(tags.Close); + } + } + if (_wantsSummarize) + { + args.Add("SUMMARIZE"); + if (_summarizeFields != null) + { + args.Add("FIELDS"); + args.Add(_summarizeFields.Length); + foreach (var s in _summarizeFields) + { + args.Add(s); + } + } + if (_summarizeNumFragments != -1) + { + args.Add("FRAGS"); + args.Add(_summarizeNumFragments); + } + if (_summarizeFragmentLen != -1) + { + args.Add("LEN"); + args.Add(_summarizeFragmentLen); + } + if (_summarizeSeparator != null) + { + args.Add("SEPARATOR"); + args.Add(_summarizeSeparator); + } + } + + if (_keys != null && _keys.Length > 0) + { + args.Add("INKEYS"); + args.Add(_keys.Length); + + foreach (var key in _keys) + { + args.Add(key); + } + } + + if (_keys?.Length > 0) + { + args.Add("INKEYS"); + args.Add(_keys.Length); + args.AddRange(_keys); + } + if (_returnFields?.Length > 0) + { + args.Add("RETURN"); + args.Add(_returnFields.Length); + args.AddRange(_returnFields); + } + else if (_returnFieldsNames?.Length > 0) // TODO: understad this + { + args.Add("RETURN"); + int returnCountIndex = args.Count; + int returnCount = 0; + foreach (FieldName fn in _returnFieldsNames) { + returnCount += fn.AddCommandArguments(args); + } + + args.Insert(returnCountIndex, returnCount); + } + if (_params != null && _params.Count > 0) + { + args.Add("PARAMS"); + args.Add(_params.Count * 2); + foreach (var entry in _params) + { + args.Add(entry.Key); + args.Add(entry.Value); + } + } + + if (_dialect != 0) + { + args.Add("DIALECT"); + args.Add(_dialect); + } + + if (_slop >= 0) + { + args.Add("SLOP"); + args.Add(_slop); + } + + if (_timeout >= 0) + { + args.Add("TIMEOUT"); + args.Add(_timeout); + } + + if (_inOrder) + { + args.Add("INORDER"); + } + + if (_expander != null) + { + args.Add("EXPANDER"); + args.Add(_expander); + } + } + + // TODO: check if DelayedRawable is needed here (Jedis have it) + + /// <summary> + /// Limit the results to a certain offset and limit + /// </summary> + /// <param name="offset">the first result to show, zero based indexing</param> + /// <param name="count">how many results we want to show</param> + /// <returns>the query itself, for builder-style syntax</returns> + public Query Limit(int offset, int count) + { + _paging = new Paging(offset, count); + return this; + } + + /// <summary> + /// Add a filter to the query's filter list + /// </summary> + /// <param name="f">either a numeric or geo filter object</param> + /// <returns>the query itself</returns> + public Query AddFilter(Filter f) + { + _filters.Add(f); + return this; + } + + /// <summary> + /// Set the query payload to be evaluated by the scoring function + /// </summary> + /// <param name="payload">the payload</param> + /// <returns>the query itself</returns> + public Query SetPayload(byte[] payload) + { + Payload = payload; + return this; + } + + /// <summary> + /// Set the query to verbatim mode, disabling stemming and query expansion + /// </summary> + /// <returns>the query itself</returns> + public Query SetVerbatim(bool value = true) + { + Verbatim = value; + return this; + } + + /// <summary> + /// Set the query not to return the contents of documents, and rather just return the ids + /// </summary> + /// <returns>the query itself</returns> + public Query SetNoContent(bool value = true) + { + NoContent = value; + return this; + } + + /// <summary> + /// Set the query not to filter for stopwords. In general this should not be used + /// </summary> + /// <returns>the query itself</returns> + public Query SetNoStopwords(bool value = true) + { + NoStopwords = value; + return this; + } + + /// <summary> + /// Set the query to return a factored score for each results. This is useful to merge results from + /// multiple queries. + /// </summary> + /// <returns>the query itself</returns> + public Query SetWithScores(bool value = true) + { + WithScores = value; + return this; + } + + /// <summary> + /// Set the query to return object payloads, if any were given + /// </summary> + /// <returns>the query itself</returns> + public Query SetWithPayload() + { + WithPayloads = true; + return this; + } + + /// <summary> + /// Set the query language, for stemming purposes + /// </summary> + /// <param name="language">the language</param> + /// <returns>the query itself</returns> + public Query SetLanguage(string language) + { + Language = language; + return this; + } + + /// <summary> + /// Set the query language, for stemming purposes + /// </summary> + /// <param name="scorer"></param> + /// <returns></returns> + public Query SetScorer(string scorer) + { + Scorer = scorer; + return this; + } + /// <summary> + /// Limit the query to results that are limited to a specific set of fields + /// </summary> + /// <param name="fields">a list of TEXT fields in the schemas</param> + /// <returns>the query object itself</returns> + public Query LimitFields(params string[] fields) + { + _fields = fields; + return this; + } + + /// <summary> + /// Limit the query to results that are limited to a specific set of keys + /// </summary> + /// <param name="keys">a list of the TEXT fields in the schemas</param> + /// <returns>the query object itself</returns> + public Query LimitKeys(params string[] keys) + { + _keys = keys; + return this; + } + + /// <summary> + /// Result's projection - the fields to return by the query + /// </summary> + /// <param name="fields">fields a list of TEXT fields in the schemas</param> + /// <returns>the query object itself</returns> + public Query ReturnFields(params string[] fields) + { + _returnFields = fields; + _returnFieldsNames = null; + return this; + } + + /// <summary> + /// Result's projection - the fields to return by the query + /// </summary> + /// <param name="field">field a list of TEXT fields in the schemas</param> + /// <returns>the query object itself</returns> + public Query ReturnFields(params FieldName[] fields) + { + _returnFields = null; + _returnFieldsNames = fields; + return this; + } + + public Query HighlightFields(HighlightTags tags, params string[] fields) => HighlightFieldsImpl(tags, fields); + public Query HighlightFields(params string[] fields) => HighlightFieldsImpl(null, fields); + private Query HighlightFieldsImpl(HighlightTags? tags, string[] fields) + { + if (fields == null || fields.Length > 0) + { + _highlightFields = fields; + } + _highlightTags = tags; + _wantsHighlight = true; + return this; + } + + public Query SummarizeFields(int contextLen, int fragmentCount, string separator, params string[] fields) + { + if (fields == null || fields.Length > 0) + { + _summarizeFields = fields; + } + _summarizeFragmentLen = contextLen; + _summarizeNumFragments = fragmentCount; + _summarizeSeparator = separator; + _wantsSummarize = true; + return this; + } + + public Query SummarizeFields(params string[] fields) => SummarizeFields(-1, -1, null, fields); + + /// <summary> + /// Set the query to be sorted by a sortable field defined in the schema + /// </summary> + /// <param name="field">the sorting field's name</param> + /// <param name="ascending">if set to true, the sorting order is ascending, else descending</param> + /// <returns>the query object itself</returns> + public Query SetSortBy(string field, bool ascending = true) + { + SortBy = field; + SortAscending = ascending; + return this; + } + + /// <summary> + /// Parameters can be referenced in the query string by a $ , followed by the parameter name, + /// e.g., $user , and each such reference in the search query to a parameter name is substituted + /// by the corresponding parameter value. + /// </summary> + /// <param name="name"></param> + /// <param name="value"> can be String, long or float</param> + /// <returns>The query object itself</returns> + public Query AddParam(String name, Object value) + { + if (_params == null) + { + _params = new Dictionary<string, object>(); + } + _params.Add(name, value); + return this; + } + + /// <summary> + /// Set the dialect version to execute the query accordingly + /// </summary> + /// <param name="dialect"></param> + /// <returns>the query object itself</returns> + public Query Dialect(int dialect) + { + _dialect = dialect; + return this; + } + + /// <summary> + /// Set the slop to execute the query accordingly + /// </summary> + /// <param name="slop"></param> + /// <returns>the query object itself</returns> + public Query Slop(int slop) + { + _slop = slop; + return this; + } + + /// <summary> + /// Set the timeout to execute the query accordingly + /// </summary> + /// <param name="timeout"></param> + /// <returns>the query object itself</returns> + public Query Timeout(long timeout) + { + _timeout = timeout; + return this; + } + + /// <summary> + /// Set the query terms appear in the same order in the document as in the query, regardless of the offsets between them + /// </summary> + /// <returns>the query object</returns> + public Query SetInOrder() + { + this._inOrder = true; + return this; + } + + /// <summary> + /// Set the query to use a custom query expander instead of the stemmer + /// </summary> + /// <param name="field the expander field's name"></param> + /// <returns>the query object itself</returns> + + public Query SetExpander(String field) + { + _expander = field; + return this; + } + } +} diff --git a/src/NRedisStack/Search/Reducer.cs b/src/NRedisStack/Search/Reducer.cs new file mode 100644 index 00000000..68294250 --- /dev/null +++ b/src/NRedisStack/Search/Reducer.cs @@ -0,0 +1,85 @@ +using System.Collections.Generic; + +namespace NRedisStack.Search.Aggregation +{ + public abstract class Reducer + { + + public override string ToString() => Name; + + // internal Reducer(string field) => _field = field; + + /// <summary> + /// The name of the reducer + /// </summary> + public abstract string Name { get; } + + public string? Alias { get; set; } + private readonly string _field; + + + protected Reducer(string field) + { + _field = field; + Alias = null; + } + + //protected Reducer() : this(field: null) { } + + protected virtual int GetOwnArgsCount() => _field == null ? 0 : 1; + protected virtual void AddOwnArgs(List<object> args) + { + if (_field != null) args.Add(_field); + } + + /** + * @return The name of the reducer + */ + // public abstract string getName(); + + // public string getAlias() + // { + // return Alias; + // } + + // public Reducer setAlias(string alias) + // { + // this.Alias = alias; + // return this; + // } + + // public final Reducer as(string alias) { + // return setAlias(alias); + // } + + public Reducer As(string alias) + { + Alias = alias; + return this; + } + public Reducer SetAliasAsField() + { + if (string.IsNullOrEmpty(_field)) throw new InvalidOperationException("Cannot set to field name since no field exists"); + return As(_field); + } + + internal void SerializeRedisArgs(List<object> args) + { + int count = GetOwnArgsCount(); + args.Add(count); + int before = args.Count; + AddOwnArgs(args); + int after = args.Count; + if (count != (after - before)) + throw new InvalidOperationException($"Reducer '{ToString()}' incorrectly reported the arg-count as {count}, but added {after - before}"); + } + + public List<object> GetArgs() + { + List<object> args = new List<object>(); + SerializeRedisArgs(args); + return args; + } +} + +} \ No newline at end of file diff --git a/src/NRedisStack/Search/Reducers.cs b/src/NRedisStack/Search/Reducers.cs new file mode 100644 index 00000000..6e8254dd --- /dev/null +++ b/src/NRedisStack/Search/Reducers.cs @@ -0,0 +1,103 @@ +using System.Collections.Generic; +using NRedisStack.Search.Aggregation; + +namespace NRedisStack.Search.Aggregation +{ + public static class Reducers + { + public static Reducer Count() => CountReducer.Instance; + private sealed class CountReducer : Reducer + { + internal static readonly Reducer Instance = new CountReducer(); + private CountReducer() : base(null) { } + public override string Name => "COUNT"; + } + + private sealed class SingleFieldReducer : Reducer + { + public override string Name { get; } + + internal SingleFieldReducer(string name, string field) : base(field) + { + Name = name; + } + } + + public static Reducer CountDistinct(string field) => new SingleFieldReducer("COUNT_DISTINCT", field); + + public static Reducer CountDistinctish(string field) => new SingleFieldReducer("COUNT_DISTINCTISH", field); + + public static Reducer Sum(string field) => new SingleFieldReducer("SUM", field); + + public static Reducer Min(string field) => new SingleFieldReducer("MIN", field); + + public static Reducer Max(string field) => new SingleFieldReducer("MAX", field); + + public static Reducer Avg(string field) => new SingleFieldReducer("AVG", field); + + public static Reducer StdDev(string field) => new SingleFieldReducer("STDDEV", field); + + public static Reducer Quantile(string field, double percentile) => new QuantileReducer(field, percentile); + + private sealed class QuantileReducer : Reducer + { + private readonly double _percentile; + public QuantileReducer(string field, double percentile) : base(field) + { + _percentile = percentile; + } + protected override int GetOwnArgsCount() => base.GetOwnArgsCount() + 1; + protected override void AddOwnArgs(List<object> args) + { + base.AddOwnArgs(args); + args.Add(_percentile); + } + public override string Name => "QUANTILE"; + } + public static Reducer FirstValue(string field, SortedField sortBy) => new FirstValueReducer(field, sortBy); + private sealed class FirstValueReducer : Reducer + { + private readonly SortedField? _sortBy; + public FirstValueReducer(string field, SortedField? sortBy) : base(field) + { + _sortBy = sortBy; + } + public override string Name => "FIRST_VALUE"; + + // TODO: Check if needed + // protected override int GetOwnArgsCount() => base.GetOwnArgsCount() + (_sortBy.HasValue ? 3 : 0); + protected override void AddOwnArgs(List<object> args) + { + base.AddOwnArgs(args); + if (_sortBy != null) + { + var sortBy = _sortBy; + args.Add("BY"); + args.Add(sortBy.FieldName); + args.Add(sortBy.Order.ToString()); + } + } + } + public static Reducer FirstValue(string field) => new FirstValueReducer(field, null); + + public static Reducer ToList(string field) => new SingleFieldReducer("TOLIST", field); + + public static Reducer RandomSample(string field, int size) => new RandomSampleReducer(field, size); + + private sealed class RandomSampleReducer : Reducer + { + private readonly int _size; + public RandomSampleReducer(string field, int size) : base(field) + { + _size = size; + } + public override string Name => "RANDOM_SAMPLE"; + protected override int GetOwnArgsCount() => base.GetOwnArgsCount() + 1; + protected override void AddOwnArgs(List<object> args) + { + base.AddOwnArgs(args); + args.Add(_size); + } + } + } +} \ No newline at end of file diff --git a/src/NRedisStack/Search/Row.cs b/src/NRedisStack/Search/Row.cs new file mode 100644 index 00000000..2a8a8aa9 --- /dev/null +++ b/src/NRedisStack/Search/Row.cs @@ -0,0 +1,22 @@ +using System.Collections.Generic; +using StackExchange.Redis; + +namespace NRedisStack.Search.Aggregation +{ + public readonly struct Row + { + private readonly Dictionary<string, RedisValue> _fields; + + internal Row(Dictionary<string, RedisValue> fields) + { + _fields = fields; + } + + public bool ContainsKey(string key) => _fields.ContainsKey(key); + public RedisValue this[string key] => _fields.TryGetValue(key, out var result) ? result : RedisValue.Null; + + public string GetString(string key) => _fields.TryGetValue(key, out var result) ? (string)result : default; + public long GetLong(string key) => _fields.TryGetValue(key, out var result) ? (long)result : default; + public double GetDouble(string key) => _fields.TryGetValue(key, out var result) ? (double)result : default; + } +} \ No newline at end of file diff --git a/src/NRedisStack/Search/Schema.cs b/src/NRedisStack/Search/Schema.cs new file mode 100644 index 00000000..abecb187 --- /dev/null +++ b/src/NRedisStack/Search/Schema.cs @@ -0,0 +1,394 @@ +using System; +using System.Collections.Generic; +using static NRedisStack.Search.Schema.VectorField; + +namespace NRedisStack.Search +{ + /// <summary> + /// Schema abstracts the schema definition when creating an index. + /// Documents can contain fields not mentioned in the schema, but the index will only index pre-defined fields + /// </summary> + public sealed class Schema + { + public enum FieldType + { + Text, + Geo, + Numeric, + Tag, + Vector + } + + public class Field + { + public FieldName FieldName { get; } + public FieldType Type { get; } + + internal Field(string name, FieldType type) + : this(FieldName.Of(name), type) { } + + internal Field(FieldName name, FieldType type) + { + FieldName = name; + Type = type; + } + + internal void AddSchemaArgs(List<object> args) + { + static object GetForRedis(FieldType type) => type switch + { + FieldType.Text => "TEXT", + FieldType.Geo => "GEO", + FieldType.Numeric => "NUMERIC", + FieldType.Tag => "TAG", + FieldType.Vector => "VECTOR", + _ => throw new ArgumentOutOfRangeException(nameof(type)), + }; + FieldName.AddCommandArguments(args); + args.Add(GetForRedis(Type)); + AddFieldTypeArgs(args); + } + internal virtual void AddFieldTypeArgs(List<object> args) { } + } + + public class TextField : Field + { + public double Weight { get; } + public bool NoStem { get; } + public string? Phonetic { get; } + public bool Sortable { get; } + public bool Unf { get; } + public bool NoIndex { get; } + public bool WithSuffixTrie { get; } + + public TextField(FieldName name, double weight = 1.0, bool noStem = false, + string? phonetic = null, bool sortable = false, bool unf = false, + bool noIndex = false, bool withSuffixTrie = false) + : base(name, FieldType.Text) + { + Weight = weight; + NoStem = noStem; + Phonetic = phonetic; + Sortable = sortable; + if (unf && !sortable) + { + throw new ArgumentException("UNF can't be applied on a non-sortable field."); + } + Unf = unf; + NoIndex = noIndex; + WithSuffixTrie = withSuffixTrie; + } + + public TextField(string name, double weight = 1.0, bool noStem = false, + string? phonetic = null, bool sortable = false, bool unf = false, + bool noIndex = false, bool withSuffixTrie = false) + : this(FieldName.Of(name), weight, noStem, phonetic, sortable, unf, noIndex, withSuffixTrie) { } + + internal override void AddFieldTypeArgs(List<object> args) + { + if (Sortable) args.Add("SORTABLE"); + if (Unf) args.Add("UNF"); + if (NoStem) args.Add("NOSTEM"); + if (NoIndex) args.Add("NOINDEX"); + AddPhonetic(args); + AddWeight(args); + if (WithSuffixTrie) args.Add("WITHSUFFIXTRIE"); + } + + private void AddWeight(List<object> args) + { + if (Weight != 1.0) + { + args.Add("WEIGHT"); + args.Add(Weight); + } + } + + private void AddPhonetic(List<object> args) + { + if (Phonetic != null) + { + args.Add("PHONETIC"); + args.Add(this.Phonetic); + } + } + } + + public class TagField : Field + { + public bool Sortable { get; } + public bool Unf { get; } + public bool NoIndex { get; } + public string Separator { get; } + public bool CaseSensitive { get; } + public bool WithSuffixTrie { get; } + + internal TagField(FieldName name, bool sortable = false, bool unf = false, + bool noIndex = false, string separator = ",", + bool caseSensitive = false, bool withSuffixTrie = false) + : base(name, FieldType.Tag) + { + Sortable = sortable; + Unf = unf; + NoIndex = noIndex; + Separator = separator; + CaseSensitive = caseSensitive; + WithSuffixTrie = withSuffixTrie; + } + + internal TagField(string name, bool sortable = false, bool unf = false, + bool noIndex = false, string separator = ",", + bool caseSensitive = false, bool withSuffixTrie = false) + : this(FieldName.Of(name), sortable, unf, noIndex, separator, caseSensitive, withSuffixTrie) { } + + internal override void AddFieldTypeArgs(List<object> args) + { + if (Sortable) args.Add("SORTABLE"); + if (Unf) args.Add("UNF"); + if (NoIndex) args.Add("NOINDEX"); + if (WithSuffixTrie) args.Add("WITHSUFFIXTRIE"); + if (Separator != ",") + { + + args.Add("SEPARATOR"); + args.Add(Separator); + } + if (CaseSensitive) args.Add("CASESENSITIVE"); + } + } + + public class GeoField : Field + { + public bool Sortable { get; } + public bool NoIndex { get; } + internal GeoField(FieldName name, bool sortable = false, bool noIndex = false) + : base(name, FieldType.Geo) + { + Sortable = sortable; + NoIndex = noIndex; + } + + internal GeoField(string name, bool sortable = false, bool noIndex = false) + : this(FieldName.Of(name), sortable, noIndex) { } + + internal override void AddFieldTypeArgs(List<object> args) + { + if (Sortable) args.Add("SORTABLE"); + if (NoIndex) args.Add("NOINDEX"); + } + + } + + public class NumericField : Field + { + public bool Sortable { get; } + public bool NoIndex { get; } + internal NumericField(FieldName name, bool sortable = false, bool noIndex = false) + : base(name, FieldType.Numeric) + { + Sortable = sortable; + NoIndex = noIndex; + } + + internal NumericField(string name, bool sortable = false, bool noIndex = false) + : this(FieldName.Of(name), sortable, noIndex) { } + + internal override void AddFieldTypeArgs(List<object> args) + { + if (Sortable) args.Add("SORTABLE"); + if (NoIndex) args.Add("NOINDEX"); + } + + } + + public class VectorField : Field + { + public enum VectorAlgo + { + FLAT, + HNSW + } + + public VectorAlgo Algorithm { get; } + public Dictionary<string, object>? Attributes { get; } + public VectorField(string name, VectorAlgo algorithm, Dictionary<string, object>? attributes = null) + : base(name, FieldType.Vector) + { + Algorithm = algorithm; + Attributes = attributes; + } + + internal override void AddFieldTypeArgs(List<object> args) + { + args.Add(Algorithm.ToString()); + if (Attributes != null) + { + args.Add(Attributes.Count()); + + foreach (var attribute in Attributes) + { + args.Add(attribute.Key); + args.Add(attribute.Value); + } + } + } + } + public List<Field> Fields { get; } = new List<Field>(); + + /// <summary> + /// Add a field to the schema. + /// </summary> + /// <param name="field">The <see cref="Field"/> to add.</param> + /// <returns>The <see cref="Schema"/> object.</returns> + public Schema AddField(Field field) + { + Fields.Add(field ?? throw new ArgumentNullException(nameof(field))); + return this; + } + + /// <summary> + /// Add a Text field to the schema. + /// </summary> + /// <param name="name">The field's name.</param> + /// <param name="weight">Its weight, a positive floating point number.</param> + /// <param name="sortable">If true, the text field can be sorted.</param> + /// <param name="noStem"> Disable stemming when indexing its values.</param> + /// <param name="phonetic">Declaring a text attribute as PHONETIC will perform phonetic matching on it in searches by default.</param> + /// <param name="noIndex">Attributes can have the NOINDEX option, which means they will not be indexed.</param> + /// <param name="unf">Set this to true to prevent the indexer from sorting on the normalized form. + /// Normalied form is the field sent to lower case with all diaretics removed</param> + /// <param name="withSuffixTrie">Keeps a suffix trie with all terms which match the suffix.</param> + /// <returns>The <see cref="Schema"/> object.</returns> + public Schema AddTextField(string name, double weight = 1.0, bool sortable = false, bool unf = false, bool noStem = false, + string? phonetic = null, bool noIndex = false, bool withSuffixTrie = false) + { + Fields.Add(new TextField(name, weight, noStem, phonetic, sortable, unf, noIndex, withSuffixTrie)); + return this; + } + + /// <summary> + /// Add a Text field to the schema. + /// </summary> + /// <param name="name">The field's name.</param> + /// <param name="weight">Its weight, a positive floating point number.</param> + /// <param name="sortable">If true, the text field can be sorted.</param> + /// <param name="noStem"> Disable stemming when indexing its values.</param> + /// <param name="phonetic">Declaring a text attribute as PHONETIC will perform phonetic matching on it in searches by default.</param> + /// <param name="noIndex">Attributes can have the NOINDEX option, which means they will not be indexed.</param> + /// <param name="unf">Set this to true to prevent the indexer from sorting on the normalized form. + /// Normalied form is the field sent to lower case with all diaretics removed</param> + /// <param name="withSuffixTrie">Keeps a suffix trie with all terms which match the suffix.</param> + /// <returns>The <see cref="Schema"/> object.</returns> + public Schema AddTextField(FieldName name, double weight = 1.0, bool sortable = false, bool unf = false, bool noStem = false, + string? phonetic = null, bool noIndex = false, bool withSuffixTrie = false) + { + Fields.Add(new TextField(name, weight, noStem, phonetic, sortable, unf, noIndex, withSuffixTrie)); + return this; + } + + /// <summary> + /// Add a Geo field to the schema. + /// </summary> + /// <param name="name">The field's name.</param> + /// <param name="sortable">If true, the text field can be sorted.</param> + /// <param name="noIndex">Attributes can have the NOINDEX option, which means they will not be indexed.</param> + /// <returns>The <see cref="Schema"/> object.</returns> + public Schema AddGeoField(FieldName name, bool sortable = false, bool noIndex = false) + { + Fields.Add(new GeoField(name, sortable, noIndex)); + return this; + } + + /// <summary> + /// Add a Geo field to the schema. + /// </summary> + /// <param name="name">The field's name.</param> + /// <param name="sortable">If true, the text field can be sorted.</param> + /// <param name="noIndex">Attributes can have the NOINDEX option, which means they will not be indexed.</param> + /// <returns>The <see cref="Schema"/> object.</returns> + public Schema AddGeoField(string name, bool sortable = false, bool noIndex = false) + { + Fields.Add(new GeoField(name, sortable, noIndex)); + return this; + } + + /// <summary> + /// Add a Numeric field to the schema. + /// </summary> + /// <param name="name">The field's name.</param> + /// <param name="sortable">If true, the text field can be sorted.</param> + /// <param name="noIndex">Attributes can have the NOINDEX option, which means they will not be indexed.</param> + /// <returns>The <see cref="Schema"/> object.</returns> + public Schema AddNumericField(FieldName name, bool sortable = false, bool noIndex = false) + { + Fields.Add(new NumericField(name, sortable, noIndex)); + return this; + } + + /// <summary> + /// Add a Numeric field to the schema. + /// </summary> + /// <param name="name">The field's name.</param> + /// <param name="sortable">If true, the text field can be sorted.</param> + /// <param name="noIndex">Attributes can have the NOINDEX option, which means they will not be indexed.</param> + /// <returns>The <see cref="Schema"/> object.</returns> + public Schema AddNumericField(string name, bool sortable = false, bool noIndex = false) + { + Fields.Add(new NumericField(name, sortable, noIndex)); + return this; + } + + /// <summary> + /// Add a Tag field to the schema. + /// </summary> + /// <param name="name">The field's name.</param> + /// <param name="sortable">If true, the field can be sorted.</param> + /// <param name="unf">Set this to true to prevent the indexer from sorting on the normalized form. + /// <param name="noIndex">Attributes can have the NOINDEX option, which means they will not be indexed.</param> + /// <param name="separator">The tag separator.</param> + /// <param name="caseSensitive">If true, Keeps the original letter cases of the tags.</param> + /// Normalied form is the field sent to lower case with all diaretics removed</param> + /// <param name="withSuffixTrie">Keeps a suffix trie with all terms which match the suffix.</param> + /// <returns>The <see cref="Schema"/> object.</returns> + public Schema AddTagField(FieldName name, bool sortable = false, bool unf = false, + bool noIndex = false, string separator = ",", + bool caseSensitive = false, bool withSuffixTrie = false) + { + Fields.Add(new TagField(name, sortable, unf, noIndex, separator, caseSensitive, withSuffixTrie)); + return this; + } + + /// <summary> + /// Add a Tag field to the schema. + /// </summary> + /// <param name="name">The field's name.</param> + /// <param name="sortable">If true, the field can be sorted.</param> + /// <param name="unf">Set this to true to prevent the indexer from sorting on the normalized form. + /// <param name="noIndex">Attributes can have the NOINDEX option, which means they will not be indexed.</param> + /// <param name="separator">The tag separator.</param> + /// <param name="caseSensitive">If true, Keeps the original letter cases of the tags.</param> + /// Normalied form is the field sent to lower case with all diaretics removed</param> + /// <param name="withSuffixTrie">Keeps a suffix trie with all terms which match the suffix.</param> + /// <returns>The <see cref="Schema"/> object.</returns> + public Schema AddTagField(string name, bool sortable = false, bool unf = false, + bool noIndex = false, string separator = ",", + bool caseSensitive = false, bool withSuffixTrie = false) + { + Fields.Add(new TagField(name, sortable, unf, noIndex, separator, caseSensitive, withSuffixTrie)); + return this; + } + + /// <summary> + /// Add a Vector field to the schema. + /// </summary> + /// <param name="name">The field's name.</param> + /// <param name="algorithm">The vector similarity algorithm to use.</param> + /// <param name="attribute">The algorithm attributes for the creation of the vector index.</param> + /// <returns>The <see cref="Schema"/> object.</returns> + public Schema AddVectorField(string name, VectorAlgo algorithm, Dictionary<string, object>? attributes = null) + { + Fields.Add(new VectorField(name, algorithm, attributes)); + return this; + } + } +} diff --git a/src/NRedisStack/Search/SearchCommands.cs b/src/NRedisStack/Search/SearchCommands.cs index 875bb43b..e994e921 100644 --- a/src/NRedisStack/Search/SearchCommands.cs +++ b/src/NRedisStack/Search/SearchCommands.cs @@ -1,4 +1,8 @@ using NRedisStack.Literals; +using NRedisStack.Search; +using NRedisStack.Search.Aggregation; +using NRedisStack.Search.DataTypes; +using NRedisStack.Search.FT.CREATE; using StackExchange.Redis; namespace NRedisStack { @@ -9,9 +13,707 @@ public SearchCommands(IDatabase db) { _db = db; } - public RedisResult Info(RedisValue index) + + /// <summary> + /// Returns a list of all existing indexes. + /// </summary> + /// <returns>Array with index names.</returns> + /// <remarks><seealso href="https://redis.io/commands/ft._list"/></remarks> + public RedisResult[] _List() { - return _db.Execute(FT.INFO, index); + return _db.Execute(FT._LIST).ToArray(); } + + /// <summary> + /// Returns a list of all existing indexes. + /// </summary> + /// <returns>Array with index names.</returns> + /// <remarks><seealso href="https://redis.io/commands/ft._list"/></remarks> + public async Task<RedisResult[]> _ListAsync() + { + return (await _db.ExecuteAsync(FT._LIST)).ToArray(); + } + + /// <summary> + /// Run a search query on an index, and perform aggregate transformations on the results. + /// </summary> + /// <param name="index">The index name.</param> + /// <param name="query">The query</param> + /// <returns>An <see langword="AggregationResult"/> object</returns> + /// <remarks><seealso href="https://redis.io/commands/ft.aggregate"/></remarks> + public AggregationResult Aggregate(string index, AggregationRequest query) + { + List<object> args = new List<object> { index }; + //query.SerializeRedisArgs(args); + foreach (var arg in query.GetArgs()) + { + args.Add(arg.ToString()); + } + var result = _db.Execute(FT.AGGREGATE, args); + if (query.IsWithCursor()) + { + var results = (RedisResult[])result; + + return new AggregationResult(results[0], (long)results[1]); + } + else + { + return new AggregationResult(result); + } + } + + /// <summary> + /// Run a search query on an index, and perform aggregate transformations on the results. + /// </summary> + /// <param name="index">The index name.</param> + /// <param name="query">The query</param> + /// <returns>An <see langword="AggregationResult"/> object</returns> + /// <remarks><seealso href="https://redis.io/commands/ft.aggregate"/></remarks> + public async Task<AggregationResult> AggregateAsync(string index, AggregationRequest query) + { + List<object> args = new List<object> { index }; + //query.SerializeRedisArgs(args); + foreach (var arg in query.GetArgs()) + { + args.Add(arg); + } + var result = await _db.ExecuteAsync(FT.AGGREGATE, args); + if (query.IsWithCursor()) + { + var results = (RedisResult[])result; + + return new AggregationResult(results[0], (long)results[1]); + } + else + { + return new AggregationResult(result); + } + } + + /// <summary> + /// Add an alias to an index. + /// </summary> + /// <param name="alias">Alias to be added to an index.</param> + /// <param name="index">The index name.</param> + /// <returns><see langword="true"/> if executed correctly, error otherwise</returns> + /// <remarks><seealso href="https://redis.io/commands/ft.aliasadd"/></remarks> + public bool AliasAdd(string alias, string index) + { + return _db.Execute(FT.ALIASADD, alias, index).OKtoBoolean(); + } + + /// <summary> + /// Add an alias to an index. + /// </summary> + /// <param name="alias">Alias to be added to an index.</param> + /// <param name="index">The index name.</param> + /// <returns><see langword="true"/> if executed correctly, error otherwise</returns> + /// <remarks><seealso href="https://redis.io/commands/ft.aliasadd"/></remarks> + public async Task<bool> AliasAddAsync(string alias, string index) + { + return (await _db.ExecuteAsync(FT.ALIASADD, alias, index)).OKtoBoolean(); + } + + /// <summary> + /// Remove an alias to an index. + /// </summary> + /// <param name="alias">Alias to be removed.</param> + /// <returns><see langword="true"/> if executed correctly, error otherwise</returns> + /// <remarks><seealso href="https://redis.io/commands/ft.aliasdel"/></remarks> + public bool AliasDel(string alias) + { + return _db.Execute(FT.ALIASDEL, alias).OKtoBoolean(); + } + + /// <summary> + /// Remove an alias to an index. + /// </summary> + /// <param name="alias">Alias to be removed.</param> + /// <returns><see langword="true"/> if executed correctly, error otherwise</returns> + /// <remarks><seealso href="https://redis.io/commands/ft.aliasdel"/></remarks> + public async Task<bool> AliasDelAsync(string alias) + { + return (await _db.ExecuteAsync(FT.ALIASDEL, alias)).OKtoBoolean(); + } + + /// <summary> + /// Add an alias to an index. If the alias is already associated with another index, + /// FT.ALIASUPDATE removes the alias association with the previous index. + /// </summary> + /// <param name="alias">Alias to be removed.</param> + /// <param name="index">The index name.</param> + /// <returns><see langword="true"/> if executed correctly, error otherwise</returns> + /// <remarks><seealso href="https://redis.io/commands/ft.aliasdel"/></remarks> + public bool AliasUpdate(string alias, string index) + { + return _db.Execute(FT.ALIASUPDATE, alias, index).OKtoBoolean(); + } + + /// <summary> + /// Add an alias to an index. If the alias is already associated with another index, + /// FT.ALIASUPDATE removes the alias association with the previous index. + /// </summary> + /// <param name="alias">Alias to be removed.</param> + /// <param name="index">The index name.</param> + /// <returns><see langword="true"/> if executed correctly, error otherwise</returns> + /// <remarks><seealso href="https://redis.io/commands/ft.aliasdel"/></remarks> + public async Task<bool> AliasUpdateAsync(string alias, string index) + { + return (await _db.ExecuteAsync(FT.ALIASUPDATE, alias, index)).OKtoBoolean(); + } + + /// <summary> + /// Add a new attribute to the index + /// </summary> + /// <param name="index">The index name.</param> + /// <param name="skipInitialScan">If set, does not scan and index.</param> + /// <param name="schema">the schema.</param> + /// <returns><see langword="true"/> if executed correctly, error otherwise</returns> + /// <remarks><seealso href="https://redis.io/commands/ft.alter"/></remarks> + public bool Alter(string index, Schema schema, bool skipInitialScan = false) + { + List<object> args = new List<object>() { index }; + if (skipInitialScan) args.Add("SKIPINITIALSCAN"); + args.Add("SCHEMA"); + args.Add("ADD"); + foreach (var f in schema.Fields) + { + f.AddSchemaArgs(args); + } + return _db.Execute(FT.ALTER, args).OKtoBoolean(); + } + + /// <summary> + /// Add a new attribute to the index + /// </summary> + /// <param name="index">The index name.</param> + /// <param name="skipInitialScan">If set, does not scan and index.</param> + /// <param name="schema">the schema.</param> + /// <returns><see langword="true"/> if executed correctly, error otherwise</returns> + /// <remarks><seealso href="https://redis.io/commands/ft.alter"/></remarks> + public async Task<bool> AlterAsync(string index, Schema schema, bool skipInitialScan = false) + { + List<object> args = new List<object>() { index }; + if (skipInitialScan) args.Add("SKIPINITIALSCAN"); + args.Add("SCHEMA"); + args.Add("ADD"); + foreach (var f in schema.Fields) + { + f.AddSchemaArgs(args); + } + return (await _db.ExecuteAsync(FT.ALTER, args)).OKtoBoolean(); + } + + /// <summary> + /// Retrieve configuration options. + /// </summary> + /// <param name="option">is name of the configuration option, or '*' for all.</param> + /// <returns>An array reply of the configuration name and value.</returns> + /// <remarks><seealso href="https://redis.io/commands/ft.config-get"/></remarks> + public Dictionary<string, string> ConfigGet(string option) + { + var result = _db.Execute(FT.CONFIG, "GET", option); + return result.ToConfigDictionary(); // TODO: fix all tests to be like this + } + + /// <summary> + /// Retrieve configuration options. + /// </summary> + /// <param name="option">is name of the configuration option, or '*' for all.</param> + /// <returns>An array reply of the configuration name and value.</returns> + /// <remarks><seealso href="https://redis.io/commands/ft.config-get"/></remarks> + public async Task<Dictionary<string, string>> ConfigGetAsync(string option) + { + return (await _db.ExecuteAsync(FT.CONFIG, "GET", option)).ToConfigDictionary(); + } + + /// <summary> + /// Describe configuration options. + /// </summary> + /// <param name="option">is name of the configuration option, or '*' for all.</param> + /// <param name="value">is value of the configuration option.</param> + /// <returns><see langword="true"/> if executed correctly, error otherwise.</returns> + /// <remarks><seealso href="https://redis.io/commands/ft.config-set"/></remarks> + public bool ConfigSet(string option, string value) + { + return _db.Execute(FT.CONFIG, "SET", option, value).OKtoBoolean(); + } + + /// <summary> + /// Describe configuration options. + /// </summary> + /// <param name="option">is name of the configuration option, or '*' for all.</param> + /// <param name="value">is value of the configuration option.</param> + /// <returns><see langword="true"/> if executed correctly, error otherwise.</returns> + /// <remarks><seealso href="https://redis.io/commands/ft.config-set"/></remarks> + public async Task<bool> ConfigSetAsync(string option, string value) + { + return (await _db.ExecuteAsync(FT.CONFIG, "SET", option, value)).OKtoBoolean(); + } + + /// <summary> + /// Create an index with the given specification. + /// </summary> + /// <param name="indexName">The index name.</param> + /// <param name="parameters">Command's parameters.</param> + /// <param name="schema">The index schema.</param> + /// <returns><see langword="true"/> if executed correctly, error otherwise</returns> + /// <remarks><seealso href="https://redis.io/commands/ft.create"/></remarks> + public bool Create(string indexName, FTCreateParams parameters, Schema schema) + { + var args = new List<object>() { indexName }; + parameters.AddParams(args); // TODO: Think of a better implementation + + args.Add("SCHEMA"); + + foreach (var f in schema.Fields) + { + f.AddSchemaArgs(args); + } + + return _db.Execute(FT.CREATE, args).OKtoBoolean(); + } + + /// <summary> + /// Create an index with the given specification. + /// </summary> + /// <param name="indexName">The index name.</param> + /// <param name="parameters">Command's parameters.</param> + /// <param name="schema">The index schema.</param> + /// <returns><see langword="true"/> if executed correctly, error otherwise</returns> + /// <remarks><seealso href="https://redis.io/commands/ft.create"/></remarks> + public async Task<bool> CreateAsync(string indexName, FTCreateParams parameters, Schema schema) + { + var args = new List<object>() { indexName }; + parameters.AddParams(args); // TODO: Think of a better implementation + args.Add("SCHEMA"); + foreach (var f in schema.Fields) + { + f.AddSchemaArgs(args); + } + return (await _db.ExecuteAsync(FT.CREATE, args)).OKtoBoolean(); + } + + /// <summary> + /// Delete a cursor from the index. + /// </summary> + /// <param name="indexName">The index name</param> + /// <param name="cursorId">The cursor's ID.</param> + /// <returns><see langword="true"/> if it has been deleted, <see langword="false"/> if it did not exist.</returns> + /// <remarks><seealso href="https://redis.io/commands/ft.cursor-del/"/></remarks> + public bool CursorDel(string indexName, long cursorId) + { + return _db.Execute(FT.CURSOR, "DEL", indexName, cursorId).OKtoBoolean(); + } + + /// <summary> + /// Delete a cursor from the index. + /// </summary> + /// <param name="indexName">The index name</param> + /// <param name="cursorId">The cursor's ID.</param> + /// <returns><see langword="true"/> if it has been deleted, <see langword="false"/> if it did not exist.</returns> + /// <remarks><seealso href="https://redis.io/commands/ft.cursor-del/"/></remarks> + public async Task<bool> CursorDelAsync(string indexName, long cursorId) + { + return (await _db.ExecuteAsync(FT.CURSOR, "DEL", indexName, cursorId)).OKtoBoolean(); + } + + /// <summary> + /// Read next results from an existing cursor. + /// </summary> + /// <param name="indexName">The index name</param> + /// <param name="cursorId">The cursor's ID.</param> + /// <param name="count">Limit the amount of returned results.</param> + /// <returns>A AggregationResult object with the results</returns> + /// <remarks><seealso href="https://redis.io/commands/ft.cursor-read/"/></remarks> + public AggregationResult CursorRead(string indexName, long cursorId, int? count = null) + { + RedisResult[] resp = ((count == null) ? _db.Execute(FT.CURSOR, "READ", indexName, cursorId) + : _db.Execute(FT.CURSOR, "READ", indexName, cursorId, "COUNT", count)) + .ToArray(); + + return new AggregationResult(resp[0], (long)resp[1]); + } + + /// <summary> + /// Read next results from an existing cursor. + /// </summary> + /// <param name="indexName">The index name</param> + /// <param name="cursorId">The cursor's ID.</param> + /// <param name="count">Limit the amount of returned results.</param> + /// <returns>A AggregationResult object with the results</returns> + /// <remarks><seealso href="https://redis.io/commands/ft.cursor-read/"/></remarks> + public async Task<AggregationResult> CursorReadAsync(string indexName, long cursorId, int? count = null) + { + RedisResult[] resp = (await ((count == null) ? _db.ExecuteAsync(FT.CURSOR, "READ", indexName, cursorId) + : _db.ExecuteAsync(FT.CURSOR, "READ", indexName, cursorId, "COUNT", count))) + .ToArray(); + + return new AggregationResult(resp[0], (long)resp[1]); + } + + /// <summary> + /// Add terms to a dictionary. + /// </summary> + /// <param name="dict">The dictionary name</param> + /// <param name="terms">Terms to add to the dictionary..</param> + /// <returns>The number of new terms that were added.</returns> + /// <remarks><seealso href="https://redis.io/commands/ft.dictadd/"/></remarks> + public long DictAdd(string dict, params string[] terms) + { + if (terms.Length < 1) + { + throw new ArgumentOutOfRangeException("At least one term must be provided"); + } + + var args = new List<object>(terms.Length + 1) { dict }; + foreach (var t in terms) + { + args.Add(t); + } + + return _db.Execute(FT.DICTADD, args).ToLong(); + } + + /// <summary> + /// Add terms to a dictionary. + /// </summary> + /// <param name="dict">The dictionary name</param> + /// <param name="terms">Terms to add to the dictionary..</param> + /// <returns>The number of new terms that were added.</returns> + /// <remarks><seealso href="https://redis.io/commands/ft.dictadd/"/></remarks> + public async Task<long> DictAddAsync(string dict, params string[] terms) + { + if (terms.Length < 1) + { + throw new ArgumentOutOfRangeException("At least one term must be provided"); + } + + var args = new List<object>(terms.Length + 1) { dict }; + foreach (var t in terms) + { + args.Add(t); + } + + return (await _db.ExecuteAsync(FT.DICTADD, args)).ToLong(); + } + + /// <summary> + /// Delete terms from a dictionary. + /// </summary> + /// <param name="dict">The dictionary name</param> + /// <param name="terms">Terms to delete to the dictionary..</param> + /// <returns>The number of new terms that were deleted.</returns> + /// <remarks><seealso href="https://redis.io/commands/ft.dictdel/"/></remarks> + public long DictDel(string dict, params string[] terms) + { + if (terms.Length < 1) + { + throw new ArgumentOutOfRangeException("At least one term must be provided"); + } + + var args = new List<object>(terms.Length + 1) { dict }; + foreach (var t in terms) + { + args.Add(t); + } + + return _db.Execute(FT.DICTDEL, args).ToLong(); + } + + /// <summary> + /// Delete terms from a dictionary. + /// </summary> + /// <param name="dict">The dictionary name</param> + /// <param name="terms">Terms to delete to the dictionary..</param> + /// <returns>The number of new terms that were deleted.</returns> + /// <remarks><seealso href="https://redis.io/commands/ft.dictdel/"/></remarks> + public async Task<long> DictDelAsync(string dict, params string[] terms) + { + if (terms.Length < 1) + { + throw new ArgumentOutOfRangeException("At least one term must be provided"); + } + + var args = new List<object>(terms.Length + 1) { dict }; + foreach (var t in terms) + { + args.Add(t); + } + + return (await _db.ExecuteAsync(FT.DICTDEL, args)).ToLong(); + } + + /// <summary> + /// Dump all terms in the given dictionary. + /// </summary> + /// <param name="dict">The dictionary name</param> + /// <returns>An array, where each element is term.</returns> + /// <remarks><seealso href="https://redis.io/commands/ft.dictdump/"/></remarks> + public RedisResult[] DictDump(string dict) + { + return _db.Execute(FT.DICTDUMP, dict).ToArray(); + } + + /// <summary> + /// Dump all terms in the given dictionary. + /// </summary> + /// <param name="dict">The dictionary name</param> + /// <returns>An array, where each element is term.</returns> + /// <remarks><seealso href="https://redis.io/commands/ft.dictdump/"/></remarks> + public async Task<RedisResult[]> DictDumpAsync(string dict) + { + return (await _db.ExecuteAsync(FT.DICTDUMP, dict)).ToArray(); + } + + /// <summary> + /// Delete an index. + /// </summary> + /// <param name="indexName">The index name</param> + /// <param name="dd">If set, deletes the actual document hashes.</param> + /// <returns><see langword="true"/> if executed correctly, error otherwise</returns> + /// <remarks><seealso href="https://redis.io/commands/ft.dropindex/"/></remarks> + public bool DropIndex(string indexName, bool dd = false) + { + return ((dd) ? _db.Execute(FT.DROPINDEX, indexName, "DD") + : _db.Execute(FT.DROPINDEX, indexName)) + .OKtoBoolean(); + } + + /// <summary> + /// Delete an index. + /// </summary> + /// <param name="indexName">The index name</param> + /// <param name="dd">If set, deletes the actual document hashes.</param> + /// <returns><see langword="true"/> if executed correctly, error otherwise</returns> + /// <remarks><seealso href="https://redis.io/commands/ft.dropindex/"/></remarks> + public async Task<bool> DropIndexAsync(string indexName, bool dd = false) + { + return (await ((dd) ? _db.ExecuteAsync(FT.DROPINDEX, indexName, "DD") + : _db.ExecuteAsync(FT.DROPINDEX, indexName))) + .OKtoBoolean(); + } + + /// <summary> + /// Return the execution plan for a complex query + /// </summary> + /// <param name="indexName">The index name</param> + /// <param name="q">The query to explain</param> + /// <returns>String that representing the execution plan</returns> + /// <remarks><seealso href="https://redis.io/commands/ft.explain/"/></remarks> + public string Explain(string indexName, Query q) + { + var args = new List<object> { indexName }; + q.SerializeRedisArgs(args); + return _db.Execute(FT.EXPLAIN, args).ToString(); + } + + /// <summary> + /// Return the execution plan for a complex query + /// </summary> + /// <param name="indexName">The index name</param> + /// <param name="q">The query to explain</param> + /// <returns>String that representing the execution plan</returns> + /// <remarks><seealso href="https://redis.io/commands/ft.explain/"/></remarks> + public async Task<string> ExplainAsync(string indexName, Query q) + { + var args = new List<object> { indexName }; + q.SerializeRedisArgs(args); + return (await _db.ExecuteAsync(FT.EXPLAIN, args)).ToString(); + } + + /// <summary> + /// Return the execution plan for a complex query + /// </summary> + /// <param name="indexName">The index name</param> + /// <param name="q">The query to explain</param> + /// <returns>An array reply with a string representing the execution plan</returns> + /// <remarks><seealso href="https://redis.io/commands/ft.explaincli/"/></remarks> + public RedisResult[] ExplainCli(string indexName, Query q) + { + var args = new List<object> { indexName }; + q.SerializeRedisArgs(args); + return _db.Execute(FT.EXPLAINCLI, args).ToArray(); + } + + /// <summary> + /// Return the execution plan for a complex query + /// </summary> + /// <param name="indexName">The index name</param> + /// <param name="q">The query to explain</param> + /// <returns>An array reply with a string representing the execution plan</returns> + /// <remarks><seealso href="https://redis.io/commands/ft.explaincli/"/></remarks> + public async Task<RedisResult[]> ExplainCliAsync(string indexName, Query q) + { + var args = new List<object> { indexName }; + q.SerializeRedisArgs(args); + return (await _db.ExecuteAsync(FT.EXPLAINCLI, args)).ToArray(); + } + + // /// <summary> + // /// Return information and statistics on the index. + // /// </summary> + // /// <param name="key">The name of the index.</param> + // /// <returns>Dictionary of key and value with information about the index</returns> + // /// <remarks><seealso href="https://redis.io/commands/ft.info"/></remarks> + // public Dictionary<string, RedisValue> Info(RedisValue index) + // { + // return _db.Execute(FT.INFO, index).ToFtInfoAsDictionary(); + // } + + /// <summary> + /// Return information and statistics on the index. + /// </summary> + /// <param name="key">The name of the index.</param> + /// <returns>Dictionary of key and value with information about the index</returns> + /// <remarks><seealso href="https://redis.io/commands/ft.info"/></remarks> + public InfoResult Info(RedisValue index) => + new InfoResult(_db.Execute("FT.INFO", index)); + + /// <summary> + /// Return information and statistics on the index. + /// </summary> + /// <param name="key">The name of the index.</param> + /// <returns>Dictionary of key and value with information about the index</returns> + /// <remarks><seealso href="https://redis.io/commands/ft.info"/></remarks> + public async Task<InfoResult> InfoAsync(RedisValue index) => + new InfoResult(await _db.ExecuteAsync("FT.INFO", index)); + + // TODO: FT.PROFILE (jedis doesn't have it) + + /// <summary> + /// Search the index + /// </summary> + /// <param name="indexName">The index name</param> + /// <param name="q">a <see cref="Query"/> object with the query string and optional parameters</param> + /// <returns>a <see cref="SearchResult"/> object with the results</returns> + /// <remarks><seealso href="https://redis.io/commands/ft.search"/></remarks> + public SearchResult Search(string indexName, Query q) + { + var args = new List<object> { indexName }; + q.SerializeRedisArgs(args); + + var resp = _db.Execute("FT.SEARCH", args).ToArray(); + return new SearchResult(resp, !q.NoContent, q.WithScores, q.WithPayloads, q.ExplainScore); + } + + /// <summary> + /// Search the index + /// </summary> + /// <param name="indexName">The index name</param> + /// <param name="q">a <see cref="Query"/> object with the query string and optional parameters</param> + /// <returns>a <see cref="SearchResult"/> object with the results</returns> + /// <remarks><seealso href="https://redis.io/commands/ft.search"/></remarks> + public async Task<SearchResult> SearchAsync(string indexName, Query q) + { + var args = new List<object> { indexName }; + q.SerializeRedisArgs(args); + var resp = (await _db.ExecuteAsync("FT.SEARCH", args)).ToArray(); + return new SearchResult(resp, !q.NoContent, q.WithScores, q.WithPayloads, q.ExplainScore); + } + + /// <summary> + /// Dump the contents of a synonym group. + /// </summary> + /// <param name="indexName">The index name</param> + /// <returns>Pairs of term and an array of synonym groups.</returns> + /// <remarks><seealso href="https://redis.io/commands/ft.syndump"/></remarks> + public Dictionary<string, List<string>> SynDump(string indexName) + { + var resp = _db.Execute(FT.SYNDUMP, indexName).ToArray(); + var result = new Dictionary<string, List<string>>(); + for (int i = 0; i < resp.Length; i += 2) + { + var term = resp[i].ToString(); + var synonyms = (resp[i + 1]).ToArray().Select(x => x.ToString()).ToList(); // TODO: consider leave synonyms as RedisValue[] + result.Add(term, synonyms); + } + return result; + } + + // TODO: FT.SPELLCHECK (jedis doesn't have it) + + /// <summary> + /// Dump the contents of a synonym group. + /// </summary> + /// <param name="indexName">The index name</param> + /// <returns>Pairs of term and an array of synonym groups.</returns> + /// <remarks><seealso href="https://redis.io/commands/ft.syndump"/></remarks> + public async Task<Dictionary<string, List<string>>> SynDumpAsync(string indexName) + { + var resp = (await _db.ExecuteAsync(FT.SYNDUMP, indexName)).ToArray(); + var result = new Dictionary<string, List<string>>(); + for (int i = 0; i < resp.Length; i += 2) + { + var term = resp[i].ToString(); + var synonyms = (resp[i + 1]).ToArray().Select(x => x.ToString()).ToList(); // TODO: consider leave synonyms as RedisValue[] + result.Add(term, synonyms); + } + return result; + } + + /// <summary> + /// Update a synonym group. + /// </summary> + /// <param name="indexName">The index name</param> + /// <param name="synonymGroupId">Is synonym group to return</param> + /// <param name="skipInitialScan">does not scan and index, and only documents + /// that are indexed after the update are affected</param> + /// <param name="terms">The terms</param> + /// <returns>Pairs of term and an array of synonym groups.</returns> + /// <remarks><seealso href="https://redis.io/commands/ft.synupdate"/></remarks> + public bool SynUpdate(string indexName, string synonymGroupId, bool skipInitialScan = false, params string[] terms) + { + if (terms.Length < 1) + { + throw new ArgumentOutOfRangeException("terms must have at least one element"); + } + var args = new List<object> { indexName, synonymGroupId }; + if (skipInitialScan) { args.Add(SearchArgs.SKIPINITIALSCAN); } + args.AddRange(terms); + return _db.Execute(FT.SYNUPDATE, args).OKtoBoolean(); + } + + /// <summary> + /// Update a synonym group. + /// </summary> + /// <param name="indexName">The index name</param> + /// <param name="synonymGroupId">Is synonym group to return</param> + /// <param name="skipInitialScan">does not scan and index, and only documents + /// that are indexed after the update are affected</param> + /// <param name="terms">The terms</param> + /// <returns>Pairs of term and an array of synonym groups.</returns> + /// <remarks><seealso href="https://redis.io/commands/ft.synupdate"/></remarks> + public async Task<bool> SynUpdateAsync(string indexName, string synonymGroupId, bool skipInitialScan = false, params string[] terms) + { + if (terms.Length < 1) + { + throw new ArgumentOutOfRangeException("terms must have at least one element"); + } + var args = new List<object> { indexName, synonymGroupId }; + if (skipInitialScan) { args.Add(SearchArgs.SKIPINITIALSCAN); } + args.AddRange(terms); + return (await _db.ExecuteAsync(FT.SYNUPDATE, args)).OKtoBoolean(); + } + + /// <summary> + /// Return a distinct set of values indexed in a Tag field. + /// </summary> + /// <param name="indexName">The index name</param> + /// <param name="fieldName">TAG field name</param> + /// <returns>List of TAG field values</returns> + /// <remarks><seealso href="https://redis.io/commands/ft.tagvals"/></remarks> + public RedisResult[] TagVals(string indexName, string fieldName) => //TODO: consider return Set + _db.Execute(FT.TAGVALS, indexName, fieldName).ToArray(); + + /// <summary> + /// Return a distinct set of values indexed in a Tag field. + /// </summary> + /// <param name="indexName">The index name</param> + /// <param name="fieldName">TAG field name</param> + /// <returns>List of TAG field values</returns> + /// <remarks><seealso href="https://redis.io/commands/ft.tagvals"/></remarks> + public async Task<RedisResult[]> TagValsAsync(string indexName, string fieldName) => //TODO: consider return Set + (await _db.ExecuteAsync(FT.TAGVALS, indexName, fieldName)).ToArray(); } } \ No newline at end of file diff --git a/src/NRedisStack/Search/SearchResult.cs b/src/NRedisStack/Search/SearchResult.cs new file mode 100644 index 00000000..a8ea49d3 --- /dev/null +++ b/src/NRedisStack/Search/SearchResult.cs @@ -0,0 +1,98 @@ +using StackExchange.Redis; +using System.Collections.Generic; +using System.Linq; + +namespace NRedisStack.Search +{ + /// <summary> + /// SearchResult encapsulates the returned result from a search query. + /// It contains publically accessible fields for the total number of results, and an array of <see cref="Document"/> + /// objects conatining the actual returned documents. + /// </summary> + public class SearchResult + { + public long TotalResults { get; } + public List<Document> Documents { get; } + + internal SearchResult(RedisResult[] resp, bool hasContent, bool hasScores, bool hasPayloads, bool shouldExplainScore) + { + // Calculate the step distance to walk over the results. + // The order of results is id, score (if withScore), payLoad (if hasPayloads), fields + int step = 1; + int scoreOffset = 0; + int contentOffset = 1; + int payloadOffset = 0; + if (hasScores) + { + step++; + scoreOffset = 1; + contentOffset++; + + } + if (hasContent) + { + step++; + if (hasPayloads) + { + payloadOffset = scoreOffset + 1; + step++; + contentOffset++; + } + } + + // the first element is always the number of results + TotalResults = (long)resp[0]; + var docs = new List<Document>((resp.Length - 1) / step); + Documents = docs; + for (int i = 1; i < resp.Length; i += step) + { + var id = (string)resp[i]; + double score = 1.0; + byte[] payload = null; + RedisValue[] fields = null; + string[] scoreExplained = null; + if (hasScores) + { + if (shouldExplainScore) + { + var scoreResult = (RedisResult[])resp[i + scoreOffset]; + score = (double) scoreResult[0]; + var redisResultsScoreExplained = (RedisResult[]) scoreResult[1]; + scoreExplained = FlatRedisResultArray(redisResultsScoreExplained).ToArray(); + } + else + { + score = (double)resp[i + scoreOffset]; + } + } + if (hasPayloads) + { + payload = (byte[])resp[i + payloadOffset]; + } + + if (hasContent) + { + fields = (RedisValue[])resp[i + contentOffset]; + } + + docs.Add(Document.Load(id, score, payload, fields, scoreExplained)); + } + } + + static IEnumerable<string> FlatRedisResultArray(RedisResult[] collection) + { + foreach (var o in collection) + { + if (o.Type == ResultType.MultiBulk) + { + foreach (string t in FlatRedisResultArray((RedisResult[])o)) + yield return t; + } + else + { + yield return o.ToString(); + } + } + } + } +} \ No newline at end of file diff --git a/src/NRedisStack/Search/SortedField.cs b/src/NRedisStack/Search/SortedField.cs new file mode 100644 index 00000000..6aa3022a --- /dev/null +++ b/src/NRedisStack/Search/SortedField.cs @@ -0,0 +1,32 @@ +using System.Collections.Generic; + +namespace NRedisStack.Search.Aggregation +{ + public class SortedField + { + + public enum SortOrder + { + ASC, DESC + } + + public string FieldName { get; } + public SortOrder Order { get; } + + public SortedField(String fieldName, SortOrder order) + { + this.FieldName = fieldName; + this.Order = order; + } + + public static SortedField Asc(String field) + { + return new SortedField(field, SortOrder.ASC); + } + + public static SortedField Desc(String field) + { + return new SortedField(field, SortOrder.DESC); + } + } +} \ No newline at end of file diff --git a/src/NRedisStack/Tdigest/TdigestCommands.cs b/src/NRedisStack/Tdigest/TdigestCommands.cs index 3e373973..ab02de32 100644 --- a/src/NRedisStack/Tdigest/TdigestCommands.cs +++ b/src/NRedisStack/Tdigest/TdigestCommands.cs @@ -16,53 +16,19 @@ public TdigestCommands(IDatabase db) /// Adds one or more observations to a t-digest sketch. /// </summary> /// <param name="key">The name of the sketch.</param> - /// <param name="value">The value of the observation.</param> - /// <param name="weight">The weight of this observation.</param> + /// <param name="values">The value of the observation.</param> /// <returns><see langword="true"/> if executed correctly, error otherwise</returns> /// <remarks><seealso href="https://redis.io/commands/tdigest.add"/></remarks> - public bool Add(RedisKey key, double item, long weight) + public bool Add(RedisKey key, params double[] values) { - if (weight < 0) throw new ArgumentOutOfRangeException(nameof(weight)); - - return _db.Execute(TDIGEST.ADD, key, item, weight).OKtoBoolean(); - } - - /// <summary> - /// Adds one or more observations to a t-digest sketch. - /// </summary> - /// <param name="key">The name of the sketch.</param> - /// <param name="value">The value of the observation.</param> - /// <param name="weight">The weight of this observation.</param> - /// <returns><see langword="true"/> if executed correctly, error otherwise</returns> - /// <remarks><seealso href="https://redis.io/commands/tdigest.add"/></remarks> - public async Task<bool> AddAsync(RedisKey key, double item, int weight) - { - if (weight < 0) throw new ArgumentOutOfRangeException(nameof(weight)); - - var result = await _db.ExecuteAsync(TDIGEST.ADD, key, item, weight); - return result.OKtoBoolean(); - } - - /// <summary> - /// Adds one or more observations to a t-digest sketch. - /// </summary> - /// <param name="key">The name of the sketch.</param> - /// <param name="valueWeight">Tuple of the value of the observation and The weight of this observation.</param> - /// <returns><see langword="true"/> if executed correctly, error otherwise</returns> - /// <remarks><seealso href="https://redis.io/commands/tdigest.add"/></remarks> - public bool Add(RedisKey key, params Tuple<double, long>[] valueWeight) - { - if (valueWeight.Length < 1) - throw new ArgumentOutOfRangeException(nameof(valueWeight)); - - var args = new List<object> { key }; - - foreach (var pair in valueWeight) + if (values.Length < 0) throw new ArgumentOutOfRangeException(nameof(values)); + var args = new string[values.Length + 1]; + args[0] = key.ToString(); + for (int i = 0; i < values.Length; i++) { - if (pair.Item2 < 0) throw new ArgumentOutOfRangeException(nameof(pair.Item2)); - args.Add(pair.Item1); - args.Add(pair.Item2); + args[i + 1] = values[i].ToString(); } + return _db.Execute(TDIGEST.ADD, args).OKtoBoolean(); } @@ -70,25 +36,99 @@ public bool Add(RedisKey key, params Tuple<double, long>[] valueWeight) /// Adds one or more observations to a t-digest sketch. /// </summary> /// <param name="key">The name of the sketch.</param> - /// <param name="valueWeight">Tuple of the value of the observation and The weight of this observation.</param> + /// <param name="values">The value of the observation.</param> /// <returns><see langword="true"/> if executed correctly, error otherwise</returns> /// <remarks><seealso href="https://redis.io/commands/tdigest.add"/></remarks> - public async Task<bool> AddAsync(RedisKey key, params Tuple<double, long>[] valueWeight) + public async Task<bool> AddAsync(RedisKey key, params double[] values) { - if (valueWeight.Length < 1) - throw new ArgumentOutOfRangeException(nameof(valueWeight)); - - var args = new List<object> { key }; - - foreach (var pair in valueWeight) + if (values.Length < 0) throw new ArgumentOutOfRangeException(nameof(values)); + var args = new string[values.Length + 1]; + args[0] = key; + for (int i = 0; i < values.Length; i++) { - if (pair.Item2 < 0) throw new ArgumentOutOfRangeException(nameof(pair.Item2)); - args.Add(pair.Item1); - args.Add(pair.Item2); + args[i + 1] = values[i].ToString(); } + return (await _db.ExecuteAsync(TDIGEST.ADD, args)).OKtoBoolean(); } + // /// <summary> + // /// Adds one or more observations to a t-digest sketch. + // /// </summary> + // /// <param name="key">The name of the sketch.</param> + // /// <param name="value">The value of the observation.</param> + // /// <param name="weight">The weight of this observation.</param> + // /// <returns><see langword="true"/> if executed correctly, error otherwise</returns> + // /// <remarks><seealso href="https://redis.io/commands/tdigest.add"/></remarks> + // public bool Add(RedisKey key, double item, long weight) + // { + // if (weight < 0) throw new ArgumentOutOfRangeException(nameof(weight)); + + // return _db.Execute(TDIGEST.ADD, key, item, weight).OKtoBoolean(); + // } + + // /// <summary> + // /// Adds one or more observations to a t-digest sketch. + // /// </summary> + // /// <param name="key">The name of the sketch.</param> + // /// <param name="value">The value of the observation.</param> + // /// <param name="weight">The weight of this observation.</param> + // /// <returns><see langword="true"/> if executed correctly, error otherwise</returns> + // /// <remarks><seealso href="https://redis.io/commands/tdigest.add"/></remarks> + // public async Task<bool> AddAsync(RedisKey key, double item, int weight) + // { + // if (weight < 0) throw new ArgumentOutOfRangeException(nameof(weight)); + + // var result = await _db.ExecuteAsync(TDIGEST.ADD, key, item, weight); + // return result.OKtoBoolean(); + // } + + // /// <summary> + // /// Adds one or more observations to a t-digest sketch. + // /// </summary> + // /// <param name="key">The name of the sketch.</param> + // /// <param name="valueWeight">Tuple of the value of the observation and The weight of this observation.</param> + // /// <returns><see langword="true"/> if executed correctly, error otherwise</returns> + // /// <remarks><seealso href="https://redis.io/commands/tdigest.add"/></remarks> + // public bool Add(RedisKey key, params Tuple<double, long>[] valueWeight) + // { + // if (valueWeight.Length < 1) + // throw new ArgumentOutOfRangeException(nameof(valueWeight)); + + // var args = new List<object> { key }; + + // foreach (var pair in valueWeight) + // { + // if (pair.Item2 < 0) throw new ArgumentOutOfRangeException(nameof(pair.Item2)); + // args.Add(pair.Item1); + // args.Add(pair.Item2); + // } + // return _db.Execute(TDIGEST.ADD, args).OKtoBoolean(); + // } + + // /// <summary> + // /// Adds one or more observations to a t-digest sketch. + // /// </summary> + // /// <param name="key">The name of the sketch.</param> + // /// <param name="valueWeight">Tuple of the value of the observation and The weight of this observation.</param> + // /// <returns><see langword="true"/> if executed correctly, error otherwise</returns> + // /// <remarks><seealso href="https://redis.io/commands/tdigest.add"/></remarks> + // public async Task<bool> AddAsync(RedisKey key, params Tuple<double, long>[] valueWeight) + // { + // if (valueWeight.Length < 1) + // throw new ArgumentOutOfRangeException(nameof(valueWeight)); + + // var args = new List<object> { key }; + + // foreach (var pair in valueWeight) + // { + // if (pair.Item2 < 0) throw new ArgumentOutOfRangeException(nameof(pair.Item2)); + // args.Add(pair.Item1); + // args.Add(pair.Item2); + // } + // return (await _db.ExecuteAsync(TDIGEST.ADD, args)).OKtoBoolean(); + // } + /// <summary> /// Estimate the fraction of all observations added which are <= value. /// </summary> diff --git a/tests/NRedisStack.Tests/Search/SearchTests.cs b/tests/NRedisStack.Tests/Search/SearchTests.cs index ca963510..ddc57920 100644 --- a/tests/NRedisStack.Tests/Search/SearchTests.cs +++ b/tests/NRedisStack.Tests/Search/SearchTests.cs @@ -2,21 +2,1065 @@ using StackExchange.Redis; using NRedisStack.RedisStackCommands; using Moq; - +using NRedisStack.Search.FT.CREATE; +using NRedisStack.Search; +using static NRedisStack.Search.Schema; +using NRedisStack.Search.Aggregation; namespace NRedisStack.Tests.Search; public class SearchTests : AbstractNRedisStackTest, IDisposable { Mock<IDatabase> _mock = new Mock<IDatabase>(); - private readonly string key = "SEARCH_TESTS"; + // private readonly string key = "SEARCH_TESTS"; + private readonly string index = "TEST_INDEX"; public SearchTests(RedisFixture redisFixture) : base(redisFixture) { } public void Dispose() { - redisFixture.Redis.GetDatabase().KeyDelete(key); + redisFixture.Redis.GetDatabase().KeyDelete(index); + } + + private void AddDocument(IDatabase db, Document doc) + { + string key = doc.Id; + var properties = doc.GetProperties(); + // HashEntry[] hash = new HashEntry[properties.Count()]; + // for(int i = 0; i < properties.Count(); i++) + // { + // var property = properties.ElementAt(i); + // hash[i] = new HashEntry(property.Key, property.Value); + // } + // db.HashSet(key, hash); + var nameValue = new List<object>() { key }; + foreach (var item in properties) + { + nameValue.Add(item.Key); + nameValue.Add(item.Value); + } + db.Execute("HSET", nameValue); + + } + + private void AddDocument(IDatabase db, string key, Dictionary<string, object> objDictionary) + { + Dictionary<string, string> strDictionary = new Dictionary<string, string>(); + // HashEntry[] hash = new HashEntry[objDictionary.Count()]; + // for(int i = 0; i < objDictionary.Count(); i++) + // { + // var property = objDictionary.ElementAt(i); + // hash[i] = new HashEntry(property.Key, property.Value.ToString()); + // } + // db.HashSet(key, hash); + var nameValue = new List<object>() { key }; + foreach (var item in objDictionary) + { + nameValue.Add(item.Key); + nameValue.Add(item.Value); + } + db.Execute("HSET", nameValue); + } + + [Fact] + public void TestAggregationRequestVerbatim() + { + IDatabase db = redisFixture.Redis.GetDatabase(); + db.Execute("FLUSHALL"); + var ft = db.FT(); + Schema sc = new Schema(); + sc.AddTextField("name", 1.0, sortable: true); + ft.Create(index, FTCreateParams.CreateParams(), sc); + AddDocument(db, new Document("data1").Set("name", "hello kitty")); + + AggregationRequest r = new AggregationRequest("kitti"); + + AggregationResult res = ft.Aggregate(index, r); + Assert.Equal(1, res.TotalResults); + + r = new AggregationRequest("kitti") + .Verbatim(); + + res = ft.Aggregate(index, r); + Assert.Equal(0, res.TotalResults); + } + + [Fact] + public async Task TestAggregationRequestVerbatimAsync() + { + IDatabase db = redisFixture.Redis.GetDatabase(); + db.Execute("FLUSHALL"); + var ft = db.FT(); + Schema sc = new Schema(); + sc.AddTextField("name", 1.0, sortable: true); + ft.Create(index, FTCreateParams.CreateParams(), sc); + AddDocument(db, new Document("data1").Set("name", "hello kitty")); + + AggregationRequest r = new AggregationRequest("kitti"); + + AggregationResult res = await ft.AggregateAsync(index, r); + Assert.Equal(1, res.TotalResults); + + r = new AggregationRequest("kitti") + .Verbatim(); + + res = await ft.AggregateAsync(index, r); + Assert.Equal(0, res.TotalResults); + } + + [Fact] + public void TestAggregationRequestTimeout() + { + IDatabase db = redisFixture.Redis.GetDatabase(); + db.Execute("FLUSHALL"); + var ft = db.FT(); + Schema sc = new Schema(); + sc.AddTextField("name", 1.0, sortable: true); + sc.AddNumericField("count", sortable: true); + ft.Create(index, FTCreateParams.CreateParams(), sc); + AddDocument(db, new Document("data1").Set("name", "abc").Set("count", 10)); + AddDocument(db, new Document("data2").Set("name", "def").Set("count", 5)); + AddDocument(db, new Document("data3").Set("name", "def").Set("count", 25)); + + AggregationRequest r = new AggregationRequest() + .GroupBy("@name", Reducers.Sum("@count").As("sum")) + .Timeout(5000); + + AggregationResult res = ft.Aggregate(index, r); + Assert.Equal(2, res.TotalResults); + } + + [Fact] + public async Task TestAggregationRequestTimeoutAsync() + { + IDatabase db = redisFixture.Redis.GetDatabase(); + db.Execute("FLUSHALL"); + var ft = db.FT(); + Schema sc = new Schema(); + sc.AddTextField("name", 1.0, sortable: true); + sc.AddNumericField("count", sortable: true); + ft.Create(index, FTCreateParams.CreateParams(), sc); + AddDocument(db, new Document("data1").Set("name", "abc").Set("count", 10)); + AddDocument(db, new Document("data2").Set("name", "def").Set("count", 5)); + AddDocument(db, new Document("data3").Set("name", "def").Set("count", 25)); + + AggregationRequest r = new AggregationRequest() + .GroupBy("@name", Reducers.Sum("@count").As("sum")) + .Timeout(5000); + + AggregationResult res = await ft.AggregateAsync(index, r); + Assert.Equal(2, res.TotalResults); + } + + [Fact] + public void TestAggregations() + { + IDatabase db = redisFixture.Redis.GetDatabase(); + db.Execute("FLUSHALL"); + var ft = db.FT(); + Schema sc = new Schema(); + sc.AddTextField("name", 1.0, true); + sc.AddNumericField("count", true); + ft.Create(index, FTCreateParams.CreateParams(), sc); + // client.AddDocument(new Document("data1").Set("name", "abc").Set("count", 10)); + // client.AddDocument(new Document("data2").Set("name", "def").Set("count", 5)); + // client.AddDocument(new Document("data3").Set("name", "def").Set("count", 25)); + AddDocument(db, new Document("data1").Set("name", "abc").Set("count", 10)); + AddDocument(db, new Document("data2").Set("name", "def").Set("count", 5)); + AddDocument(db, new Document("data3").Set("name", "def").Set("count", 25)); + + AggregationRequest r = new AggregationRequest() + .GroupBy("@name", Reducers.Sum("@count").As ("sum")) + .SortBy(10, SortedField.Desc("@sum")); + + // actual search + var res = ft.Aggregate(index, r); + Assert.Equal(2, res.TotalResults); + + Row r1 = res.GetRow(0); + Assert.NotNull(r1); + Assert.Equal("def", r1.GetString("name")); + Assert.Equal(30, r1.GetLong("sum")); + Assert.Equal(30, r1.GetDouble("sum"), 0); + + Assert.Equal(0L, r1.GetLong("nosuchcol")); + Assert.Equal(0.0, r1.GetDouble("nosuchcol"), 0); + Assert.Null(r1.GetString("nosuchcol")); + + Row r2 = res.GetRow(1); + Assert.NotNull(r2); + Assert.Equal("abc", r2.GetString("name")); + Assert.Equal(10, r2.GetLong("sum")); + } + + [Fact] + public void TestAggregationRequestParamsDialect() + { + IDatabase db = redisFixture.Redis.GetDatabase(); + db.Execute("FLUSHALL"); + var ft = db.FT(); + Schema sc = new Schema(); + sc.AddTextField("name", 1.0, sortable: true); + sc.AddNumericField("count", sortable: true); + ft.Create(index, FTCreateParams.CreateParams(), sc); + AddDocument(db, new Document("data1").Set("name", "abc").Set("count", 10)); + AddDocument(db, new Document("data2").Set("name", "def").Set("count", 5)); + AddDocument(db, new Document("data3").Set("name", "def").Set("count", 25)); + + Dictionary<string, object> parameters = new Dictionary<string, object>(); + parameters.Add("name", "abc"); + + AggregationRequest r = new AggregationRequest("$name") + .GroupBy("@name", Reducers.Sum("@count").As("sum")) + .Params(parameters) + .Dialect(2); // From documentation - To use PARAMS, DIALECT must be set to 2 + + AggregationResult res = ft.Aggregate(index, r); + Assert.Equal(1, res.TotalResults); + + Row r1 = res.GetRow(0); + Assert.NotNull(r1); + Assert.Equal("abc", r1.GetString("name")); + Assert.Equal(10, r1.GetLong("sum")); + } + + [Fact] + public async Task TestAggregationRequestParamsDialectAsync() + { + IDatabase db = redisFixture.Redis.GetDatabase(); + db.Execute("FLUSHALL"); + var ft = db.FT(); + Schema sc = new Schema(); + sc.AddTextField("name", 1.0, sortable: true); + sc.AddNumericField("count", sortable: true); + ft.Create(index, FTCreateParams.CreateParams(), sc); + AddDocument(db, new Document("data1").Set("name", "abc").Set("count", 10)); + AddDocument(db, new Document("data2").Set("name", "def").Set("count", 5)); + AddDocument(db, new Document("data3").Set("name", "def").Set("count", 25)); + + Dictionary<string, object> parameters = new Dictionary<string, object>(); + parameters.Add("name", "abc"); + + AggregationRequest r = new AggregationRequest("$name") + .GroupBy("@name", Reducers.Sum("@count").As("sum")) + .Params(parameters) + .Dialect(2); // From documentation - To use PARAMS, DIALECT must be set to 2 + + AggregationResult res = await ft.AggregateAsync(index, r); + Assert.Equal(1, res.TotalResults); + + Row r1 = res.GetRow(0); + Assert.NotNull(r1); + Assert.Equal("abc", r1.GetString("name")); + Assert.Equal(10, r1.GetLong("sum")); + } + + [Fact] + public void TestAlias() + { + IDatabase db = redisFixture.Redis.GetDatabase(); + db.Execute("FLUSHALL"); + var ft = db.FT(); + Schema sc = new Schema().AddTextField("field1"); + + Assert.True(ft.Create(index, FTCreateParams.CreateParams(), sc)); + + Dictionary<string, object> doc = new Dictionary<string, object>(); + doc.Add("field1", "value"); + AddDocument(db, "doc1", doc); + + Assert.True(ft.AliasAdd("ALIAS1", index)); + SearchResult res1 = ft.Search("ALIAS1", new Query("*").ReturnFields("field1")); + Assert.Equal(1, res1.TotalResults); + Assert.Equal("value", res1.Documents[0]["field1"]); + + Assert.True(ft.AliasUpdate("ALIAS2", index)); + SearchResult res2 = ft.Search("ALIAS2", new Query("*").ReturnFields("field1")); + Assert.Equal(1, res2.TotalResults); + Assert.Equal("value", res2.Documents[0]["field1"]); + + Assert.Throws<RedisServerException>(() => ft.AliasDel("ALIAS3")); + Assert.True(ft.AliasDel("ALIAS2")); + Assert.Throws<RedisServerException>(() => ft.AliasDel("ALIAS2")); + } + + [Fact] + public async Task TestAliasAsync() + { + IDatabase db = redisFixture.Redis.GetDatabase(); + db.Execute("FLUSHALL"); + var ft = db.FT(); + Schema sc = new Schema().AddTextField("field1"); + + Assert.True(ft.Create(index, FTCreateParams.CreateParams(), sc)); + + Dictionary<string, object> doc = new Dictionary<string, object>(); + doc.Add("field1", "value"); + AddDocument(db, "doc1", doc); + + Assert.True(await ft.AliasAddAsync("ALIAS1", index)); + SearchResult res1 = ft.Search("ALIAS1", new Query("*").ReturnFields("field1")); + Assert.Equal(1, res1.TotalResults); + Assert.Equal("value", res1.Documents[0]["field1"]); + + Assert.True(await ft.AliasUpdateAsync("ALIAS2", index)); + SearchResult res2 = ft.Search("ALIAS2", new Query("*").ReturnFields("field1")); + Assert.Equal(1, res2.TotalResults); + Assert.Equal("value", res2.Documents[0]["field1"]); + + await Assert.ThrowsAsync<RedisServerException>(async () => await ft.AliasDelAsync("ALIAS3")); + Assert.True(await ft.AliasDelAsync("ALIAS2")); + await Assert.ThrowsAsync<RedisServerException>(async () => await ft.AliasDelAsync("ALIAS2")); + } + + [Fact] + public void TestApplyAndFilterAggregations() + { + IDatabase db = redisFixture.Redis.GetDatabase(); + db.Execute("FLUSHALL"); + var ft = db.FT(); + Schema sc = new Schema(); + sc.AddTextField("name", 1.0, sortable: true); + sc.AddNumericField("subj1", sortable: true); + sc.AddNumericField("subj2", sortable: true); + ft.Create(index, FTCreateParams.CreateParams(), sc); + // client.AddDocument(db, new Document("data1").Set("name", "abc").Set("subj1", 20).Set("subj2", 70)); + // client.AddDocument(db, new Document("data2").Set("name", "def").Set("subj1", 60).Set("subj2", 40)); + // client.AddDocument(db, new Document("data3").Set("name", "ghi").Set("subj1", 50).Set("subj2", 80)); + // client.AddDocument(db, new Document("data4").Set("name", "abc").Set("subj1", 30).Set("subj2", 20)); + // client.AddDocument(db, new Document("data5").Set("name", "def").Set("subj1", 65).Set("subj2", 45)); + // client.AddDocument(db, new Document("data6").Set("name", "ghi").Set("subj1", 70).Set("subj2", 70)); + AddDocument(db, new Document("data1").Set("name", "abc").Set("subj1", 20).Set("subj2", 70)); + AddDocument(db, new Document("data2").Set("name", "def").Set("subj1", 60).Set("subj2", 40)); + AddDocument(db, new Document("data3").Set("name", "ghi").Set("subj1", 50).Set("subj2", 80)); + AddDocument(db, new Document("data4").Set("name", "abc").Set("subj1", 30).Set("subj2", 20)); + AddDocument(db, new Document("data5").Set("name", "def").Set("subj1", 65).Set("subj2", 45)); + AddDocument(db, new Document("data6").Set("name", "ghi").Set("subj1", 70).Set("subj2", 70)); + + AggregationRequest r = new AggregationRequest().Apply("(@subj1+@subj2)/2", "attemptavg") + .GroupBy("@name", Reducers.Avg("@attemptavg").As("avgscore")) + .Filter("@avgscore>=50") + .SortBy(10, SortedField.Asc("@name")); + + // actual search + AggregationResult res = ft.Aggregate(index, r); + Assert.Equal(3, res.TotalResults); + + Row r1 = res.GetRow(0); + Assert.NotNull(r1); + Assert.Equal("def", r1.GetString("name")); + Assert.Equal(52.5, r1.GetDouble("avgscore"), 0); + + Row r2 = res.GetRow(1); + Assert.NotNull(r2); + Assert.Equal("ghi", r2.GetString("name")); + Assert.Equal(67.5, r2.GetDouble("avgscore"), 0); + } + + [Fact] + public void TestCreate() + { + IDatabase db = redisFixture.Redis.GetDatabase(); + db.Execute("FLUSHALL"); + var ft = db.FT(); + var schema = new Schema().AddTextField("first").AddTextField("last").AddNumericField("age"); + var parameters = FTCreateParams.CreateParams().Filter("@age>16").Prefix("student:", "pupil:"); + + Assert.True(ft.Create(index, parameters, schema)); + + db.HashSet("profesor:5555", new HashEntry[] { new("first", "Albert"), new("last", "Blue"), new("age", "55") }); + db.HashSet("student:1111", new HashEntry[] { new("first", "Joe"), new("last", "Dod"), new("age", "18") }); + db.HashSet("pupil:2222", new HashEntry[] { new("first", "Jen"), new("last", "Rod"), new("age", "14") }); + db.HashSet("student:3333", new HashEntry[] { new("first", "El"), new("last", "Mark"), new("age", "17") }); + db.HashSet("pupil:4444", new HashEntry[] { new("first", "Pat"), new("last", "Shu"), new("age", "21") }); + db.HashSet("student:5555", new HashEntry[] { new("first", "Joen"), new("last", "Ko"), new("age", "20") }); + db.HashSet("teacher:6666", new HashEntry[] { new("first", "Pat"), new("last", "Rod"), new("age", "20") }); + + var noFilters = ft.Search(index, new Query()); + Assert.Equal(4, noFilters.TotalResults); + + var res1 = ft.Search(index, new Query("@first:Jo*")); + Assert.Equal(2, res1.TotalResults); + + var res2 = ft.Search(index, new Query("@first:Pat")); + Assert.Equal(1, res2.TotalResults); + + var res3 = ft.Search(index, new Query("@last:Rod")); + Assert.Equal(0, res3.TotalResults); + } + + [Fact] + public async Task TestCreateAsync() + { + IDatabase db = redisFixture.Redis.GetDatabase(); + db.Execute("FLUSHALL"); + var ft = db.FT(); + var schema = new Schema().AddTextField("first").AddTextField("last").AddNumericField("age"); + var parameters = FTCreateParams.CreateParams().Filter("@age>16").Prefix("student:", "pupil:"); + Assert.True(await ft.CreateAsync(index, parameters, schema)); + db.HashSet("profesor:5555", new HashEntry[] { new("first", "Albert"), new("last", "Blue"), new("age", "55") }); + db.HashSet("student:1111", new HashEntry[] { new("first", "Joe"), new("last", "Dod"), new("age", "18") }); + db.HashSet("pupil:2222", new HashEntry[] { new("first", "Jen"), new("last", "Rod"), new("age", "14") }); + db.HashSet("student:3333", new HashEntry[] { new("first", "El"), new("last", "Mark"), new("age", "17") }); + db.HashSet("pupil:4444", new HashEntry[] { new("first", "Pat"), new("last", "Shu"), new("age", "21") }); + db.HashSet("student:5555", new HashEntry[] { new("first", "Joen"), new("last", "Ko"), new("age", "20") }); + db.HashSet("teacher:6666", new HashEntry[] { new("first", "Pat"), new("last", "Rod"), new("age", "20") }); + var noFilters = ft.Search(index, new Query()); + Assert.Equal(4, noFilters.TotalResults); + var res1 = ft.Search(index, new Query("@first:Jo*")); + Assert.Equal(2, res1.TotalResults); + var res2 = ft.Search(index, new Query("@first:Pat")); + Assert.Equal(1, res2.TotalResults); + var res3 = ft.Search(index, new Query("@last:Rod")); + Assert.Equal(0, res3.TotalResults); + } + + [Fact] + public void CreateNoParams() + { + IDatabase db = redisFixture.Redis.GetDatabase(); + db.Execute("FLUSHALL"); + var ft = db.FT(); + + Schema sc = new Schema().AddTextField("first", 1.0).AddTextField("last", 1.0).AddNumericField("age"); + Assert.True(ft.Create(index, FTCreateParams.CreateParams(), sc)); + + db.HashSet("student:1111", new HashEntry[] { new("first", "Joe"), new("last", "Dod"), new("age", 18) }); + db.HashSet("student:3333", new HashEntry[] { new("first", "El"), new("last", "Mark"), new("age", 17) }); + db.HashSet("pupil:4444", new HashEntry[] { new("first", "Pat"), new("last", "Shu"), new("age", 21) }); + db.HashSet("student:5555", new HashEntry[] { new("first", "Joen"), new("last", "Ko"), new("age", 20) }); + + SearchResult noFilters = ft.Search(index, new Query()); + Assert.Equal(4, noFilters.TotalResults); + + SearchResult res1 = ft.Search(index, new Query("@first:Jo*")); + Assert.Equal(2, res1.TotalResults); + + SearchResult res2 = ft.Search(index, new Query("@first:Pat")); + Assert.Equal(1, res2.TotalResults); + + SearchResult res3 = ft.Search(index, new Query("@last:Rod")); + Assert.Equal(0, res3.TotalResults); + } + + [Fact] + public async Task CreateNoParamsAsync() + { + IDatabase db = redisFixture.Redis.GetDatabase(); + db.Execute("FLUSHALL"); + var ft = db.FT(); + + Schema sc = new Schema().AddTextField("first", 1.0).AddTextField("last", 1.0).AddNumericField("age"); + Assert.True(await ft.CreateAsync(index, FTCreateParams.CreateParams(), sc)); + + db.HashSet("student:1111", new HashEntry[] { new("first", "Joe"), new("last", "Dod"), new("age", 18) }); + db.HashSet("student:3333", new HashEntry[] { new("first", "El"), new("last", "Mark"), new("age", 17) }); + db.HashSet("pupil:4444", new HashEntry[] { new("first", "Pat"), new("last", "Shu"), new("age", 21) }); + db.HashSet("student:5555", new HashEntry[] { new("first", "Joen"), new("last", "Ko"), new("age", 20) }); + + SearchResult noFilters = ft.Search(index, new Query()); + Assert.Equal(4, noFilters.TotalResults); + + SearchResult res1 = ft.Search(index, new Query("@first:Jo*")); + Assert.Equal(2, res1.TotalResults); + + SearchResult res2 = ft.Search(index, new Query("@first:Pat")); + Assert.Equal(1, res2.TotalResults); + + SearchResult res3 = ft.Search(index, new Query("@last:Rod")); + Assert.Equal(0, res3.TotalResults); + } + + [Fact] + public void CreateWithFieldNames() + { + IDatabase db = redisFixture.Redis.GetDatabase(); + db.Execute("FLUSHALL"); + var ft = db.FT(); + Schema sc = new Schema().AddField(new TextField(FieldName.Of("first").As("given"))) + .AddField(new TextField(FieldName.Of("last"))); + + Assert.True(ft.Create(index, FTCreateParams.CreateParams().Prefix("student:", "pupil:"), sc)); + + db.HashSet("profesor:5555", new HashEntry[] { new("first", "Albert"), new("last", "Blue"), new("age", "55") }); + db.HashSet("student:1111", new HashEntry[] { new("first", "Joe"), new("last", "Dod"), new("age", "18") }); + db.HashSet("pupil:2222", new HashEntry[] { new("first", "Jen"), new("last", "Rod"), new("age", "14") }); + db.HashSet("student:3333", new HashEntry[] { new("first", "El"), new("last", "Mark"), new("age", "17") }); + db.HashSet("pupil:4444", new HashEntry[] { new("first", "Pat"), new("last", "Shu"), new("age", "21") }); + db.HashSet("student:5555", new HashEntry[] { new("first", "Joen"), new("last", "Ko"), new("age", "20") }); + db.HashSet("teacher:6666", new HashEntry[] { new("first", "Pat"), new("last", "Rod"), new("age", "20") }); + + SearchResult noFilters = ft.Search(index, new Query()); + Assert.Equal(5, noFilters.TotalResults); + + SearchResult asOriginal = ft.Search(index, new Query("@first:Jo*")); + Assert.Equal(0, asOriginal.TotalResults); + + SearchResult asAttribute = ft.Search(index, new Query("@given:Jo*")); + Assert.Equal(2, asAttribute.TotalResults); + + SearchResult nonAttribute = ft.Search(index, new Query("@last:Rod")); + Assert.Equal(1, nonAttribute.TotalResults); + } + + [Fact] + public async Task CreateWithFieldNamesAsync() + { + IDatabase db = redisFixture.Redis.GetDatabase(); + db.Execute("FLUSHALL"); + var ft = db.FT(); + Schema sc = new Schema().AddField(new TextField(FieldName.Of("first").As("given"))) + .AddField(new TextField(FieldName.Of("last"))); + + Assert.True(await ft.CreateAsync(index, FTCreateParams.CreateParams().Prefix("student:", "pupil:"), sc)); + + db.HashSet("profesor:5555", new HashEntry[] { new("first", "Albert"), new("last", "Blue"), new("age", "55") }); + db.HashSet("student:1111", new HashEntry[] { new("first", "Joe"), new("last", "Dod"), new("age", "18") }); + db.HashSet("pupil:2222", new HashEntry[] { new("first", "Jen"), new("last", "Rod"), new("age", "14") }); + db.HashSet("student:3333", new HashEntry[] { new("first", "El"), new("last", "Mark"), new("age", "17") }); + db.HashSet("pupil:4444", new HashEntry[] { new("first", "Pat"), new("last", "Shu"), new("age", "21") }); + db.HashSet("student:5555", new HashEntry[] { new("first", "Joen"), new("last", "Ko"), new("age", "20") }); + db.HashSet("teacher:6666", new HashEntry[] { new("first", "Pat"), new("last", "Rod"), new("age", "20") }); + + SearchResult noFilters = await ft.SearchAsync(index, new Query()); + Assert.Equal(5, noFilters.TotalResults); + + SearchResult asOriginal = await ft.SearchAsync(index, new Query("@first:Jo*")); + Assert.Equal(0, asOriginal.TotalResults); + + SearchResult asAttribute = await ft.SearchAsync(index, new Query("@given:Jo*")); + Assert.Equal(2, asAttribute.TotalResults); + + SearchResult nonAttribute = await ft.SearchAsync(index, new Query("@last:Rod")); + Assert.Equal(1, nonAttribute.TotalResults); + } + + [Fact] + public void AlterAdd() + { + IDatabase db = redisFixture.Redis.GetDatabase(); + db.Execute("FLUSHALL"); + var ft = db.FT(); + Schema sc = new Schema().AddTextField("title", 1.0); + + Assert.True(ft.Create(index, FTCreateParams.CreateParams(), sc)); + var fields = new HashEntry("title", "hello world"); + //fields.("title", "hello world"); + for (int i = 0; i < 100; i++) + { + db.HashSet($"doc{i}", fields.Name, fields.Value); + } + SearchResult res = ft.Search(index, new Query("hello world")); + Assert.Equal(100, res.TotalResults); + + Assert.True(ft.Alter(index, new Schema().AddTagField("tags").AddTextField("name", weight: 0.5))); + for (int i = 0; i < 100; i++) + { + var fields2 = new HashEntry[] { new("name", "name" + i), + new("tags", $"tagA,tagB,tag{i}") }; + // assertTrue(client.updateDocument(string.format("doc%d", i), 1.0, fields2)); + db.HashSet($"doc{i}", fields2); + } + SearchResult res2 = ft.Search(index, new Query("@tags:{tagA}")); + Assert.Equal(100, res2.TotalResults); + + var info = ft.Info(index); + Assert.Equal(index, info.IndexName); + Assert.Equal("title", (info.Attributes[0]["identifier"]).ToString()); + Assert.Equal("TAG", (info.Attributes[1]["type"]).ToString()); + Assert.Equal("name", (info.Attributes[2]["attribute"]).ToString()); + } + + [Fact] + public async Task AlterAddAsync() + { + IDatabase db = redisFixture.Redis.GetDatabase(); + db.Execute("FLUSHALL"); + var ft = db.FT(); + Schema sc = new Schema().AddTextField("title", 1.0); + + Assert.True(ft.Create(index, FTCreateParams.CreateParams(), sc)); + var fields = new HashEntry("title", "hello world"); + //fields.("title", "hello world"); + for (int i = 0; i < 100; i++) + { + db.HashSet($"doc{i}", fields.Name, fields.Value); + } + SearchResult res = ft.Search(index, new Query("hello world")); + Assert.Equal(100, res.TotalResults); + + Assert.True(await ft.AlterAsync(index, new Schema().AddTagField("tags").AddTextField("name", weight: 0.5))); + for (int i = 0; i < 100; i++) + { + var fields2 = new HashEntry[] { new("name", "name" + i), + new("tags", $"tagA,tagB,tag{i}") }; + // assertTrue(client.updateDocument(string.format("doc%d", i), 1.0, fields2)); + db.HashSet($"doc{i}", fields2); + } + SearchResult res2 = ft.Search(index, new Query("@tags:{tagA}")); + Assert.Equal(100, res2.TotalResults); + + var info = await ft.InfoAsync(index); + Assert.Equal(index, info.IndexName); + Assert.Equal("title", (info.Attributes[0]["identifier"]).ToString()); + Assert.Equal("TAG", (info.Attributes[1]["type"]).ToString()); + Assert.Equal("name", (info.Attributes[2]["attribute"]).ToString()); + } + + [Fact] + public void TestConfig() + { + IDatabase db = redisFixture.Redis.GetDatabase(); + db.Execute("FLUSHALL"); + var ft = db.FT(); + Assert.True(ft.ConfigSet("TIMEOUT", "100")); + Dictionary<string, string> configMap = ft.ConfigGet("*"); + Assert.Equal("100", configMap["TIMEOUT"].ToString()); + } + + [Fact] + public async Task TestConfigAsnyc() + { + IDatabase db = redisFixture.Redis.GetDatabase(); + db.Execute("FLUSHALL"); + var ft = db.FT(); + Assert.True(await ft.ConfigSetAsync("TIMEOUT", "100")); + Dictionary<string, string> configMap = await ft.ConfigGetAsync("*"); + Assert.Equal("100", configMap["TIMEOUT"].ToString()); + } + + [Fact] + public void configOnTimeout() + { + IDatabase db = redisFixture.Redis.GetDatabase(); + db.Execute("FLUSHALL"); + var ft = db.FT(); + Assert.True(ft.ConfigSet("ON_TIMEOUT", "fail")); + Assert.Equal("fail", ft.ConfigGet("ON_TIMEOUT")["ON_TIMEOUT"]); + + try { ft.ConfigSet("ON_TIMEOUT", "null"); } catch (RedisServerException) { } + } + + [Fact] + public async Task configOnTimeoutAsync() + { + IDatabase db = redisFixture.Redis.GetDatabase(); + db.Execute("FLUSHALL"); + var ft = db.FT(); + Assert.True(await ft.ConfigSetAsync("ON_TIMEOUT", "fail")); + Assert.Equal("fail", (await ft.ConfigGetAsync("ON_TIMEOUT"))["ON_TIMEOUT"]); + + try { ft.ConfigSet("ON_TIMEOUT", "null"); } catch (RedisServerException) { } } + [Fact] + public void TestDialectConfig() + { + IDatabase db = redisFixture.Redis.GetDatabase(); + db.Execute("FLUSHALL"); + var ft = db.FT(); + // confirm default + var result = ft.ConfigGet("DEFAULT_DIALECT"); + Assert.Equal("1", result["DEFAULT_DIALECT"]); // TODO: should be "1" ? + + Assert.True(ft.ConfigSet("DEFAULT_DIALECT", "2")); + Assert.Equal("2", ft.ConfigGet("DEFAULT_DIALECT")["DEFAULT_DIALECT"]); + try { ft.ConfigSet("DEFAULT_DIALECT", "0"); } catch (RedisServerException) { } + try { ft.ConfigSet("DEFAULT_DIALECT", "3"); } catch (RedisServerException) { } + + Assert.Throws<RedisServerException>(() => ft.ConfigSet("DEFAULT_DIALECT", "0")); + Assert.Throws<RedisServerException>(() => ft.ConfigSet("DEFAULT_DIALECT", "3")); + + // Restore to default + Assert.True(ft.ConfigSet("DEFAULT_DIALECT", "1")); + } + + [Fact] + public async Task TestDialectConfigAsync() + { + IDatabase db = redisFixture.Redis.GetDatabase(); + db.Execute("FLUSHALL"); + var ft = db.FT(); + // confirm default + var result = await ft.ConfigGetAsync("DEFAULT_DIALECT"); + Assert.Equal("1", result["DEFAULT_DIALECT"]); // TODO: should be "1" ? + + Assert.True(await ft.ConfigSetAsync("DEFAULT_DIALECT", "2")); + Assert.Equal("2", (await ft.ConfigGetAsync("DEFAULT_DIALECT"))["DEFAULT_DIALECT"]); + try { await ft.ConfigSetAsync("DEFAULT_DIALECT", "0"); } catch (RedisServerException) { } + try { await ft.ConfigSetAsync("DEFAULT_DIALECT", "3"); } catch (RedisServerException) { } + + Assert.Throws<RedisServerException>(() => ft.ConfigSet("DEFAULT_DIALECT", "0")); + Assert.Throws<RedisServerException>(() => ft.ConfigSet("DEFAULT_DIALECT", "3")); + + // Restore to default + Assert.True(ft.ConfigSet("DEFAULT_DIALECT", "1")); + } + + [Fact] + public async Task TestCursor() + { + IDatabase db = redisFixture.Redis.GetDatabase(); + db.Execute("FLUSHALL"); + var ft = db.FT(); + Schema sc = new Schema(); + sc.AddTextField("name", 1.0, sortable: true); + sc.AddNumericField("count", sortable: true); + ft.Create(index, FTCreateParams.CreateParams(), sc); + AddDocument(db, new Document("data1").Set("name", "abc").Set("count", 10)); + AddDocument(db, new Document("data2").Set("name", "def").Set("count", 5)); + AddDocument(db, new Document("data3").Set("name", "def").Set("count", 25)); + + AggregationRequest r = new AggregationRequest() + .GroupBy("@name", Reducers.Sum("@count").As("sum")) + .SortBy(10, SortedField.Desc("@sum")) + .Cursor(1, 3000); + + // actual search + AggregationResult res = ft.Aggregate(index, r); + Row? row = res.GetRow(0); + Assert.NotNull(row); + Assert.Equal("def", row.Value.GetString("name")); + Assert.Equal(30, row.Value.GetLong("sum")); + Assert.Equal(30.0, row.Value.GetDouble("sum")); + + Assert.Equal(0L, row.Value.GetLong("nosuchcol")); + Assert.Equal(0.0, row.Value.GetDouble("nosuchcol")); + Assert.Null(row.Value.GetString("nosuchcol")); + + res = ft.CursorRead(index, res.CursorId, 1); + Row? row2 = res.GetRow(0); + + Assert.NotNull(row2); + Assert.Equal("abc", row2.Value.GetString("name")); + Assert.Equal(10, row2.Value.GetLong("sum")); + + Assert.True(ft.CursorDel(index, res.CursorId)); + + try + { + ft.CursorRead(index, res.CursorId, 1); + Assert.True(false); + } + catch (RedisException) { } + + _ = new AggregationRequest() + .GroupBy("@name", Reducers.Sum("@count").As("sum")) + .SortBy(10, SortedField.Desc("@sum")) + .Cursor(1, 1000); + + await Task.Delay(1000).ConfigureAwait(false); + + try + { + ft.CursorRead(index, res.CursorId, 1); + Assert.True(false); + } + catch (RedisException) { } + } + + [Fact] + public async Task TestCursorAsync() + { + IDatabase db = redisFixture.Redis.GetDatabase(); + db.Execute("FLUSHALL"); + var ft = db.FT(); + Schema sc = new Schema(); + sc.AddTextField("name", 1.0, sortable: true); + sc.AddNumericField("count", sortable: true); + ft.Create(index, FTCreateParams.CreateParams(), sc); + AddDocument(db, new Document("data1").Set("name", "abc").Set("count", 10)); + AddDocument(db, new Document("data2").Set("name", "def").Set("count", 5)); + AddDocument(db, new Document("data3").Set("name", "def").Set("count", 25)); + + AggregationRequest r = new AggregationRequest() + .GroupBy("@name", Reducers.Sum("@count").As("sum")) + .SortBy(10, SortedField.Desc("@sum")) + .Cursor(1, 3000); + + // actual search + AggregationResult res = ft.Aggregate(index, r); + Row? row = res.GetRow(0); + Assert.NotNull(row); + Assert.Equal("def", row.Value.GetString("name")); + Assert.Equal(30, row.Value.GetLong("sum")); + Assert.Equal(30.0, row.Value.GetDouble("sum")); + + Assert.Equal(0L, row.Value.GetLong("nosuchcol")); + Assert.Equal(0.0, row.Value.GetDouble("nosuchcol")); + Assert.Null(row.Value.GetString("nosuchcol")); + + res = await ft.CursorReadAsync(index, res.CursorId, 1); + Row? row2 = res.GetRow(0); + + Assert.NotNull(row2); + Assert.Equal("abc", row2.Value.GetString("name")); + Assert.Equal(10, row2.Value.GetLong("sum")); + + Assert.True(await ft.CursorDelAsync(index, res.CursorId)); + + try + { + await ft.CursorReadAsync(index, res.CursorId, 1); + Assert.True(false); + } + catch (RedisException) { } + + _ = new AggregationRequest() + .GroupBy("@name", Reducers.Sum("@count").As("sum")) + .SortBy(10, SortedField.Desc("@sum")) + .Cursor(1, 1000); + + await Task.Delay(1000).ConfigureAwait(false); + + try + { + await ft.CursorReadAsync(index, res.CursorId, 1); + Assert.True(false); + } + catch (RedisException) { } + } + + [Fact] + public void TestDictionary() + { + IDatabase db = redisFixture.Redis.GetDatabase(); + db.Execute("FLUSHALL"); + var ft = db.FT(); + + Assert.Equal(3L, ft.DictAdd("dict", "bar", "foo", "hello world")); + + var dumResult = ft.DictDump("dict"); + int i = 0; + Assert.Equal("bar",dumResult[i++].ToString()); + Assert.Equal("foo",dumResult[i++].ToString()); + Assert.Equal("hello world",dumResult[i].ToString()); + + Assert.Equal(3L, ft.DictDel("dict", "foo", "bar", "hello world")); + Assert.Equal(ft.DictDump("dict").Length, 0); + } + + [Fact] + public void TestDropIndex() + { + IDatabase db = redisFixture.Redis.GetDatabase(); + db.Execute("FLUSHALL"); + var ft = db.FT(); + Schema sc = new Schema().AddTextField("title", 1.0); + Assert.True(ft.Create(index, FTCreateParams.CreateParams(), sc)); + + Dictionary<string, object> fields = new Dictionary<string, object>(); + fields.Add("title", "hello world"); + for (int i = 0; i < 100; i++) + { + AddDocument(db, $"doc{i}", fields); + } + + SearchResult res = ft.Search(index, new Query("hello world")); + Assert.Equal(100, res.TotalResults); + + Assert.True(ft.DropIndex(index)); + + try + { + ft.Search(index, new Query("hello world")); + //fail("Index should not exist."); + } + catch (RedisServerException ex) + { + Assert.True(ex.Message.Contains("no such index")); + } + Assert.Equal("100", db.Execute("DBSIZE").ToString()); + } + + [Fact] + public async Task TestDropIndexAsync() + { + IDatabase db = redisFixture.Redis.GetDatabase(); + db.Execute("FLUSHALL"); + var ft = db.FT(); + Schema sc = new Schema().AddTextField("title", 1.0); + Assert.True(ft.Create(index, FTCreateParams.CreateParams(), sc)); + + Dictionary<string, object> fields = new Dictionary<string, object>(); + fields.Add("title", "hello world"); + for (int i = 0; i < 100; i++) + { + AddDocument(db, $"doc{i}", fields); + } + + SearchResult res = ft.Search(index, new Query("hello world")); + Assert.Equal(100, res.TotalResults); + + Assert.True(await ft.DropIndexAsync(index)); + + try + { + ft.Search(index, new Query("hello world")); + //fail("Index should not exist."); + } + catch (RedisServerException ex) + { + Assert.True(ex.Message.Contains("no such index")); + } + Assert.Equal("100", db.Execute("DBSIZE").ToString()); + } + + [Fact] + public void dropIndexDD() + { + IDatabase db = redisFixture.Redis.GetDatabase(); + db.Execute("FLUSHALL"); + var ft = db.FT(); + Schema sc = new Schema().AddTextField("title", 1.0); + Assert.True(ft.Create(index, FTCreateParams.CreateParams(), sc)); + + Dictionary<string, object> fields = new Dictionary<string, object>(); + fields.Add("title", "hello world"); + for (int i = 0; i < 100; i++) + { + AddDocument(db, $"doc{i}", fields); + } + + SearchResult res = ft.Search(index, new Query("hello world")); + Assert.Equal(100, res.TotalResults); + + Assert.True(ft.DropIndex(index, true)); + + RedisResult[] keys = (RedisResult[]) db.Execute("KEYS", "*"); + Assert.True(keys.Length == 0); + Assert.Equal("0", db.Execute("DBSIZE").ToString()); + } + + [Fact] + public async Task dropIndexDDAsync() + { + IDatabase db = redisFixture.Redis.GetDatabase(); + db.Execute("FLUSHALL"); + var ft = db.FT(); + Schema sc = new Schema().AddTextField("title", 1.0); + Assert.True(ft.Create(index, FTCreateParams.CreateParams(), sc)); + + Dictionary<string, object> fields = new Dictionary<string, object>(); + fields.Add("title", "hello world"); + for (int i = 0; i < 100; i++) + { + AddDocument(db, $"doc{i}", fields); + } + + SearchResult res = ft.Search(index, new Query("hello world")); + Assert.Equal(100, res.TotalResults); + + Assert.True(await ft.DropIndexAsync(index, true)); + + RedisResult[] keys = (RedisResult[]) db.Execute("KEYS", "*"); + Assert.True(keys.Length == 0); + Assert.Equal("0", db.Execute("DBSIZE").ToString()); + } + + [Fact] + public async Task TestDictionaryAsync() + { + IDatabase db = redisFixture.Redis.GetDatabase(); + db.Execute("FLUSHALL"); + var ft = db.FT(); + + Assert.Equal(3L, await ft.DictAddAsync("dict", "bar", "foo", "hello world")); + + var dumResult = await ft.DictDumpAsync("dict"); + int i = 0; + Assert.Equal("bar",dumResult[i++].ToString()); + Assert.Equal("foo",dumResult[i++].ToString()); + Assert.Equal("hello world",dumResult[i].ToString()); + + Assert.Equal(3L, await ft.DictDelAsync("dict", "foo", "bar", "hello world")); + Assert.Equal((await ft.DictDumpAsync("dict")).Length, 0); + } + + [Fact] + public void TestExplain() + { + IDatabase db = redisFixture.Redis.GetDatabase(); + db.Execute("FLUSHALL"); + var ft = db.FT(); + Schema sc = new Schema() + .AddTextField("f1", 1.0) + .AddTextField("f2", 1.0) + .AddTextField("f3", 1.0); + ft.Create(index, FTCreateParams.CreateParams(), sc); + + String res = ft.Explain(index, new Query("@f3:f3_val @f2:f2_val @f1:f1_val")); + Assert.NotNull(res); + Assert.False(res.Length == 0); + } + + [Fact] + public async Task TestExplainAsync() + { + IDatabase db = redisFixture.Redis.GetDatabase(); + db.Execute("FLUSHALL"); + var ft = db.FT(); + Schema sc = new Schema() + .AddTextField("f1", 1.0) + .AddTextField("f2", 1.0) + .AddTextField("f3", 1.0); + ft.Create(index, FTCreateParams.CreateParams(), sc); + + String res = await ft.ExplainAsync(index, new Query("@f3:f3_val @f2:f2_val @f1:f1_val")); + Assert.NotNull(res); + Assert.False(res.Length == 0); + } + + [Fact] + public void TestSynonym() + { + IDatabase db = redisFixture.Redis.GetDatabase(); + db.Execute("FLUSHALL"); + var ft = db.FT(); + var sc = new Schema().AddTextField("name", 1.0).AddTextField("addr", 1.0); + Assert.True(ft.Create(index, FTCreateParams.CreateParams(), sc)); + + long group1 = 345L; + long group2 = 789L; + string group1_str = group1.ToString(); + string group2_str = group2.ToString(); + Assert.True(ft.SynUpdate(index, group1_str, false, "girl", "baby")); + Assert.True(ft.SynUpdate(index, group1_str, false, "child")); + Assert.True(ft.SynUpdate(index, group2_str, false, "child")); + + Dictionary<string, List<string>> dump = ft.SynDump(index); + + Dictionary<string, List<string>> expected = new Dictionary<string, List<string>>(); + expected.Add("girl", new List<string>() { group1_str }); + expected.Add("baby", new List<string>() { group1_str }); + expected.Add("child", new List<string>() { group1_str, group2_str }); + Assert.Equal(expected, dump); + } + + [Fact] + public async Task TestSynonymAsync() + { + IDatabase db = redisFixture.Redis.GetDatabase(); + db.Execute("FLUSHALL"); + var ft = db.FT(); + var sc = new Schema().AddTextField("name", 1.0).AddTextField("addr", 1.0); + Assert.True(ft.Create(index, FTCreateParams.CreateParams(), sc)); + + long group1 = 345L; + long group2 = 789L; + string group1_str = group1.ToString(); + string group2_str = group2.ToString(); + Assert.True(await ft.SynUpdateAsync(index, group1_str, false, "girl", "baby")); + Assert.True(await ft.SynUpdateAsync(index, group1_str, false, "child")); + Assert.True(await ft.SynUpdateAsync(index, group2_str, false, "child")); + + Dictionary<string, List<string>> dump = await ft.SynDumpAsync(index); + + Dictionary<string, List<string>> expected = new Dictionary<string, List<string>>(); + expected.Add("girl", new List<string>() { group1_str }); + expected.Add("baby", new List<string>() { group1_str }); + expected.Add("child", new List<string>() { group1_str, group2_str }); + Assert.Equal(expected, dump); + } [Fact] public void TestModulePrefixs() @@ -30,6 +1074,122 @@ public void TestModulePrefixs() Assert.NotEqual(ft1.GetHashCode(), ft2.GetHashCode()); } + [Fact] + public async Task GetTagFieldSyncAsync() + { + IDatabase db = redisFixture.Redis.GetDatabase(); + db.Execute("FLUSHALL"); + var ft = db.FT(); + Schema sc = new Schema() + .AddTextField("title", 1.0) + .AddTagField("category"); + + Assert.True(ft.Create(index, FTCreateParams.CreateParams(), sc)); + Dictionary<string, object> fields1 = new Dictionary<string, object>(); + fields1.Add("title", "hello world"); + fields1.Add("category", "red"); + // assertTrue(client.AddDocument(db, "foo", fields1)); + AddDocument(db, "foo", fields1); + Dictionary<string, object> fields2 = new Dictionary<string, object>(); + fields2.Add("title", "hello world"); + fields2.Add("category", "blue"); + // assertTrue(client.AddDocument(db, "bar", fields2)); + AddDocument(db, "bar", fields2); + Dictionary<string, object> fields3 = new Dictionary<string, object>(); + fields3.Add("title", "hello world"); + fields3.Add("category", "green,yellow"); + // assertTrue(client.AddDocument(db, "baz", fields3)); + AddDocument(db, "baz", fields3); + Dictionary<string, object> fields4 = new Dictionary<string, object>(); + fields4.Add("title", "hello world"); + fields4.Add("category", "orange;purple"); + // assertTrue(client.AddDocument(db, "qux", fields4)); + AddDocument(db, "qux", fields4); + + Assert.Equal(1, ft.Search(index, new Query("@category:{red}")).TotalResults); + Assert.Equal(1, ft.Search(index, new Query("@category:{blue}")).TotalResults); + Assert.Equal(1, ft.Search(index, new Query("hello @category:{red}")).TotalResults); + Assert.Equal(1, ft.Search(index, new Query("hello @category:{blue}")).TotalResults); + Assert.Equal(1, ft.Search(index, new Query("@category:{yellow}")).TotalResults); + Assert.Equal(0, ft.Search(index, new Query("@category:{purple}")).TotalResults); + Assert.Equal(1, ft.Search(index, new Query("@category:{orange\\;purple}")).TotalResults); + Assert.Equal(4, ft.Search(index, new Query("hello")).TotalResults); + + var SyncRes = ft.TagVals(index, "category"); + int i = 0; + Assert.Equal(SyncRes[i++].ToString(), "blue"); + Assert.Equal(SyncRes[i++].ToString(), "green"); + Assert.Equal(SyncRes[i++].ToString(), "orange;purple"); + Assert.Equal(SyncRes[i++].ToString(), "red"); + Assert.Equal(SyncRes[i++].ToString(), "yellow"); + + var AsyncRes = await ft.TagValsAsync(index, "category"); + i = 0; + Assert.Equal(SyncRes[i++].ToString(), "blue"); + Assert.Equal(SyncRes[i++].ToString(), "green"); + Assert.Equal(SyncRes[i++].ToString(), "orange;purple"); + Assert.Equal(SyncRes[i++].ToString(), "red"); + Assert.Equal(SyncRes[i++].ToString(), "yellow"); + } + + [Fact] + public async Task TestGetTagFieldWithNonDefaultSeparatorSyncAsync() + { + IDatabase db = redisFixture.Redis.GetDatabase(); + db.Execute("FLUSHALL"); + var ft = db.FT(); + Schema sc = new Schema() + .AddTextField("title", 1.0) + .AddTagField("category", separator: ";"); + + Assert.True(ft.Create(index, FTCreateParams.CreateParams(), sc)); + Dictionary<string, object> fields1 = new Dictionary<string, object>(); + fields1.Add("title", "hello world"); + fields1.Add("category", "red"); + // assertTrue(client.AddDocument(db, "foo", fields1)); + AddDocument(db, "foo", fields1); + Dictionary<string, object> fields2 = new Dictionary<string, object>(); + fields2.Add("title", "hello world"); + fields2.Add("category", "blue"); + // assertTrue(client.AddDocument(db, "bar", fields2)); + AddDocument(db, "bar", fields2); + Dictionary<string, object> fields3 = new Dictionary<string, object>(); + fields3.Add("title", "hello world"); + fields3.Add("category", "green;yellow"); + AddDocument(db, "baz", fields3); + // assertTrue(client.AddDocument(db, "baz", fields3)); + Dictionary<string, object> fields4 = new Dictionary<string, object>(); + fields4.Add("title", "hello world"); + fields4.Add("category", "orange,purple"); + // assertTrue(client.AddDocument(db, "qux", fields4)); + AddDocument(db, "qux", fields4); + + Assert.Equal(1, ft.Search(index, new Query("@category:{red}")).TotalResults); + Assert.Equal(1, ft.Search(index, new Query("@category:{blue}")).TotalResults); + Assert.Equal(1, ft.Search(index, new Query("hello @category:{red}")).TotalResults); + Assert.Equal(1, ft.Search(index, new Query("hello @category:{blue}")).TotalResults); + Assert.Equal(1, ft.Search(index, new Query("hello @category:{yellow}")).TotalResults); + Assert.Equal(0, ft.Search(index, new Query("@category:{purple}")).TotalResults); + Assert.Equal(1, ft.Search(index, new Query("@category:{orange\\,purple}")).TotalResults); + Assert.Equal(4, ft.Search(index, new Query("hello")).TotalResults); + + var SyncRes = ft.TagVals(index, "category"); + int i = 0; + Assert.Equal(SyncRes[i++].ToString(), "blue"); + Assert.Equal(SyncRes[i++].ToString(), "green"); + Assert.Equal(SyncRes[i++].ToString(), "orange,purple"); + Assert.Equal(SyncRes[i++].ToString(), "red"); + Assert.Equal(SyncRes[i++].ToString(), "yellow"); + + var AsyncRes = await ft.TagValsAsync(index, "category"); + i = 0; + Assert.Equal(SyncRes[i++].ToString(), "blue"); + Assert.Equal(SyncRes[i++].ToString(), "green"); + Assert.Equal(SyncRes[i++].ToString(), "orange,purple"); + Assert.Equal(SyncRes[i++].ToString(), "red"); + Assert.Equal(SyncRes[i++].ToString(), "yellow"); + } + [Fact] public void TestModulePrefixs1() { @@ -50,7 +1210,5 @@ public void TestModulePrefixs1() // ... conn.Dispose(); } - } - } \ No newline at end of file diff --git a/tests/NRedisStack.Tests/Tdigest/TdigestTests.cs b/tests/NRedisStack.Tests/Tdigest/TdigestTests.cs index 9201c204..55e2796c 100644 --- a/tests/NRedisStack.Tests/Tdigest/TdigestTests.cs +++ b/tests/NRedisStack.Tests/Tdigest/TdigestTests.cs @@ -90,164 +90,178 @@ public async Task TestCreateAndInfoAsync() } } - [Fact] - public void TestRank() - { - IDatabase db = redisFixture.Redis.GetDatabase(); - db.Execute("FLUSHALL"); - var tdigest = db.TDIGEST(); - - Assert.True(tdigest.Create("t-digest", 500)); - var tuples = new Tuple<double, long>[20]; - for (int i = 0; i < 20; i++) - { - tuples[i] = new(i, 1); - } - Assert.True(tdigest.Add("t-digest", tuples)); - Assert.Equal(-1, tdigest.Rank("t-digest", -1)[0]); - Assert.Equal(1, tdigest.Rank("t-digest", 0)[0]); - Assert.Equal(11, tdigest.Rank("t-digest", 10)[0]); - Assert.Equal(new long[3] { -1, 20, 10 }, tdigest.Rank("t-digest", -20, 20, 9)); - } - - [Fact] - public async Task TestRankAsync() - { - IDatabase db = redisFixture.Redis.GetDatabase(); - db.Execute("FLUSHALL"); - var tdigest = db.TDIGEST(); - - Assert.True(tdigest.Create("t-digest", 500)); - var tuples = new Tuple<double, long>[20]; - for (int i = 0; i < 20; i++) - { - tuples[i] = new(i, 1); - } - Assert.True(tdigest.Add("t-digest", tuples)); - Assert.Equal(-1, (await tdigest.RankAsync("t-digest", -1))[0]); - Assert.Equal(1, (await tdigest.RankAsync("t-digest", 0))[0]); - Assert.Equal(11, (await tdigest.RankAsync("t-digest", 10))[0]); - Assert.Equal(new long[3] { -1, 20, 10 }, await tdigest.RankAsync("t-digest", -20, 20, 9)); - } - - [Fact] - public void TestRevRank() - { - IDatabase db = redisFixture.Redis.GetDatabase(); - db.Execute("FLUSHALL"); - var tdigest = db.TDIGEST(); - - Assert.True(tdigest.Create("t-digest", 500)); - var tuples = new Tuple<double, long>[20]; - for (int i = 0; i < 20; i++) - { - tuples[i] = new(i, 1); - } - - Assert.True(tdigest.Add("t-digest", tuples)); - Assert.Equal(-1, tdigest.RevRank("t-digest", 20)[0]); - Assert.Equal(20, tdigest.RevRank("t-digest", 0)[0]); - Assert.Equal(new long[3] { -1, 20, 10 }, tdigest.RevRank("t-digest", 21, 0, 10)); - } - - [Fact] - public async Task TestRevRankAsync() - { - IDatabase db = redisFixture.Redis.GetDatabase(); - db.Execute("FLUSHALL"); - var tdigest = db.TDIGEST(); - - Assert.True(tdigest.Create("t-digest", 500)); - var tuples = new Tuple<double, long>[20]; - for (int i = 0; i < 20; i++) - { - tuples[i] = new(i, 1); - } - - Assert.True(tdigest.Add("t-digest", tuples)); - Assert.Equal(-1, (await tdigest.RevRankAsync("t-digest", 20))[0]); - Assert.Equal(20, (await tdigest.RevRankAsync("t-digest", 0))[0]); - Assert.Equal(new long[3] { -1, 20, 10 }, await tdigest.RevRankAsync("t-digest", 21, 0, 10)); - } + // [Fact] + // public void TestRank() + // { + // IDatabase db = redisFixture.Redis.GetDatabase(); + // db.Execute("FLUSHALL"); + // var tdigest = db.TDIGEST(); + + // Assert.True(tdigest.Create("t-digest", 500)); + // var tuples = new Tuple<double, long>[20]; + // for (int i = 0; i < 20; i++) + // { + // tuples[i] = new(i, 1); + // } + // Assert.True(tdigest.Add("t-digest", tuples)); + // Assert.Equal(-1, tdigest.Rank("t-digest", -1)[0]); + // Assert.Equal(1, tdigest.Rank("t-digest", 0)[0]); + // Assert.Equal(11, tdigest.Rank("t-digest", 10)[0]); + // Assert.Equal(new long[3] { -1, 20, 10 }, tdigest.Rank("t-digest", -20, 20, 9)); + // } - // TODO: fix those tests: [Fact] - public void TestByRank() + public void TestRankCommands() { + //final String key = "ranks"; IDatabase db = redisFixture.Redis.GetDatabase(); db.Execute("FLUSHALL"); var tdigest = db.TDIGEST(); - - Assert.True(tdigest.Create("t-digest", 500)); - var tuples = new Tuple<double, long>[10]; - for (int i = 1; i <= 10; i++) - { - tuples[i - 1] = new(i, 1); - } - Assert.True(tdigest.Add("t-digest", tuples)); - Assert.Equal(1, tdigest.ByRank("t-digest", 0)[0]); - Assert.Equal(10, tdigest.ByRank("t-digest", 9)[0]); - Assert.True(double.IsInfinity(tdigest.ByRank("t-digest", 100)[0])); - //Assert.Throws<RedisServerException>(() => tdigest.ByRank("t-digest", -1)[0]); - } - - [Fact] - public async Task TestByRankAsync() - { - IDatabase db = redisFixture.Redis.GetDatabase(); - db.Execute("FLUSHALL"); - var tdigest = db.TDIGEST(); - - Assert.True(tdigest.Create("t-digest", 500)); - var tuples = new Tuple<double, long>[10]; - for (int i = 1; i <= 10; i++) - { - tuples[i - 1] = new(i, 1); - } - Assert.True(tdigest.Add("t-digest", tuples)); - Assert.Equal(1, (await tdigest.ByRankAsync("t-digest", 0))[0]); - Assert.Equal(10, (await tdigest.ByRankAsync("t-digest", 9))[0]); - Assert.True(double.IsInfinity((await tdigest.ByRankAsync("t-digest", 100))[0])); - } - - [Fact] - public void TestByRevRank() - { - IDatabase db = redisFixture.Redis.GetDatabase(); - db.Execute("FLUSHALL"); - var tdigest = db.TDIGEST(); - - Assert.True(tdigest.Create("t-digest", 500)); - var tuples = new Tuple<double, long>[10]; - for (int i = 1; i <= 10; i++) - { - tuples[i - 1] = new(i, 1); - } - Assert.True(tdigest.Add("t-digest", tuples)); - Assert.Equal(10, tdigest.ByRevRank("t-digest", 0)[0]); - Assert.Equal(2, tdigest.ByRevRank("t-digest", 9)[0]); - Assert.True(double.IsInfinity(-tdigest.ByRevRank("t-digest", 100)[0])); - //Assert.Throws<RedisServerException>(() => tdigest.ByRank("t-digest", -1)[0]); - } - - [Fact] - public async Task TestByRevRankAsync() - { - IDatabase db = redisFixture.Redis.GetDatabase(); - db.Execute("FLUSHALL"); - var tdigest = db.TDIGEST(); - - Assert.True(tdigest.Create("t-digest", 500)); - var tuples = new Tuple<double, long>[10]; - for (int i = 1; i <= 10; i++) - { - tuples[i - 1] = new(i, 1); - } - Assert.True(tdigest.Add("t-digest", tuples)); - Assert.Equal(10, (await tdigest.ByRevRankAsync("t-digest", 0))[0]); - Assert.Equal(2, (await tdigest.ByRevRankAsync("t-digest", 9))[0]); - Assert.True(double.IsInfinity(-(await tdigest.ByRevRankAsync("t-digest", 100))[0])); - } + tdigest.Create(key); + tdigest.Add(key, 2d, 3d, 5d); + Assert.Equal(new long[] { 1l, 2l }, tdigest.Rank(key, 2, 4)); + Assert.Equal(new long[] { 0, 1 }, tdigest.RevRank(key, 5, 4)); + Assert.Equal(new double[] { 2, 3 }, tdigest.ByRank(key, 0, 1)); + Assert.Equal(new double[] { 5, 3 }, tdigest.ByRevRank(key, 1, 2)); + } + + // [Fact] + // public async Task TestRankAsync() + // { + // IDatabase db = redisFixture.Redis.GetDatabase(); + // db.Execute("FLUSHALL"); + // var tdigest = db.TDIGEST(); + + // Assert.True(tdigest.Create("t-digest", 500)); + // var tuples = new Tuple<double, long>[20]; + // for (int i = 0; i < 20; i++) + // { + // tuples[i] = new(i, 1); + // } + // Assert.True(tdigest.Add("t-digest", tuples)); + // Assert.Equal(-1, (await tdigest.RankAsync("t-digest", -1))[0]); + // Assert.Equal(1, (await tdigest.RankAsync("t-digest", 0))[0]); + // Assert.Equal(11, (await tdigest.RankAsync("t-digest", 10))[0]); + // Assert.Equal(new long[3] { -1, 20, 10 }, await tdigest.RankAsync("t-digest", -20, 20, 9)); + // } + + // [Fact] + // public void TestRevRank() + // { + // IDatabase db = redisFixture.Redis.GetDatabase(); + // db.Execute("FLUSHALL"); + // var tdigest = db.TDIGEST(); + + // Assert.True(tdigest.Create("t-digest", 500)); + // var tuples = new Tuple<double, long>[20]; + // for (int i = 0; i < 20; i++) + // { + // tuples[i] = new(i, 1); + // } + + // Assert.True(tdigest.Add("t-digest", tuples)); + // Assert.Equal(-1, tdigest.RevRank("t-digest", 20)[0]); + // Assert.Equal(20, tdigest.RevRank("t-digest", 0)[0]); + // Assert.Equal(new long[3] { -1, 20, 10 }, tdigest.RevRank("t-digest", 21, 0, 10)); + // } + + // [Fact] + // public async Task TestRevRankAsync() + // { + // IDatabase db = redisFixture.Redis.GetDatabase(); + // db.Execute("FLUSHALL"); + // var tdigest = db.TDIGEST(); + + // Assert.True(tdigest.Create("t-digest", 500)); + // var tuples = new Tuple<double, long>[20]; + // for (int i = 0; i < 20; i++) + // { + // tuples[i] = new(i, 1); + // } + + // Assert.True(tdigest.Add("t-digest", tuples)); + // Assert.Equal(-1, (await tdigest.RevRankAsync("t-digest", 20))[0]); + // Assert.Equal(20, (await tdigest.RevRankAsync("t-digest", 0))[0]); + // Assert.Equal(new long[3] { -1, 20, 10 }, await tdigest.RevRankAsync("t-digest", 21, 0, 10)); + // } + + // [Fact] + // public void TestByRank() + // { + // IDatabase db = redisFixture.Redis.GetDatabase(); + // db.Execute("FLUSHALL"); + // var tdigest = db.TDIGEST(); + + // Assert.True(tdigest.Create("t-digest", 500)); + // var tuples = new Tuple<double, long>[10]; + // for (int i = 1; i <= 10; i++) + // { + // tuples[i - 1] = new(i, 1); + // } + // Assert.True(tdigest.Add("t-digest", tuples)); + // Assert.Equal(1, tdigest.ByRank("t-digest", 0)[0]); + // Assert.Equal(10, tdigest.ByRank("t-digest", 9)[0]); + // Assert.True(double.IsInfinity(tdigest.ByRank("t-digest", 100)[0])); + // //Assert.Throws<RedisServerException>(() => tdigest.ByRank("t-digest", -1)[0]); + // } + + // [Fact] + // public async Task TestByRankAsync() + // { + // IDatabase db = redisFixture.Redis.GetDatabase(); + // db.Execute("FLUSHALL"); + // var tdigest = db.TDIGEST(); + + // Assert.True(tdigest.Create("t-digest", 500)); + // var tuples = new Tuple<double, long>[10]; + // for (int i = 1; i <= 10; i++) + // { + // tuples[i - 1] = new(i, 1); + // } + // Assert.True(tdigest.Add("t-digest", tuples)); + // Assert.Equal(1, (await tdigest.ByRankAsync("t-digest", 0))[0]); + // Assert.Equal(10, (await tdigest.ByRankAsync("t-digest", 9))[0]); + // Assert.True(double.IsInfinity((await tdigest.ByRankAsync("t-digest", 100))[0])); + // } + + // [Fact] + // public void TestByRevRank() + // { + // IDatabase db = redisFixture.Redis.GetDatabase(); + // db.Execute("FLUSHALL"); + // var tdigest = db.TDIGEST(); + + // Assert.True(tdigest.Create("t-digest", 500)); + // var tuples = new Tuple<double, long>[10]; + // for (int i = 1; i <= 10; i++) + // { + // tuples[i - 1] = new(i, 1); + // } + // Assert.True(tdigest.Add("t-digest", tuples)); + // Assert.Equal(10, tdigest.ByRevRank("t-digest", 0)[0]); + // Assert.Equal(2, tdigest.ByRevRank("t-digest", 9)[0]); + // Assert.True(double.IsInfinity(-tdigest.ByRevRank("t-digest", 100)[0])); + // //Assert.Throws<RedisServerException>(() => tdigest.ByRank("t-digest", -1)[0]); + // } + + // [Fact] + // public async Task TestByRevRankAsync() + // { + // IDatabase db = redisFixture.Redis.GetDatabase(); + // db.Execute("FLUSHALL"); + // var tdigest = db.TDIGEST(); + + // Assert.True(tdigest.Create("t-digest", 500)); + // var tuples = new Tuple<double, long>[10]; + // for (int i = 1; i <= 10; i++) + // { + // tuples[i - 1] = new(i, 1); + // } + // Assert.True(tdigest.Add("t-digest", tuples)); + // Assert.Equal(10, (await tdigest.ByRevRankAsync("t-digest", 0))[0]); + // Assert.Equal(2, (await tdigest.ByRevRankAsync("t-digest", 9))[0]); + // Assert.True(double.IsInfinity(-(await tdigest.ByRevRankAsync("t-digest", 100))[0])); + // } [Fact] @@ -264,7 +278,8 @@ public void TestReset() Assert.True(tdigest.Reset("reset")); AssertMergedUnmergedNodes(tdigest, "reset", 0, 0); - tdigest.Add("reset", RandomValueWeight(), RandomValueWeight(), RandomValueWeight()); + // tdigest.Add("reset", RandomValue(), RandomValue(), RandomValue()); + tdigest.Add("reset", RandomValue(), RandomValue(), RandomValue()); AssertMergedUnmergedNodes(tdigest, "reset", 0, 3); Assert.True(tdigest.Reset("reset")); @@ -285,7 +300,9 @@ public async Task TestResetAsync() Assert.True(await tdigest.ResetAsync("reset")); AssertMergedUnmergedNodes(tdigest, "reset", 0, 0); - await tdigest.AddAsync("reset", RandomValueWeight(), RandomValueWeight(), RandomValueWeight()); + //await tdigest.AddAsync("reset", RandomValue(), RandomValue(), RandomValue()); + tdigest.Add("reset", RandomValue(), RandomValue(), RandomValue()); + AssertMergedUnmergedNodes(tdigest, "reset", 0, 3); Assert.True(await tdigest.ResetAsync("reset")); @@ -301,10 +318,10 @@ public void TestAdd() tdigest.Create("tdadd", 100); - Assert.True(tdigest.Add("tdadd", RandomValueWeight())); + Assert.True(tdigest.Add("tdadd", RandomValue())); AssertMergedUnmergedNodes(tdigest, "tdadd", 0, 1); - Assert.True(tdigest.Add("tdadd", RandomValueWeight(), RandomValueWeight(), RandomValueWeight(), RandomValueWeight())); + Assert.True(tdigest.Add("tdadd", RandomValue(), RandomValue(), RandomValue(), RandomValue())); AssertMergedUnmergedNodes(tdigest, "tdadd", 0, 5); } @@ -317,10 +334,10 @@ public async Task TestAddAsync() await tdigest.CreateAsync("tdadd", 100); - Assert.True(await tdigest.AddAsync("tdadd", RandomValueWeight())); + Assert.True(await tdigest.AddAsync("tdadd", RandomValue())); AssertMergedUnmergedNodes(tdigest, "tdadd", 0, 1); - Assert.True(await tdigest.AddAsync("tdadd", RandomValueWeight(), RandomValueWeight(), RandomValueWeight(), RandomValueWeight())); + Assert.True(await tdigest.AddAsync("tdadd", RandomValue(), RandomValue(), RandomValue(), RandomValue())); AssertMergedUnmergedNodes(tdigest, "tdadd", 0, 5); } @@ -337,8 +354,10 @@ public void TestMerge() Assert.True(tdigest.Merge("td2", sourceKeys: "td4m")); AssertMergedUnmergedNodes(tdigest, "td2", 0, 0); - tdigest.Add("td2", DefinedValueWeight(1, 1), DefinedValueWeight(1, 1), DefinedValueWeight(1, 1)); - tdigest.Add("td4m", DefinedValueWeight(1, 100), DefinedValueWeight(1, 100)); + // tdigest.Add("td2", DefinedValueWeight(1, 1), DefinedValueWeight(1, 1), DefinedValueWeight(1, 1)); + // tdigest.Add("td4m", DefinedValueWeight(1, 100), DefinedValueWeight(1, 100)); + tdigest.Add("td2", 1, 1, 1); + tdigest.Add("td4m", 1, 1); Assert.True(tdigest.Merge("td2", sourceKeys: "td4m")); AssertMergedUnmergedNodes(tdigest, "td2", 3, 2); @@ -358,8 +377,11 @@ public async Task TestMergeAsync() Assert.True(await tdigest.MergeAsync("td2", sourceKeys: "td4m")); AssertMergedUnmergedNodes(tdigest, "td2", 0, 0); - await tdigest.AddAsync("td2", DefinedValueWeight(1, 1), DefinedValueWeight(1, 1), DefinedValueWeight(1, 1)); - await tdigest.AddAsync("td4m", DefinedValueWeight(1, 100), DefinedValueWeight(1, 100)); + // await tdigest.AddAsync("td2", DefinedValueWeight(1, 1), DefinedValueWeight(1, 1), DefinedValueWeight(1, 1)); + // await tdigest.AddAsync("td4m", DefinedValueWeight(1, 100), DefinedValueWeight(1, 100)); + + await tdigest.AddAsync("td2", 1, 1, 1); + await tdigest.AddAsync("td4m", 1, 1); Assert.True(await tdigest.MergeAsync("td2", sourceKeys: "td4m")); AssertMergedUnmergedNodes(tdigest, "td2", 3, 2); @@ -374,8 +396,10 @@ public void MergeMultiAndParams() tdigest.Create("from1", 100); tdigest.Create("from2", 200); - tdigest.Add("from1", 1d, 1); - tdigest.Add("from2", 1d, 10); + // tdigest.Add("from1", 1d, 1); + // tdigest.Add("from2", 1d, 10); + tdigest.Add("from1", 1d); + tdigest.Add("from2", WeightedValue(1d, 10)); Assert.True(tdigest.Merge("to", 2, sourceKeys: new RedisKey[] { "from1", "from2" })); AssertTotalWeight(tdigest, "to", 11d); @@ -393,8 +417,10 @@ public async Task MergeMultiAndParamsAsync() tdigest.Create("from1", 100); tdigest.Create("from2", 200); - tdigest.Add("from1", 1d, 1); - tdigest.Add("from2", 1d, 10); + // tdigest.Add("from1", 1d, 1); + // tdigest.Add("from2", 1d, 10); + tdigest.Add("from1", 1d); + tdigest.Add("from2", WeightedValue(1d, 10)); Assert.True(await tdigest.MergeAsync("to", 2, sourceKeys: new RedisKey[] { "from1", "from2" })); AssertTotalWeight(tdigest, "to", 11d); @@ -416,9 +442,13 @@ public void TestCDF() Assert.Equal(double.NaN, item); } - tdigest.Add("tdcdf", DefinedValueWeight(1, 1), DefinedValueWeight(1, 1), DefinedValueWeight(1, 1)); - tdigest.Add("tdcdf", DefinedValueWeight(100, 1), DefinedValueWeight(100, 1)); + // tdigest.Add("tdcdf", DefinedValueWeight(1, 1), DefinedValueWeight(1, 1), DefinedValueWeight(1, 1)); + // tdigest.Add("tdcdf", DefinedValueWeight(100, 1), DefinedValueWeight(100, 1)); + + tdigest.Add("tdcdf", 1, 1, 1); + tdigest.Add("tdcdf", 100, 100); Assert.Equal(new double[] { 0.6 }, tdigest.CDF("tdcdf", 50)); + tdigest.CDF("tdcdf", 25, 50, 75); // TODO: Why needed? } [Fact] @@ -434,9 +464,14 @@ public async Task TestCDFAsync() Assert.Equal(double.NaN, item); } - await tdigest.AddAsync("tdcdf", DefinedValueWeight(1, 1), DefinedValueWeight(1, 1), DefinedValueWeight(1, 1)); - await tdigest.AddAsync("tdcdf", DefinedValueWeight(100, 1), DefinedValueWeight(100, 1)); + // await tdigest.AddAsync("tdcdf", DefinedValueWeight(1, 1), DefinedValueWeight(1, 1), DefinedValueWeight(1, 1)); + // await tdigest.AddAsync("tdcdf", DefinedValueWeight(100, 1), DefinedValueWeight(100, 1)); + tdigest.Add("tdcdf", 1, 1, 1); + tdigest.Add("tdcdf", 100, 100); + Assert.Equal(new double[] { 0.6 }, await tdigest.CDFAsync("tdcdf", 50)); + await tdigest.CDFAsync("tdcdf", 25, 50, 75); // TODO: Why needed? + } [Fact] @@ -450,8 +485,10 @@ public void TestQuantile() var resDelete = tdigest.Quantile("tdqnt", 0.5); Assert.Equal(new double[] { double.NaN }, tdigest.Quantile("tdqnt", 0.5)); - tdigest.Add("tdqnt", DefinedValueWeight(1, 1), DefinedValueWeight(1, 1), DefinedValueWeight(1, 1)); - tdigest.Add("tdqnt", DefinedValueWeight(100, 1), DefinedValueWeight(100, 1)); + // tdigest.Add("tdqnt", DefinedValueWeight(1, 1), DefinedValueWeight(1, 1), DefinedValueWeight(1, 1)); + // tdigest.Add("tdqnt", DefinedValueWeight(100, 1), DefinedValueWeight(100, 1)); + tdigest.Add("tdqnt", 1, 1, 1); + tdigest.Add("tdqnt", 100, 100); Assert.Equal(new double[] { 1 }, tdigest.Quantile("tdqnt", 0.5)); } @@ -466,8 +503,10 @@ public async Task TestQuantileAsync() var resDelete = await tdigest.QuantileAsync("tdqnt", 0.5); Assert.Equal(new double[] { double.NaN }, await tdigest.QuantileAsync("tdqnt", 0.5)); - await tdigest.AddAsync("tdqnt", DefinedValueWeight(1, 1), DefinedValueWeight(1, 1), DefinedValueWeight(1, 1)); - await tdigest.AddAsync("tdqnt", DefinedValueWeight(100, 1), DefinedValueWeight(100, 1)); + // await tdigest.AddAsync("tdqnt", DefinedValueWeight(1, 1), DefinedValueWeight(1, 1), DefinedValueWeight(1, 1)); + // await tdigest.AddAsync("tdqnt", DefinedValueWeight(100, 1), DefinedValueWeight(100, 1)); + tdigest.Add("tdqnt", 1, 1, 1); + tdigest.Add("tdqnt", 100, 100); Assert.Equal(new double[] { 1 }, await tdigest.QuantileAsync("tdqnt", 0.5)); } @@ -482,8 +521,10 @@ public void TestMinAndMax() Assert.Equal(double.NaN, tdigest.Min(key)); Assert.Equal(double.NaN, tdigest.Max(key)); - tdigest.Add(key, DefinedValueWeight(2, 1)); - tdigest.Add(key, DefinedValueWeight(5, 1)); + // tdigest.Add(key, DefinedValueWeight(2, 1)); + // tdigest.Add(key, DefinedValueWeight(5, 1)); + tdigest.Add(key, 2); + tdigest.Add(key, 5); Assert.Equal(2d, tdigest.Min(key)); Assert.Equal(5d, tdigest.Max(key)); } @@ -499,8 +540,10 @@ public async Task TestMinAndMaxAsync() Assert.Equal(double.NaN, await tdigest.MinAsync(key)); Assert.Equal(double.NaN, await tdigest.MaxAsync(key)); - await tdigest.AddAsync(key, DefinedValueWeight(2, 1)); - await tdigest.AddAsync(key, DefinedValueWeight(5, 1)); + // await tdigest.AddAsync(key, DefinedValueWeight(2, 1)); + // await tdigest.AddAsync(key, DefinedValueWeight(5, 1)); + tdigest.Add(key, 2); + tdigest.Add(key, 5); Assert.Equal(2d, await tdigest.MinAsync(key)); Assert.Equal(5d, await tdigest.MaxAsync(key)); } @@ -516,7 +559,8 @@ public void TestTrimmedMean() for (int i = 0; i < 20; i++) { - tdigest.Add(key, new Tuple<double, long>(i, 1)); + //tdigest.Add(key, new Tuple<double, long>(i, 1)); + tdigest.Add(key, i); } Assert.Equal(9.5, tdigest.TrimmedMean(key, 0.1, 0.9)); @@ -536,7 +580,8 @@ public async Task TestTrimmedMeanAsync() for (int i = 0; i < 20; i++) { - await tdigest.AddAsync(key, new Tuple<double, long>(i, 1)); + // await tdigest.AddAsync(key, new Tuple<double, long>(i, 1)); + tdigest.Add(key, i); } Assert.Equal(9.5, await tdigest.TrimmedMeanAsync(key, 0.1, 0.9)); @@ -580,6 +625,11 @@ public void TestModulePrefixs1() } } + private static double RandomValue() + { + Random random = new Random(); + return random.NextDouble() * 10000; + } static Tuple<double, long> RandomValueWeight() { @@ -602,4 +652,11 @@ static Tuple<double, long> DefinedValueWeight(double value, long weight) { return new Tuple<double, long>(value, weight); } + + private static double[] WeightedValue(double value, int weight) + { + double[] values = new double[weight]; + Array.Fill(values, value); + return values; + } }