diff --git a/wiki/content/query-language/functions.md b/wiki/content/query-language/functions.md index 451a11d943e..5b653ed7de0 100644 --- a/wiki/content/query-language/functions.md +++ b/wiki/content/query-language/functions.md @@ -15,7 +15,7 @@ Comparison functions (`eq`, `ge`, `gt`, `le`, `lt`) in the query root (aka `func be applied on [indexed predicates]({{< relref "query-language/schema.md#indexing" >}}). Since v1.2, comparison functions can now be used on [@filter]({{}}) directives even on predicates that have not been indexed. -Filtering on non-indexed predicates can be slow for large datasets, as they require +Filtering on non-indexed predicates can be slow for large datasets, as they require iterating over all of the possible values at the level where the filter is being used. All other functions, in the query root or in the filter can only be applied to indexed predicates. @@ -265,7 +265,7 @@ Index Required: An index is required for the `eq(predicate, ...)` forms (see tab | `int` | `int` | | `float` | `float` | | `bool` | `bool` | -| `string` | `exact`, `hash` | +| `string` | `exact`, `hash`, `term`, `fulltext` | | `dateTime` | `dateTime` | Test for equality of a predicate or variable to a value or find in a list of values. 
diff --git a/worker/tokens.go b/worker/tokens.go index f39451c9f45..f7e6e04130d 100644 --- a/worker/tokens.go +++ b/worker/tokens.go @@ -105,11 +105,13 @@ func pickTokenizer(ctx context.Context, attr string, f string) (tok.Tokenizer, e return nil, errors.Errorf("Attribute:%s does not have proper index for comparison", attr) } - // If we didn't find a sortable or !isLossy() tokenizer for eq function, - // then let's see if we can find a term or fulltext tokenizer - for _, t := range tokenizers { - if t.Identifier() == tok.IdentTerm || t.Identifier() == tok.IdentFullText { - return t, nil + // If we didn't find a !isLossy() tokenizer for eq function on string type predicates, + // then let's see if we can find a non-trigram tokenizer + if typ, err := schema.State().TypeOf(attr); err == nil && typ == types.StringID { + for _, t := range tokenizers { + if t.Identifier() != tok.IdentTrigram { + return t, nil + } } }