diff --git a/.drone.env b/.drone.env index a871b9a7c7b..47be138be2f 100644 --- a/.drone.env +++ b/.drone.env @@ -1,3 +1,3 @@ # The test runner source for UI tests -WEB_COMMITID=de510963a4c9d9eaa05ba69512fabb323a32bd73 +WEB_COMMITID=1322e5b46c827d0e7f7b8f563f302e61269f8515 WEB_BRANCH=master diff --git a/changelog/unreleased/enhancement-kql-lexer-and-bleve-query-compiler.md b/changelog/unreleased/enhancement-kql-lexer-and-bleve-query-compiler.md deleted file mode 100644 index 975c90394da..00000000000 --- a/changelog/unreleased/enhancement-kql-lexer-and-bleve-query-compiler.md +++ /dev/null @@ -1,27 +0,0 @@ -Enhancement: Keyword Query Language (KQL) search syntax support - -Introduce support for [KQL](https://learn.microsoft.com/en-us/sharepoint/dev/general-development/keyword-query-language-kql-syntax-reference) search syntax. - -The functionality consists of a kql lexer and a bleve query compiler - -Supported field queries: - -* `Tag` search `tag:golden tag:"silver"` -* `Filename` search `name:file.txt name:"file.docx"` -* `Content` search `content:ahab content:"captain aha*"` - -Supported conjunctive normal form queries: - -* `Boolean`: `AND`, `OR`, `NOT`, -* `Group`: `(tag:book content:ahab*)`, `tag:(book pdf)` - -some examples are: - -query: `(name:"moby di*" OR tag:bestseller) AND tag:book NOT tag:read` - -* Resources with `name: moby di*` `OR` `tag: bestseller`. -* `AND` with `tag:book`. -* `NOT` with `tag:read`. - -https://github.com/owncloud/ocis/pull/7043 -https://github.com/owncloud/ocis/issues/7042 diff --git a/changelog/unreleased/kql-search-query-language.md b/changelog/unreleased/kql-search-query-language.md new file mode 100644 index 00000000000..ddb289e2485 --- /dev/null +++ b/changelog/unreleased/kql-search-query-language.md @@ -0,0 +1,27 @@ +Enhancement: Keyword Query Language (KQL) search syntax + +We've introduced support for [KQL](https://learn.microsoft.com/en-us/sharepoint/dev/general-development/keyword-query-language-kql-syntax-reference) as the default oCIS search query language. + +Some examples of valid KQL queries are: + +* `Tag`: `tag:golden tag:"silver"` +* `Filename`: `name:file.txt name:"file.docx"` +* `Content`: `content:ahab content:"captain aha*"` + +Conjunctive normal form queries: + +* `Boolean`: `tag:golden AND tag:"silver"`, `tag:golden OR tag:"silver"`, `tag:golden NOT tag:"silver"` +* `Group`: `(tag:book content:ahab*)`, `tag:(book pdf)` + +Complex queries: + +* `(name:"moby di*" OR tag:bestseller) AND tag:book NOT tag:read` + +https://github.com/owncloud/ocis/pull/7212 +https://github.com/owncloud/ocis/pull/7043 +https://github.com/owncloud/web/pull/9653 +https://github.com/owncloud/ocis/issues/7042 +https://github.com/owncloud/ocis/issues/7179 +https://github.com/owncloud/ocis/issues/7114 +https://github.com/owncloud/web/issues/9636 +https://github.com/owncloud/web/issues/9646 diff --git a/services/search/README.md b/services/search/README.md index 9fcc5f8b671..82df9ca3dc3 100644 --- a/services/search/README.md +++ b/services/search/README.md @@ -25,6 +25,22 @@ Note that as of now, the search service can not be scaled. Consider using a dedi By default, the search service is shipped with [bleve](https://github.com/blevesearch/bleve) as its primary search engine. The available engines can be extended by implementing the [Engine](pkg/engine/engine.go) interface and making that engine available.
+## Query language + +By default, [KQL](https://learn.microsoft.com/en-us/sharepoint/dev/general-development/keyword-query-language-kql-syntax-reference) is used as the query language. +For an overview of how the syntax works, please read the [Microsoft documentation](https://learn.microsoft.com/en-us/sharepoint/dev/general-development/keyword-query-language-kql-syntax-reference). + +Not all parts of KQL are supported yet. The following list gives an overview of the parts that are not implemented: + +* Synonym operators +* Inclusion and exclusion operators +* Dynamic ranking operator +* ONEAR operator +* NEAR operator +* Date intervals + +The following [ADR](https://github.com/owncloud/ocis/blob/docs/ocis/adr/0020-file-search-query-language.md) explains why we chose KQL. + ## Extraction Engines The search service provides the following extraction engines and their results are used as index for searching: diff --git a/services/search/pkg/engine/bleve.go b/services/search/pkg/engine/bleve.go index af4bf1b7ec3..c4ad66c6d9d 100644 --- a/services/search/pkg/engine/bleve.go +++ b/services/search/pkg/engine/bleve.go @@ -17,7 +17,7 @@ import ( "github.com/blevesearch/bleve/v2/analysis/tokenizer/single" "github.com/blevesearch/bleve/v2/analysis/tokenizer/unicode" "github.com/blevesearch/bleve/v2/mapping" - bleveQuery "github.com/blevesearch/bleve/v2/search/query" + "github.com/blevesearch/bleve/v2/search/query" storageProvider "github.com/cs3org/go-cs3apis/cs3/storage/provider/v1beta1" "google.golang.org/protobuf/types/known/timestamppb" @@ -27,13 +27,13 @@ import ( searchMessage "github.com/owncloud/ocis/v2/protogen/gen/ocis/messages/search/v0" searchService "github.com/owncloud/ocis/v2/protogen/gen/ocis/services/search/v0" "github.com/owncloud/ocis/v2/services/search/pkg/content" - "github.com/owncloud/ocis/v2/services/search/pkg/query" + searchQuery "github.com/owncloud/ocis/v2/services/search/pkg/query" ) // Bleve represents a search engine which utilizes bleve to search and store resources. type Bleve struct { - index bleve.Index - query query.Creator[bleveQuery.Query] + index bleve.Index + queryCreator searchQuery.Creator[query.Query] } // NewBleveIndex returns a new bleve index @@ -58,10 +58,10 @@ func NewBleveIndex(root string) (bleve.Index, error) { } // NewBleveEngine creates a new Bleve instance -func NewBleveEngine(index bleve.Index, qbc query.Creator[bleveQuery.Query]) *Bleve { +func NewBleveEngine(index bleve.Index, queryCreator searchQuery.Creator[query.Query]) *Bleve { return &Bleve{ - index: index, - query: qbc, + index: index, + queryCreator: queryCreator, } } @@ -118,15 +118,15 @@ func BuildBleveMapping() (mapping.IndexMapping, error) { // Search executes a search request operation within the index. // Returns a SearchIndexResponse object or an error.
-func (b *Bleve) Search(_ context.Context, sir *searchService.SearchIndexRequest) (*searchService.SearchIndexResponse, error) { - createdQuery, err := b.query.Create(sir.Query) +func (b *Bleve) Search(ctx context.Context, sir *searchService.SearchIndexRequest) (*searchService.SearchIndexResponse, error) { + createdQuery, err := b.queryCreator.Create(sir.Query) if err != nil { return nil, err } q := bleve.NewConjunctionQuery( // Skip documents that have been marked as deleted - &bleveQuery.BoolFieldQuery{ + &query.BoolFieldQuery{ Bool: false, FieldVal: "Deleted", }, @@ -136,7 +136,7 @@ func (b *Bleve) Search(_ context.Context, sir *searchService.SearchIndexRequest) if sir.Ref != nil { q.Conjuncts = append( q.Conjuncts, - &bleveQuery.TermQuery{ + &query.TermQuery{ FieldVal: "RootID", Term: storagespace.FormatResourceID( storageProvider.ResourceId{ diff --git a/services/search/pkg/engine/bleve_test.go b/services/search/pkg/engine/bleve_test.go index c1ec06f0e9a..53690a81d7a 100644 --- a/services/search/pkg/engine/bleve_test.go +++ b/services/search/pkg/engine/bleve_test.go @@ -4,10 +4,9 @@ import ( "context" "fmt" - "github.com/cs3org/reva/v2/pkg/storagespace" - bleveSearch "github.com/blevesearch/bleve/v2" sprovider "github.com/cs3org/go-cs3apis/cs3/storage/provider/v1beta1" + "github.com/cs3org/reva/v2/pkg/storagespace" . "github.com/onsi/ginkgo/v2" . "github.com/onsi/gomega" @@ -22,7 +21,6 @@ var _ = Describe("Bleve", func() { var ( eng *engine.Bleve idx bleveSearch.Index - ctx context.Context doSearch = func(id string, query, path string) (*searchsvc.SearchIndexResponse, error) { rID, err := storagespace.ParseID(id) @@ -30,7 +28,7 @@ var _ = Describe("Bleve", func() { return nil, err } - return eng.Search(ctx, &searchsvc.SearchIndexRequest{ + return eng.Search(context.Background(), &searchsvc.SearchIndexRequest{ Query: query, Ref: &searchmsg.Reference{ ResourceId: &searchmsg.ResourceID{ @@ -63,7 +61,7 @@ var _ = Describe("Bleve", func() { idx, err = bleveSearch.NewMemOnly(mapping) Expect(err).ToNot(HaveOccurred()) - eng = engine.NewBleveEngine(idx, bleve.LegacyCreator) + eng = engine.NewBleveEngine(idx, bleve.DefaultCreator) Expect(err).ToNot(HaveOccurred()) rootResource = engine.Resource{ @@ -94,7 +92,7 @@ var _ = Describe("Bleve", func() { Describe("New", func() { It("returns a new index instance", func() { - b := engine.NewBleveEngine(idx, bleve.LegacyCreator) + b := engine.NewBleveEngine(idx, bleve.DefaultCreator) Expect(b).ToNot(BeNil()) }) }) @@ -134,7 +132,7 @@ var _ = Describe("Bleve", func() { err := eng.Upsert(parentResource.ID, parentResource) Expect(err).ToNot(HaveOccurred()) - assertDocCount(rootResource.ID, `Name:foo\ o*`, 1) + assertDocCount(rootResource.ID, `name:"foo o*"`, 1) }) It("finds files by digits in the filename", func() { @@ -409,14 +407,14 @@ var _ = Describe("Bleve", func() { err = eng.Upsert(childResource.ID, childResource) Expect(err).ToNot(HaveOccurred()) - assertDocCount(rootResource.ID, parentResource.Document.Name, 1) - assertDocCount(rootResource.ID, childResource.Document.Name, 1) + assertDocCount(rootResource.ID, `"`+parentResource.Document.Name+`"`, 1) + assertDocCount(rootResource.ID, `"`+childResource.Document.Name+`"`, 1) err = eng.Delete(parentResource.ID) Expect(err).ToNot(HaveOccurred()) - assertDocCount(rootResource.ID, parentResource.Document.Name, 0) - assertDocCount(rootResource.ID, childResource.Document.Name, 0) + assertDocCount(rootResource.ID, `"`+parentResource.Document.Name+`"`, 0) + assertDocCount(rootResource.ID, 
`"`+childResource.Document.Name+`"`, 0) }) }) @@ -431,14 +429,14 @@ var _ = Describe("Bleve", func() { err = eng.Delete(parentResource.ID) Expect(err).ToNot(HaveOccurred()) - assertDocCount(rootResource.ID, parentResource.Name, 0) - assertDocCount(rootResource.ID, childResource.Name, 0) + assertDocCount(rootResource.ID, `"`+parentResource.Name+`"`, 0) + assertDocCount(rootResource.ID, `"`+childResource.Name+`"`, 0) err = eng.Restore(parentResource.ID) Expect(err).ToNot(HaveOccurred()) - assertDocCount(rootResource.ID, parentResource.Name, 1) - assertDocCount(rootResource.ID, childResource.Name, 1) + assertDocCount(rootResource.ID, `"`+parentResource.Name+`"`, 1) + assertDocCount(rootResource.ID, `"`+childResource.Name+`"`, 1) }) }) diff --git a/services/search/pkg/query/ast/ast.go b/services/search/pkg/query/ast/ast.go index dab5d6353be..7d70bf089c4 100644 --- a/services/search/pkg/query/ast/ast.go +++ b/services/search/pkg/query/ast/ast.go @@ -1,6 +1,10 @@ // Package ast provides available ast nodes. package ast +import ( + "time" +) + // Node represents abstract syntax tree node type Node interface { Location() *Location @@ -48,6 +52,14 @@ type BooleanNode struct { Value bool } +// DateTimeNode represents a time.Time value +type DateTimeNode struct { + *Base + Key string + Operator *OperatorNode + Value time.Time +} + // OperatorNode represents an operator value like // AND, OR, NOT, =, <= ... and so on type OperatorNode struct { diff --git a/services/search/pkg/query/ast/test/test.go b/services/search/pkg/query/ast/test/test.go index 6b03fee57b6..01a260de0ea 100644 --- a/services/search/pkg/query/ast/test/test.go +++ b/services/search/pkg/query/ast/test/test.go @@ -21,6 +21,7 @@ func DiffAst(x, y interface{}, opts ...cmp.Option) string { cmpopts.IgnoreFields(ast.OperatorNode{}, "Base"), cmpopts.IgnoreFields(ast.GroupNode{}, "Base"), cmpopts.IgnoreFields(ast.BooleanNode{}, "Base"), + cmpopts.IgnoreFields(ast.DateTimeNode{}, "Base"), )..., ) } diff --git a/services/search/pkg/query/bleve/bleve.go b/services/search/pkg/query/bleve/bleve.go index 0058475cb5c..e720b35f70e 100644 --- a/services/search/pkg/query/bleve/bleve.go +++ b/services/search/pkg/query/bleve/bleve.go @@ -5,6 +5,7 @@ import ( bQuery "github.com/blevesearch/bleve/v2/search/query" "github.com/owncloud/ocis/v2/services/search/pkg/query" + "github.com/owncloud/ocis/v2/services/search/pkg/query/kql" ) // Creator is combines a Builder and a Compiler which is used to Create the query. @@ -29,5 +30,5 @@ func (c Creator[T]) Create(qs string) (T, error) { return t, nil } -// LegacyCreator exposes an ocis legacy bleve query creator. -var LegacyCreator = Creator[bQuery.Query]{LegacyBuilder{}, LegacyCompiler{}} +// DefaultCreator exposes a kql to bleve query creator. +var DefaultCreator = Creator[bQuery.Query]{kql.Builder{}, Compiler{}} diff --git a/services/search/pkg/query/bleve/compiler.go b/services/search/pkg/query/bleve/compiler.go index 9f9026110e1..114052bcce1 100644 --- a/services/search/pkg/query/bleve/compiler.go +++ b/services/search/pkg/query/bleve/compiler.go @@ -22,13 +22,15 @@ var _fields = map[string]string{ "type": "Type", "tag": "Tags", "tags": "Tags", + "content": "Content", + "hidden": "Hidden", } // Compiler represents a KQL query search string to the bleve query formatter. type Compiler struct{} // Compile implements the query formatter which converts the KQL query search string to the bleve query. 
-func (c *Compiler) Compile(givenAst *ast.Ast) (bleveQuery.Query, error) { +func (c Compiler) Compile(givenAst *ast.Ast) (bleveQuery.Query, error) { q, err := compile(givenAst) if err != nil { return nil, err @@ -52,7 +54,49 @@ func walk(offset int, nodes []ast.Node) (bleveQuery.Query, int) { for i := offset; i < len(nodes); i++ { switch n := nodes[i].(type) { case *ast.StringNode: - q := bleveQuery.NewQueryStringQuery(getField(n.Key) + ":" + n.Value) + k := getField(n.Key) + v := strings.ReplaceAll(n.Value, " ", `\ `) + + if k != "Hidden" { + v = strings.ToLower(v) + } + + q := bleveQuery.NewQueryStringQuery(k + ":" + v) + if prev == nil { + prev = q + } else { + next = q + } + case *ast.DateTimeNode: + q := &bleveQuery.DateRangeQuery{ + Start: bleveQuery.BleveQueryTime{}, + End: bleveQuery.BleveQueryTime{}, + InclusiveStart: nil, + InclusiveEnd: nil, + FieldVal: getField(n.Key), + } + + if n.Operator == nil { + continue + } + + switch n.Operator.Value { + case ">": + q.Start.Time = n.Value + q.InclusiveStart = &[]bool{false}[0] + case ">=": + q.Start.Time = n.Value + q.InclusiveStart = &[]bool{true}[0] + case "<": + q.End.Time = n.Value + q.InclusiveEnd = &[]bool{false}[0] + case "<=": + q.End.Time = n.Value + q.InclusiveEnd = &[]bool{true}[0] + default: + continue + } + if prev == nil { prev = q } else { diff --git a/services/search/pkg/query/bleve/compiler_test.go b/services/search/pkg/query/bleve/compiler_test.go index 1891cd79a9f..fa4860cfd17 100644 --- a/services/search/pkg/query/bleve/compiler_test.go +++ b/services/search/pkg/query/bleve/compiler_test.go @@ -2,6 +2,7 @@ package bleve import ( "testing" + "time" "github.com/blevesearch/bleve/v2/search/query" tAssert "github.com/stretchr/testify/assert" @@ -9,6 +10,15 @@ import ( "github.com/owncloud/ocis/v2/services/search/pkg/query/ast" ) +var timeMustParse = func(t *testing.T, ts string) time.Time { + tp, err := time.Parse(time.RFC3339Nano, ts) + if err != nil { + t.Fatalf("time.Parse(...) 
error = %v", err) + } + + return tp +} + func Test_compile(t *testing.T) { tests := []struct { name string @@ -36,7 +46,7 @@ func Test_compile(t *testing.T) { }, }, want: query.NewConjunctionQuery([]query.Query{ - query.NewQueryStringQuery(`Name:John Smith`), + query.NewQueryStringQuery(`Name:john\ smith`), }), wantErr: false, }, @@ -50,8 +60,8 @@ func Test_compile(t *testing.T) { }, }, want: query.NewConjunctionQuery([]query.Query{ - query.NewQueryStringQuery(`Name:John Smith`), - query.NewQueryStringQuery(`Name:Jane`), + query.NewQueryStringQuery(`Name:john\ smith`), + query.NewQueryStringQuery(`Name:jane`), }), wantErr: false, }, @@ -82,7 +92,7 @@ func Test_compile(t *testing.T) { }, }, want: query.NewDisjunctionQuery([]query.Query{ - query.NewQueryStringQuery(`Name:moby di*`), + query.NewQueryStringQuery(`Name:moby\ di*`), query.NewConjunctionQuery([]query.Query{ query.NewQueryStringQuery(`Tags:bestseller`), query.NewQueryStringQuery(`Tags:book`), @@ -125,7 +135,7 @@ func Test_compile(t *testing.T) { }, want: query.NewConjunctionQuery([]query.Query{ query.NewDisjunctionQuery([]query.Query{ - query.NewQueryStringQuery(`Name:moby di*`), + query.NewQueryStringQuery(`Name:moby\ di*`), query.NewQueryStringQuery(`Tags:bestseller`), }), query.NewQueryStringQuery(`Tags:book`), @@ -150,7 +160,7 @@ func Test_compile(t *testing.T) { }, want: query.NewConjunctionQuery([]query.Query{ query.NewDisjunctionQuery([]query.Query{ - query.NewQueryStringQuery(`Name:moby di*`), + query.NewQueryStringQuery(`Name:moby\ di*`), query.NewQueryStringQuery(`Tags:bestseller`), }), query.NewQueryStringQuery(`Tags:book`), @@ -173,8 +183,8 @@ func Test_compile(t *testing.T) { }, }, want: query.NewConjunctionQuery([]query.Query{ - query.NewQueryStringQuery(`author:John Smith`), - query.NewQueryStringQuery(`author:Jane`), + query.NewQueryStringQuery(`author:john\ smith`), + query.NewQueryStringQuery(`author:jane`), }), wantErr: false, }, @@ -195,12 +205,129 @@ func Test_compile(t *testing.T) { }, }, want: query.NewConjunctionQuery([]query.Query{ - query.NewQueryStringQuery(`author:John Smith`), - query.NewQueryStringQuery(`author:Jane`), + query.NewQueryStringQuery(`author:john\ smith`), + query.NewQueryStringQuery(`author:jane`), query.NewQueryStringQuery(`Tags:bestseller`), }), wantErr: false, }, + { + name: `id:b27d3bf1-b254-459f-92e8-bdba668d6d3f$d0648459-25fb-4ed8-8684-bc62c7dca29c!d0648459-25fb-4ed8-8684-bc62c7dca29c mtime>=2023-09-05T12:40:59.14741+02:00`, + args: &ast.Ast{ + Nodes: []ast.Node{ + &ast.StringNode{ + Key: "id", + Value: "b27d3bf1-b254-459f-92e8-bdba668d6d3f$d0648459-25fb-4ed8-8684-bc62c7dca29c!d0648459-25fb-4ed8-8684-bc62c7dca29c", + }, + &ast.OperatorNode{Value: "AND"}, + &ast.DateTimeNode{ + Key: "Mtime", + Operator: &ast.OperatorNode{Value: ">="}, + Value: timeMustParse(t, "2023-09-05T08:42:11.23554+02:00"), + }, + }, + }, + want: query.NewConjunctionQuery([]query.Query{ + query.NewQueryStringQuery(`ID:b27d3bf1-b254-459f-92e8-bdba668d6d3f$d0648459-25fb-4ed8-8684-bc62c7dca29c!d0648459-25fb-4ed8-8684-bc62c7dca29c`), + func() query.Query { + q := query.NewDateRangeInclusiveQuery(timeMustParse(t, "2023-09-05T08:42:11.23554+02:00"), time.Time{}, &[]bool{true}[0], nil) + q.FieldVal = "Mtime" + return q + }(), + }), + wantErr: false, + }, + { + name: `StringNode value lowercase`, + args: &ast.Ast{ + Nodes: []ast.Node{ + &ast.StringNode{Value: "John Smith"}, + &ast.OperatorNode{Value: "AND"}, + &ast.StringNode{Key: "Hidden", Value: "T"}, + &ast.OperatorNode{Value: "AND"}, + &ast.StringNode{Key: 
"hidden", Value: "T"}, + }, + }, + want: query.NewConjunctionQuery([]query.Query{ + query.NewQueryStringQuery(`Name:john\ smith`), + query.NewQueryStringQuery(`Hidden:T`), + query.NewQueryStringQuery(`Hidden:T`), + }), + wantErr: false, + }, + { + name: `ast.DateTimeNode`, + args: &ast.Ast{ + Nodes: []ast.Node{ + &ast.DateTimeNode{ + Key: "mtime", + // "=" is not supported by bleve, ignore + Operator: &ast.OperatorNode{Value: "="}, + Value: timeMustParse(t, "2023-09-05T08:42:11.23554+02:00"), + }, + &ast.OperatorNode{Value: "AND"}, + &ast.DateTimeNode{ + Key: "mtime", + // ":" is not supported by bleve, ignore + Operator: &ast.OperatorNode{Value: ":"}, + Value: timeMustParse(t, "2023-09-05T08:42:11.23554+02:00"), + }, + &ast.OperatorNode{Value: "AND"}, + &ast.DateTimeNode{ + Key: "mtime", + // no operator, skip + Value: timeMustParse(t, "2023-09-05T08:42:11.23554+02:00"), + }, + &ast.OperatorNode{Value: "AND"}, + &ast.DateTimeNode{ + Key: "mtime", + Operator: &ast.OperatorNode{Value: ">"}, + Value: timeMustParse(t, "2023-09-05T08:42:11.23554+02:00"), + }, + &ast.OperatorNode{Value: "AND"}, + &ast.DateTimeNode{ + Key: "mtime", + Operator: &ast.OperatorNode{Value: ">="}, + Value: timeMustParse(t, "2023-09-05T08:42:11.23554+02:00"), + }, + &ast.OperatorNode{Value: "AND"}, + &ast.DateTimeNode{ + Key: "mtime", + Operator: &ast.OperatorNode{Value: "<"}, + Value: timeMustParse(t, "2023-09-05T08:42:11.23554+02:00"), + }, + &ast.OperatorNode{Value: "AND"}, + &ast.DateTimeNode{ + Key: "mtime", + Operator: &ast.OperatorNode{Value: "<="}, + Value: timeMustParse(t, "2023-09-05T08:42:11.23554+02:00"), + }, + }, + }, + want: query.NewConjunctionQuery([]query.Query{ + func() query.Query { + q := query.NewDateRangeInclusiveQuery(timeMustParse(t, "2023-09-05T08:42:11.23554+02:00"), time.Time{}, &[]bool{false}[0], nil) + q.FieldVal = "Mtime" + return q + }(), + func() query.Query { + q := query.NewDateRangeInclusiveQuery(timeMustParse(t, "2023-09-05T08:42:11.23554+02:00"), time.Time{}, &[]bool{true}[0], nil) + q.FieldVal = "Mtime" + return q + }(), + func() query.Query { + q := query.NewDateRangeInclusiveQuery(time.Time{}, timeMustParse(t, "2023-09-05T08:42:11.23554+02:00"), nil, &[]bool{false}[0]) + q.FieldVal = "Mtime" + return q + }(), + func() query.Query { + q := query.NewDateRangeInclusiveQuery(time.Time{}, timeMustParse(t, "2023-09-05T08:42:11.23554+02:00"), nil, &[]bool{true}[0]) + q.FieldVal = "Mtime" + return q + }(), + }), + wantErr: false, + }, } assert := tAssert.New(t) diff --git a/services/search/pkg/query/bleve/legacy.go b/services/search/pkg/query/bleve/legacy.go deleted file mode 100644 index 74aec2004eb..00000000000 --- a/services/search/pkg/query/bleve/legacy.go +++ /dev/null @@ -1,76 +0,0 @@ -package bleve - -import ( - "regexp" - "strings" - - bQuery "github.com/blevesearch/bleve/v2/search/query" - - "github.com/owncloud/ocis/v2/services/search/pkg/query/ast" -) - -// LegacyBuilder implements the legacy Builder interface. -type LegacyBuilder struct{} - -// Build translates the ast to a valid bleve query. -func (b LegacyBuilder) Build(qs string) (*ast.Ast, error) { - return &ast.Ast{ - Base: &ast.Base{ - Loc: &ast.Location{ - Start: ast.Position{ - Line: 0, - Column: 0, - }, - End: ast.Position{ - Line: 0, - Column: len(qs), - }, - Source: &qs, - }, - }, - }, nil -} - -// LegacyCompiler represents a default bleve query formatter. -type LegacyCompiler struct{} - -// Compile implements the default bleve query formatter which converts the bleve likes query search string to the bleve query. 
-func (c LegacyCompiler) Compile(givenAst *ast.Ast) (bQuery.Query, error) { - return &bQuery.QueryStringQuery{ - Query: c.formatQuery(*givenAst.Base.Loc.Source), - }, nil -} - -func (c LegacyCompiler) formatQuery(q string) string { - cq := q - fields := []string{"RootID", "Path", "ID", "Name", "Size", "Mtime", "MimeType", "Type"} - for _, field := range fields { - cq = strings.ReplaceAll(cq, strings.ToLower(field)+":", field+":") - } - - fieldRe := regexp.MustCompile(`\w+:[^ ]+`) - if fieldRe.MatchString(cq) { - nameTagesRe := regexp.MustCompile(`\+?(Name|Tags)`) // detect "Name", "+Name, "Tags" and "+Tags" - parts := strings.Split(cq, " ") - - cq = "" - for _, part := range parts { - fieldParts := strings.SplitN(part, ":", 2) - if len(fieldParts) > 1 { - key := fieldParts[0] - value := fieldParts[1] - if nameTagesRe.MatchString(key) { - value = strings.ToLower(value) // do a lowercase query on the lowercased fields - } - cq += key + ":" + value + " " - } else { - cq += part + " " - } - } - return cq // Sophisticated field based search - } - - // this is a basic filename search - cq = strings.ReplaceAll(cq, ":", `\:`) - return "Name:*" + strings.ReplaceAll(strings.ToLower(cq), " ", `\ `) + "*" -} diff --git a/services/search/pkg/query/kql/cast.go b/services/search/pkg/query/kql/cast.go index 261ae7e62a8..cdceda9a584 100644 --- a/services/search/pkg/query/kql/cast.go +++ b/services/search/pkg/query/kql/cast.go @@ -2,6 +2,7 @@ package kql import ( "fmt" + "time" "github.com/owncloud/ocis/v2/services/search/pkg/query/ast" ) @@ -13,23 +14,24 @@ func toIfaceSlice(in interface{}) []interface{} { return in.([]interface{}) } -func toNode(in interface{}) (ast.Node, error) { - out, ok := in.(ast.Node) +func toNode[T ast.Node](in interface{}) (T, error) { + var t T + out, ok := in.(T) if !ok { - return nil, fmt.Errorf("can't convert '%T' to ast.Node", in) + return t, fmt.Errorf("can't convert '%T' to '%T'", in, t) } return out, nil } -func toNodes(in interface{}) ([]ast.Node, error) { +func toNodes[T ast.Node](in interface{}) ([]T, error) { switch v := in.(type) { case []interface{}: - var nodes []ast.Node + var nodes []T for _, el := range toIfaceSlice(v) { - node, err := toNode(el) + node, err := toNode[T](el) if err != nil { return nil, err } @@ -38,7 +40,7 @@ func toNodes(in interface{}) ([]ast.Node, error) { } return nodes, nil - case []ast.Node: + case []T: return v, nil default: return nil, fmt.Errorf("can't convert '%T' to []ast.Node", in) @@ -52,9 +54,13 @@ func toString(in interface{}) (string, error) { case []interface{}: var str string - for _, i := range v { - j := i.([]uint8) - str += string(j[0]) + for i := range v { + sv, err := toString(v[i]) + if err != nil { + return "", err + } + + str += sv } return str, nil @@ -64,3 +70,12 @@ func toString(in interface{}) (string, error) { return "", fmt.Errorf("can't convert '%T' to string", v) } } + +func toTime(in interface{}) (time.Time, error) { + ts, err := toString(in) + if err != nil { + return time.Time{}, err + } + + return time.Parse(time.RFC3339Nano, ts) +} diff --git a/services/search/pkg/query/kql/dictionary.peg b/services/search/pkg/query/kql/dictionary.peg index b2d32599e92..927f0159eac 100644 --- a/services/search/pkg/query/kql/dictionary.peg +++ b/services/search/pkg/query/kql/dictionary.peg @@ -17,7 +17,7 @@ Nodes <- ( GroupNode / PropertyRestrictionNodes / - BooleanOperatorNode / + OperatorBooleanNode / FreeTextKeywordNodes ) _ @@ -30,7 +30,7 @@ Nodes <- //////////////////////////////////////////////////////// GroupNode <- - 
k:(Char+)? (ColonOperator / EqualOperator)? "(" v:Nodes ")" { + k:(Char+)? (OperatorColonNode / OperatorEqualNode)? "(" v:Nodes ")" { return buildGroupNode(k, v, c.text, c.pos) } @@ -40,19 +40,24 @@ GroupNode <- PropertyRestrictionNodes <- YesNoPropertyRestrictionNode / + DateTimeRestrictionNode / TextPropertyRestrictionNode YesNoPropertyRestrictionNode <- - k:Char+ (ColonOperator / EqualOperator) v:("true" / "false"){ + k:Char+ (OperatorColonNode / OperatorEqualNode) v:("true" / "false"){ return buildBooleanNode(k, v, c.text, c.pos) } +DateTimeRestrictionNode <- + k:Char+ o:(OperatorGreaterOrEqualNode / OperatorLessOrEqualNode / OperatorGreaterNode / OperatorLessNode / OperatorEqualNode / OperatorColonNode) '"'? v:(FullDate "T" FullTime) '"'? { + return buildDateTimeNode(k, o, v, c.text, c.pos) + } + TextPropertyRestrictionNode <- - k:Char+ (ColonOperator / EqualOperator) v:(String / [^ ()]+){ + k:Char+ (OperatorColonNode / OperatorEqualNode) v:(String / [^ ()]+){ return buildStringNode(k, v, c.text, c.pos) } - //////////////////////////////////////////////////////// // free text-keywords //////////////////////////////////////////////////////// @@ -62,12 +67,12 @@ FreeTextKeywordNodes <- WordNode PhraseNode <- - ColonOperator? _ v:String _ ColonOperator? { + OperatorColonNode? _ v:String _ OperatorColonNode? { return buildStringNode("", v, c.text, c.pos) } WordNode <- - ColonOperator? _ v:[^ :()]+ _ ColonOperator? { + OperatorColonNode? _ v:[^ :()]+ _ OperatorColonNode? { return buildStringNode("", v, c.text, c.pos) } @@ -75,18 +80,83 @@ WordNode <- // operators //////////////////////////////////////////////////////// -BooleanOperatorNode <- +OperatorBooleanNode <- ("AND" / "OR" / "NOT") { return buildOperatorNode(c.text, c.pos) } -ColonOperator <- +OperatorColonNode <- ":" { - return c.text, nil + return buildOperatorNode(c.text, c.pos) } -EqualOperator <- +OperatorEqualNode <- "=" { + return buildOperatorNode(c.text, c.pos) + } + +OperatorLessNode <- + "<" { + return buildOperatorNode(c.text, c.pos) + } + +OperatorLessOrEqualNode <- + "<=" { + return buildOperatorNode(c.text, c.pos) + } + +OperatorGreaterNode <- + ">" { + return buildOperatorNode(c.text, c.pos) + } + +OperatorGreaterOrEqualNode <- + ">=" { + return buildOperatorNode(c.text, c.pos) + } + + +//////////////////////////////////////////////////////// +// time +//////////////////////////////////////////////////////// + +TimeYear <- + Digit Digit Digit Digit { + return c.text, nil + } + +TimeMonth <- + Digit Digit { + return c.text, nil + } + +TimeDay <- + Digit Digit { + return c.text, nil + } + +TimeHour <- + Digit Digit { + return c.text, nil + } + +TimeMinute <- + Digit Digit { + return c.text, nil + } + +TimeSecond <- + Digit Digit { + return c.text, nil + } + +FullDate <- + TimeYear "-" TimeMonth "-" TimeDay { + return c.text, nil + } + +FullTime <- + TimeHour ":" TimeMinute ":" TimeSecond ("." Digit+)? 
("Z" / ("+" / "-") TimeHour ":" TimeMinute) { return c.text, nil } @@ -104,5 +174,10 @@ String <- return v, nil } +Digit <- + [0-9] { + return c.text, nil + } + _ <- [ \t]* diff --git a/services/search/pkg/query/kql/dictionary_gen.go b/services/search/pkg/query/kql/dictionary_gen.go index aa8fbe993a3..45d7c38858e 100644 --- a/services/search/pkg/query/kql/dictionary_gen.go +++ b/services/search/pkg/query/kql/dictionary_gen.go @@ -79,7 +79,7 @@ var g = &grammar{ }, &ruleRefExpr{ pos: position{line: 20, col: 13, offset: 337}, - name: "BooleanOperatorNode", + name: "OperatorBooleanNode", }, &ruleRefExpr{ pos: position{line: 21, col: 13, offset: 371}, @@ -127,31 +127,31 @@ var g = &grammar{ alternatives: []any{ &ruleRefExpr{ pos: position{line: 33, col: 17, offset: 610}, - name: "ColonOperator", + name: "OperatorColonNode", }, &ruleRefExpr{ - pos: position{line: 33, col: 33, offset: 626}, - name: "EqualOperator", + pos: position{line: 33, col: 37, offset: 630}, + name: "OperatorEqualNode", }, }, }, }, &litMatcher{ - pos: position{line: 33, col: 49, offset: 642}, + pos: position{line: 33, col: 57, offset: 650}, val: "(", ignoreCase: false, want: "\"(\"", }, &labeledExpr{ - pos: position{line: 33, col: 53, offset: 646}, + pos: position{line: 33, col: 61, offset: 654}, label: "v", expr: &ruleRefExpr{ - pos: position{line: 33, col: 55, offset: 648}, + pos: position{line: 33, col: 63, offset: 656}, name: "Nodes", }, }, &litMatcher{ - pos: position{line: 33, col: 61, offset: 654}, + pos: position{line: 33, col: 69, offset: 662}, val: ")", ignoreCase: false, want: "\")\"", @@ -162,16 +162,20 @@ var g = &grammar{ }, { name: "PropertyRestrictionNodes", - pos: position{line: 41, col: 1, offset: 858}, + pos: position{line: 41, col: 1, offset: 866}, expr: &choiceExpr{ - pos: position{line: 42, col: 5, offset: 890}, + pos: position{line: 42, col: 5, offset: 898}, alternatives: []any{ &ruleRefExpr{ - pos: position{line: 42, col: 5, offset: 890}, + pos: position{line: 42, col: 5, offset: 898}, name: "YesNoPropertyRestrictionNode", }, &ruleRefExpr{ - pos: position{line: 43, col: 5, offset: 925}, + pos: position{line: 43, col: 5, offset: 933}, + name: "DateTimeRestrictionNode", + }, + &ruleRefExpr{ + pos: position{line: 44, col: 5, offset: 963}, name: "TextPropertyRestrictionNode", }, }, @@ -179,51 +183,51 @@ var g = &grammar{ }, { name: "YesNoPropertyRestrictionNode", - pos: position{line: 45, col: 1, offset: 954}, + pos: position{line: 46, col: 1, offset: 992}, expr: &actionExpr{ - pos: position{line: 46, col: 5, offset: 990}, + pos: position{line: 47, col: 5, offset: 1028}, run: (*parser).callonYesNoPropertyRestrictionNode1, expr: &seqExpr{ - pos: position{line: 46, col: 5, offset: 990}, + pos: position{line: 47, col: 5, offset: 1028}, exprs: []any{ &labeledExpr{ - pos: position{line: 46, col: 5, offset: 990}, + pos: position{line: 47, col: 5, offset: 1028}, label: "k", expr: &oneOrMoreExpr{ - pos: position{line: 46, col: 7, offset: 992}, + pos: position{line: 47, col: 7, offset: 1030}, expr: &ruleRefExpr{ - pos: position{line: 46, col: 7, offset: 992}, + pos: position{line: 47, col: 7, offset: 1030}, name: "Char", }, }, }, &choiceExpr{ - pos: position{line: 46, col: 14, offset: 999}, + pos: position{line: 47, col: 14, offset: 1037}, alternatives: []any{ &ruleRefExpr{ - pos: position{line: 46, col: 14, offset: 999}, - name: "ColonOperator", + pos: position{line: 47, col: 14, offset: 1037}, + name: "OperatorColonNode", }, &ruleRefExpr{ - pos: position{line: 46, col: 30, offset: 1015}, - name: 
"EqualOperator", + pos: position{line: 47, col: 34, offset: 1057}, + name: "OperatorEqualNode", }, }, }, &labeledExpr{ - pos: position{line: 46, col: 45, offset: 1030}, + pos: position{line: 47, col: 53, offset: 1076}, label: "v", expr: &choiceExpr{ - pos: position{line: 46, col: 48, offset: 1033}, + pos: position{line: 47, col: 56, offset: 1079}, alternatives: []any{ &litMatcher{ - pos: position{line: 46, col: 48, offset: 1033}, + pos: position{line: 47, col: 56, offset: 1079}, val: "true", ignoreCase: false, want: "\"true\"", }, &litMatcher{ - pos: position{line: 46, col: 57, offset: 1042}, + pos: position{line: 47, col: 65, offset: 1088}, val: "false", ignoreCase: false, want: "\"false\"", @@ -235,53 +239,151 @@ var g = &grammar{ }, }, }, + { + name: "DateTimeRestrictionNode", + pos: position{line: 51, col: 1, offset: 1158}, + expr: &actionExpr{ + pos: position{line: 52, col: 5, offset: 1189}, + run: (*parser).callonDateTimeRestrictionNode1, + expr: &seqExpr{ + pos: position{line: 52, col: 5, offset: 1189}, + exprs: []any{ + &labeledExpr{ + pos: position{line: 52, col: 5, offset: 1189}, + label: "k", + expr: &oneOrMoreExpr{ + pos: position{line: 52, col: 7, offset: 1191}, + expr: &ruleRefExpr{ + pos: position{line: 52, col: 7, offset: 1191}, + name: "Char", + }, + }, + }, + &labeledExpr{ + pos: position{line: 52, col: 13, offset: 1197}, + label: "o", + expr: &choiceExpr{ + pos: position{line: 52, col: 16, offset: 1200}, + alternatives: []any{ + &ruleRefExpr{ + pos: position{line: 52, col: 16, offset: 1200}, + name: "OperatorGreaterOrEqualNode", + }, + &ruleRefExpr{ + pos: position{line: 52, col: 45, offset: 1229}, + name: "OperatorLessOrEqualNode", + }, + &ruleRefExpr{ + pos: position{line: 52, col: 71, offset: 1255}, + name: "OperatorGreaterNode", + }, + &ruleRefExpr{ + pos: position{line: 52, col: 93, offset: 1277}, + name: "OperatorLessNode", + }, + &ruleRefExpr{ + pos: position{line: 52, col: 112, offset: 1296}, + name: "OperatorEqualNode", + }, + &ruleRefExpr{ + pos: position{line: 52, col: 132, offset: 1316}, + name: "OperatorColonNode", + }, + }, + }, + }, + &zeroOrOneExpr{ + pos: position{line: 52, col: 151, offset: 1335}, + expr: &litMatcher{ + pos: position{line: 52, col: 151, offset: 1335}, + val: "\"", + ignoreCase: false, + want: "\"\\\"\"", + }, + }, + &labeledExpr{ + pos: position{line: 52, col: 156, offset: 1340}, + label: "v", + expr: &seqExpr{ + pos: position{line: 52, col: 159, offset: 1343}, + exprs: []any{ + &ruleRefExpr{ + pos: position{line: 52, col: 159, offset: 1343}, + name: "FullDate", + }, + &litMatcher{ + pos: position{line: 52, col: 168, offset: 1352}, + val: "T", + ignoreCase: false, + want: "\"T\"", + }, + &ruleRefExpr{ + pos: position{line: 52, col: 172, offset: 1356}, + name: "FullTime", + }, + }, + }, + }, + &zeroOrOneExpr{ + pos: position{line: 52, col: 182, offset: 1366}, + expr: &litMatcher{ + pos: position{line: 52, col: 182, offset: 1366}, + val: "\"", + ignoreCase: false, + want: "\"\\\"\"", + }, + }, + }, + }, + }, + }, { name: "TextPropertyRestrictionNode", - pos: position{line: 50, col: 1, offset: 1112}, + pos: position{line: 56, col: 1, offset: 1437}, expr: &actionExpr{ - pos: position{line: 51, col: 5, offset: 1147}, + pos: position{line: 57, col: 5, offset: 1472}, run: (*parser).callonTextPropertyRestrictionNode1, expr: &seqExpr{ - pos: position{line: 51, col: 5, offset: 1147}, + pos: position{line: 57, col: 5, offset: 1472}, exprs: []any{ &labeledExpr{ - pos: position{line: 51, col: 5, offset: 1147}, + pos: position{line: 57, col: 5, 
offset: 1472}, label: "k", expr: &oneOrMoreExpr{ - pos: position{line: 51, col: 7, offset: 1149}, + pos: position{line: 57, col: 7, offset: 1474}, expr: &ruleRefExpr{ - pos: position{line: 51, col: 7, offset: 1149}, + pos: position{line: 57, col: 7, offset: 1474}, name: "Char", }, }, }, &choiceExpr{ - pos: position{line: 51, col: 14, offset: 1156}, + pos: position{line: 57, col: 14, offset: 1481}, alternatives: []any{ &ruleRefExpr{ - pos: position{line: 51, col: 14, offset: 1156}, - name: "ColonOperator", + pos: position{line: 57, col: 14, offset: 1481}, + name: "OperatorColonNode", }, &ruleRefExpr{ - pos: position{line: 51, col: 30, offset: 1172}, - name: "EqualOperator", + pos: position{line: 57, col: 34, offset: 1501}, + name: "OperatorEqualNode", }, }, }, &labeledExpr{ - pos: position{line: 51, col: 45, offset: 1187}, + pos: position{line: 57, col: 53, offset: 1520}, label: "v", expr: &choiceExpr{ - pos: position{line: 51, col: 48, offset: 1190}, + pos: position{line: 57, col: 56, offset: 1523}, alternatives: []any{ &ruleRefExpr{ - pos: position{line: 51, col: 48, offset: 1190}, + pos: position{line: 57, col: 56, offset: 1523}, name: "String", }, &oneOrMoreExpr{ - pos: position{line: 51, col: 57, offset: 1199}, + pos: position{line: 57, col: 65, offset: 1532}, expr: &charClassMatcher{ - pos: position{line: 51, col: 57, offset: 1199}, + pos: position{line: 57, col: 65, offset: 1532}, val: "[^ ()]", chars: []rune{' ', '(', ')'}, ignoreCase: false, @@ -297,16 +399,16 @@ var g = &grammar{ }, { name: "FreeTextKeywordNodes", - pos: position{line: 60, col: 1, offset: 1406}, + pos: position{line: 65, col: 1, offset: 1738}, expr: &choiceExpr{ - pos: position{line: 61, col: 5, offset: 1434}, + pos: position{line: 66, col: 5, offset: 1766}, alternatives: []any{ &ruleRefExpr{ - pos: position{line: 61, col: 5, offset: 1434}, + pos: position{line: 66, col: 5, offset: 1766}, name: "PhraseNode", }, &ruleRefExpr{ - pos: position{line: 62, col: 5, offset: 1451}, + pos: position{line: 67, col: 5, offset: 1783}, name: "WordNode", }, }, @@ -314,41 +416,41 @@ var g = &grammar{ }, { name: "PhraseNode", - pos: position{line: 64, col: 1, offset: 1461}, + pos: position{line: 69, col: 1, offset: 1793}, expr: &actionExpr{ - pos: position{line: 65, col: 6, offset: 1480}, + pos: position{line: 70, col: 6, offset: 1812}, run: (*parser).callonPhraseNode1, expr: &seqExpr{ - pos: position{line: 65, col: 6, offset: 1480}, + pos: position{line: 70, col: 6, offset: 1812}, exprs: []any{ &zeroOrOneExpr{ - pos: position{line: 65, col: 6, offset: 1480}, + pos: position{line: 70, col: 6, offset: 1812}, expr: &ruleRefExpr{ - pos: position{line: 65, col: 6, offset: 1480}, - name: "ColonOperator", + pos: position{line: 70, col: 6, offset: 1812}, + name: "OperatorColonNode", }, }, &ruleRefExpr{ - pos: position{line: 65, col: 21, offset: 1495}, + pos: position{line: 70, col: 25, offset: 1831}, name: "_", }, &labeledExpr{ - pos: position{line: 65, col: 23, offset: 1497}, + pos: position{line: 70, col: 27, offset: 1833}, label: "v", expr: &ruleRefExpr{ - pos: position{line: 65, col: 25, offset: 1499}, + pos: position{line: 70, col: 29, offset: 1835}, name: "String", }, }, &ruleRefExpr{ - pos: position{line: 65, col: 32, offset: 1506}, + pos: position{line: 70, col: 36, offset: 1842}, name: "_", }, &zeroOrOneExpr{ - pos: position{line: 65, col: 34, offset: 1508}, + pos: position{line: 70, col: 38, offset: 1844}, expr: &ruleRefExpr{ - pos: position{line: 65, col: 34, offset: 1508}, - name: "ColonOperator", + pos: position{line: 70, 
col: 38, offset: 1844}, + name: "OperatorColonNode", }, }, }, @@ -357,31 +459,31 @@ var g = &grammar{ }, { name: "WordNode", - pos: position{line: 69, col: 1, offset: 1585}, + pos: position{line: 74, col: 1, offset: 1925}, expr: &actionExpr{ - pos: position{line: 70, col: 6, offset: 1602}, + pos: position{line: 75, col: 6, offset: 1942}, run: (*parser).callonWordNode1, expr: &seqExpr{ - pos: position{line: 70, col: 6, offset: 1602}, + pos: position{line: 75, col: 6, offset: 1942}, exprs: []any{ &zeroOrOneExpr{ - pos: position{line: 70, col: 6, offset: 1602}, + pos: position{line: 75, col: 6, offset: 1942}, expr: &ruleRefExpr{ - pos: position{line: 70, col: 6, offset: 1602}, - name: "ColonOperator", + pos: position{line: 75, col: 6, offset: 1942}, + name: "OperatorColonNode", }, }, &ruleRefExpr{ - pos: position{line: 70, col: 21, offset: 1617}, + pos: position{line: 75, col: 25, offset: 1961}, name: "_", }, &labeledExpr{ - pos: position{line: 70, col: 23, offset: 1619}, + pos: position{line: 75, col: 27, offset: 1963}, label: "v", expr: &oneOrMoreExpr{ - pos: position{line: 70, col: 25, offset: 1621}, + pos: position{line: 75, col: 29, offset: 1965}, expr: &charClassMatcher{ - pos: position{line: 70, col: 25, offset: 1621}, + pos: position{line: 75, col: 29, offset: 1965}, val: "[^ :()]", chars: []rune{' ', ':', '(', ')'}, ignoreCase: false, @@ -390,14 +492,14 @@ var g = &grammar{ }, }, &ruleRefExpr{ - pos: position{line: 70, col: 34, offset: 1630}, + pos: position{line: 75, col: 38, offset: 1974}, name: "_", }, &zeroOrOneExpr{ - pos: position{line: 70, col: 36, offset: 1632}, + pos: position{line: 75, col: 40, offset: 1976}, expr: &ruleRefExpr{ - pos: position{line: 70, col: 36, offset: 1632}, - name: "ColonOperator", + pos: position{line: 75, col: 40, offset: 1976}, + name: "OperatorColonNode", }, }, }, @@ -405,28 +507,28 @@ var g = &grammar{ }, }, { - name: "BooleanOperatorNode", - pos: position{line: 78, col: 1, offset: 1837}, + name: "OperatorBooleanNode", + pos: position{line: 83, col: 1, offset: 2185}, expr: &actionExpr{ - pos: position{line: 79, col: 5, offset: 1864}, - run: (*parser).callonBooleanOperatorNode1, + pos: position{line: 84, col: 5, offset: 2212}, + run: (*parser).callonOperatorBooleanNode1, expr: &choiceExpr{ - pos: position{line: 79, col: 6, offset: 1865}, + pos: position{line: 84, col: 6, offset: 2213}, alternatives: []any{ &litMatcher{ - pos: position{line: 79, col: 6, offset: 1865}, + pos: position{line: 84, col: 6, offset: 2213}, val: "AND", ignoreCase: false, want: "\"AND\"", }, &litMatcher{ - pos: position{line: 79, col: 14, offset: 1873}, + pos: position{line: 84, col: 14, offset: 2221}, val: "OR", ignoreCase: false, want: "\"OR\"", }, &litMatcher{ - pos: position{line: 79, col: 21, offset: 1880}, + pos: position{line: 84, col: 21, offset: 2228}, val: "NOT", ignoreCase: false, want: "\"NOT\"", @@ -436,13 +538,13 @@ var g = &grammar{ }, }, { - name: "ColonOperator", - pos: position{line: 83, col: 1, offset: 1944}, + name: "OperatorColonNode", + pos: position{line: 88, col: 1, offset: 2292}, expr: &actionExpr{ - pos: position{line: 84, col: 5, offset: 1965}, - run: (*parser).callonColonOperator1, + pos: position{line: 89, col: 5, offset: 2317}, + run: (*parser).callonOperatorColonNode1, expr: &litMatcher{ - pos: position{line: 84, col: 5, offset: 1965}, + pos: position{line: 89, col: 5, offset: 2317}, val: ":", ignoreCase: false, want: "\":\"", @@ -450,27 +552,359 @@ var g = &grammar{ }, }, { - name: "EqualOperator", - pos: position{line: 88, col: 1, offset: 
2005}, + name: "OperatorEqualNode", + pos: position{line: 93, col: 1, offset: 2378}, expr: &actionExpr{ - pos: position{line: 89, col: 5, offset: 2026}, - run: (*parser).callonEqualOperator1, + pos: position{line: 94, col: 5, offset: 2403}, + run: (*parser).callonOperatorEqualNode1, expr: &litMatcher{ - pos: position{line: 89, col: 5, offset: 2026}, + pos: position{line: 94, col: 5, offset: 2403}, val: "=", ignoreCase: false, want: "\"=\"", }, }, }, + { + name: "OperatorLessNode", + pos: position{line: 98, col: 1, offset: 2464}, + expr: &actionExpr{ + pos: position{line: 99, col: 5, offset: 2488}, + run: (*parser).callonOperatorLessNode1, + expr: &litMatcher{ + pos: position{line: 99, col: 5, offset: 2488}, + val: "<", + ignoreCase: false, + want: "\"<\"", + }, + }, + }, + { + name: "OperatorLessOrEqualNode", + pos: position{line: 103, col: 1, offset: 2549}, + expr: &actionExpr{ + pos: position{line: 104, col: 5, offset: 2580}, + run: (*parser).callonOperatorLessOrEqualNode1, + expr: &litMatcher{ + pos: position{line: 104, col: 5, offset: 2580}, + val: "<=", + ignoreCase: false, + want: "\"<=\"", + }, + }, + }, + { + name: "OperatorGreaterNode", + pos: position{line: 108, col: 1, offset: 2642}, + expr: &actionExpr{ + pos: position{line: 109, col: 5, offset: 2669}, + run: (*parser).callonOperatorGreaterNode1, + expr: &litMatcher{ + pos: position{line: 109, col: 5, offset: 2669}, + val: ">", + ignoreCase: false, + want: "\">\"", + }, + }, + }, + { + name: "OperatorGreaterOrEqualNode", + pos: position{line: 113, col: 1, offset: 2730}, + expr: &actionExpr{ + pos: position{line: 114, col: 5, offset: 2764}, + run: (*parser).callonOperatorGreaterOrEqualNode1, + expr: &litMatcher{ + pos: position{line: 114, col: 5, offset: 2764}, + val: ">=", + ignoreCase: false, + want: "\">=\"", + }, + }, + }, + { + name: "TimeYear", + pos: position{line: 123, col: 1, offset: 2950}, + expr: &actionExpr{ + pos: position{line: 124, col: 5, offset: 2966}, + run: (*parser).callonTimeYear1, + expr: &seqExpr{ + pos: position{line: 124, col: 5, offset: 2966}, + exprs: []any{ + &ruleRefExpr{ + pos: position{line: 124, col: 5, offset: 2966}, + name: "Digit", + }, + &ruleRefExpr{ + pos: position{line: 124, col: 11, offset: 2972}, + name: "Digit", + }, + &ruleRefExpr{ + pos: position{line: 124, col: 17, offset: 2978}, + name: "Digit", + }, + &ruleRefExpr{ + pos: position{line: 124, col: 23, offset: 2984}, + name: "Digit", + }, + }, + }, + }, + }, + { + name: "TimeMonth", + pos: position{line: 128, col: 1, offset: 3026}, + expr: &actionExpr{ + pos: position{line: 129, col: 5, offset: 3043}, + run: (*parser).callonTimeMonth1, + expr: &seqExpr{ + pos: position{line: 129, col: 5, offset: 3043}, + exprs: []any{ + &ruleRefExpr{ + pos: position{line: 129, col: 5, offset: 3043}, + name: "Digit", + }, + &ruleRefExpr{ + pos: position{line: 129, col: 11, offset: 3049}, + name: "Digit", + }, + }, + }, + }, + }, + { + name: "TimeDay", + pos: position{line: 133, col: 1, offset: 3091}, + expr: &actionExpr{ + pos: position{line: 134, col: 5, offset: 3106}, + run: (*parser).callonTimeDay1, + expr: &seqExpr{ + pos: position{line: 134, col: 5, offset: 3106}, + exprs: []any{ + &ruleRefExpr{ + pos: position{line: 134, col: 5, offset: 3106}, + name: "Digit", + }, + &ruleRefExpr{ + pos: position{line: 134, col: 11, offset: 3112}, + name: "Digit", + }, + }, + }, + }, + }, + { + name: "TimeHour", + pos: position{line: 138, col: 1, offset: 3154}, + expr: &actionExpr{ + pos: position{line: 139, col: 5, offset: 3170}, + run: 
(*parser).callonTimeHour1, + expr: &seqExpr{ + pos: position{line: 139, col: 5, offset: 3170}, + exprs: []any{ + &ruleRefExpr{ + pos: position{line: 139, col: 5, offset: 3170}, + name: "Digit", + }, + &ruleRefExpr{ + pos: position{line: 139, col: 11, offset: 3176}, + name: "Digit", + }, + }, + }, + }, + }, + { + name: "TimeMinute", + pos: position{line: 143, col: 1, offset: 3218}, + expr: &actionExpr{ + pos: position{line: 144, col: 5, offset: 3236}, + run: (*parser).callonTimeMinute1, + expr: &seqExpr{ + pos: position{line: 144, col: 5, offset: 3236}, + exprs: []any{ + &ruleRefExpr{ + pos: position{line: 144, col: 5, offset: 3236}, + name: "Digit", + }, + &ruleRefExpr{ + pos: position{line: 144, col: 11, offset: 3242}, + name: "Digit", + }, + }, + }, + }, + }, + { + name: "TimeSecond", + pos: position{line: 148, col: 1, offset: 3284}, + expr: &actionExpr{ + pos: position{line: 149, col: 5, offset: 3302}, + run: (*parser).callonTimeSecond1, + expr: &seqExpr{ + pos: position{line: 149, col: 5, offset: 3302}, + exprs: []any{ + &ruleRefExpr{ + pos: position{line: 149, col: 5, offset: 3302}, + name: "Digit", + }, + &ruleRefExpr{ + pos: position{line: 149, col: 11, offset: 3308}, + name: "Digit", + }, + }, + }, + }, + }, + { + name: "FullDate", + pos: position{line: 153, col: 1, offset: 3350}, + expr: &actionExpr{ + pos: position{line: 154, col: 5, offset: 3366}, + run: (*parser).callonFullDate1, + expr: &seqExpr{ + pos: position{line: 154, col: 5, offset: 3366}, + exprs: []any{ + &ruleRefExpr{ + pos: position{line: 154, col: 5, offset: 3366}, + name: "TimeYear", + }, + &litMatcher{ + pos: position{line: 154, col: 14, offset: 3375}, + val: "-", + ignoreCase: false, + want: "\"-\"", + }, + &ruleRefExpr{ + pos: position{line: 154, col: 18, offset: 3379}, + name: "TimeMonth", + }, + &litMatcher{ + pos: position{line: 154, col: 28, offset: 3389}, + val: "-", + ignoreCase: false, + want: "\"-\"", + }, + &ruleRefExpr{ + pos: position{line: 154, col: 32, offset: 3393}, + name: "TimeDay", + }, + }, + }, + }, + }, + { + name: "FullTime", + pos: position{line: 158, col: 1, offset: 3437}, + expr: &actionExpr{ + pos: position{line: 159, col: 5, offset: 3453}, + run: (*parser).callonFullTime1, + expr: &seqExpr{ + pos: position{line: 159, col: 5, offset: 3453}, + exprs: []any{ + &ruleRefExpr{ + pos: position{line: 159, col: 5, offset: 3453}, + name: "TimeHour", + }, + &litMatcher{ + pos: position{line: 159, col: 14, offset: 3462}, + val: ":", + ignoreCase: false, + want: "\":\"", + }, + &ruleRefExpr{ + pos: position{line: 159, col: 18, offset: 3466}, + name: "TimeMinute", + }, + &litMatcher{ + pos: position{line: 159, col: 29, offset: 3477}, + val: ":", + ignoreCase: false, + want: "\":\"", + }, + &ruleRefExpr{ + pos: position{line: 159, col: 33, offset: 3481}, + name: "TimeSecond", + }, + &zeroOrOneExpr{ + pos: position{line: 159, col: 44, offset: 3492}, + expr: &seqExpr{ + pos: position{line: 159, col: 45, offset: 3493}, + exprs: []any{ + &litMatcher{ + pos: position{line: 159, col: 45, offset: 3493}, + val: ".", + ignoreCase: false, + want: "\".\"", + }, + &oneOrMoreExpr{ + pos: position{line: 159, col: 49, offset: 3497}, + expr: &ruleRefExpr{ + pos: position{line: 159, col: 49, offset: 3497}, + name: "Digit", + }, + }, + }, + }, + }, + &choiceExpr{ + pos: position{line: 159, col: 59, offset: 3507}, + alternatives: []any{ + &litMatcher{ + pos: position{line: 159, col: 59, offset: 3507}, + val: "Z", + ignoreCase: false, + want: "\"Z\"", + }, + &seqExpr{ + pos: position{line: 159, col: 65, offset: 3513}, + 
exprs: []any{ + &choiceExpr{ + pos: position{line: 159, col: 66, offset: 3514}, + alternatives: []any{ + &litMatcher{ + pos: position{line: 159, col: 66, offset: 3514}, + val: "+", + ignoreCase: false, + want: "\"+\"", + }, + &litMatcher{ + pos: position{line: 159, col: 72, offset: 3520}, + val: "-", + ignoreCase: false, + want: "\"-\"", + }, + }, + }, + &ruleRefExpr{ + pos: position{line: 159, col: 77, offset: 3525}, + name: "TimeHour", + }, + &litMatcher{ + pos: position{line: 159, col: 86, offset: 3534}, + val: ":", + ignoreCase: false, + want: "\":\"", + }, + &ruleRefExpr{ + pos: position{line: 159, col: 90, offset: 3538}, + name: "TimeMinute", + }, + }, + }, + }, + }, + }, + }, + }, + }, { name: "Char", - pos: position{line: 97, col: 1, offset: 2189}, + pos: position{line: 167, col: 1, offset: 3709}, expr: &actionExpr{ - pos: position{line: 98, col: 5, offset: 2201}, + pos: position{line: 168, col: 5, offset: 3721}, run: (*parser).callonChar1, expr: &charClassMatcher{ - pos: position{line: 98, col: 5, offset: 2201}, + pos: position{line: 168, col: 5, offset: 3721}, val: "[A-Za-z]", ranges: []rune{'A', 'Z', 'a', 'z'}, ignoreCase: false, @@ -480,26 +914,26 @@ var g = &grammar{ }, { name: "String", - pos: position{line: 102, col: 1, offset: 2246}, + pos: position{line: 172, col: 1, offset: 3766}, expr: &actionExpr{ - pos: position{line: 103, col: 5, offset: 2260}, + pos: position{line: 173, col: 5, offset: 3780}, run: (*parser).callonString1, expr: &seqExpr{ - pos: position{line: 103, col: 5, offset: 2260}, + pos: position{line: 173, col: 5, offset: 3780}, exprs: []any{ &litMatcher{ - pos: position{line: 103, col: 5, offset: 2260}, + pos: position{line: 173, col: 5, offset: 3780}, val: "\"", ignoreCase: false, want: "\"\\\"\"", }, &labeledExpr{ - pos: position{line: 103, col: 9, offset: 2264}, + pos: position{line: 173, col: 9, offset: 3784}, label: "v", expr: &zeroOrMoreExpr{ - pos: position{line: 103, col: 11, offset: 2266}, + pos: position{line: 173, col: 11, offset: 3786}, expr: &charClassMatcher{ - pos: position{line: 103, col: 11, offset: 2266}, + pos: position{line: 173, col: 11, offset: 3786}, val: "[^\"]", chars: []rune{'"'}, ignoreCase: false, @@ -508,7 +942,7 @@ var g = &grammar{ }, }, &litMatcher{ - pos: position{line: 103, col: 17, offset: 2272}, + pos: position{line: 173, col: 17, offset: 3792}, val: "\"", ignoreCase: false, want: "\"\\\"\"", @@ -517,13 +951,28 @@ var g = &grammar{ }, }, }, + { + name: "Digit", + pos: position{line: 177, col: 1, offset: 3827}, + expr: &actionExpr{ + pos: position{line: 178, col: 5, offset: 3840}, + run: (*parser).callonDigit1, + expr: &charClassMatcher{ + pos: position{line: 178, col: 5, offset: 3840}, + val: "[0-9]", + ranges: []rune{'0', '9'}, + ignoreCase: false, + inverted: false, + }, + }, + }, { name: "_", - pos: position{line: 107, col: 1, offset: 2307}, + pos: position{line: 182, col: 1, offset: 3882}, expr: &zeroOrMoreExpr{ - pos: position{line: 108, col: 5, offset: 2316}, + pos: position{line: 183, col: 5, offset: 3891}, expr: &charClassMatcher{ - pos: position{line: 108, col: 5, offset: 2316}, + pos: position{line: 183, col: 5, offset: 3891}, val: "[ \\t]", chars: []rune{' ', '\t'}, ignoreCase: false, @@ -578,6 +1027,17 @@ func (p *parser) callonYesNoPropertyRestrictionNode1() (any, error) { return p.cur.onYesNoPropertyRestrictionNode1(stack["k"], stack["v"]) } +func (c *current) onDateTimeRestrictionNode1(k, o, v any) (any, error) { + return buildDateTimeNode(k, o, v, c.text, c.pos) + +} + +func (p *parser) 
callonDateTimeRestrictionNode1() (any, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onDateTimeRestrictionNode1(stack["k"], stack["o"], stack["v"]) +} + func (c *current) onTextPropertyRestrictionNode1(k, v any) (any, error) { return buildStringNode(k, v, c.text, c.pos) @@ -611,37 +1071,169 @@ func (p *parser) callonWordNode1() (any, error) { return p.cur.onWordNode1(stack["v"]) } -func (c *current) onBooleanOperatorNode1() (any, error) { +func (c *current) onOperatorBooleanNode1() (any, error) { return buildOperatorNode(c.text, c.pos) } -func (p *parser) callonBooleanOperatorNode1() (any, error) { +func (p *parser) callonOperatorBooleanNode1() (any, error) { stack := p.vstack[len(p.vstack)-1] _ = stack - return p.cur.onBooleanOperatorNode1() + return p.cur.onOperatorBooleanNode1() +} + +func (c *current) onOperatorColonNode1() (any, error) { + return buildOperatorNode(c.text, c.pos) + } -func (c *current) onColonOperator1() (any, error) { +func (p *parser) callonOperatorColonNode1() (any, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onOperatorColonNode1() +} + +func (c *current) onOperatorEqualNode1() (any, error) { + return buildOperatorNode(c.text, c.pos) + +} + +func (p *parser) callonOperatorEqualNode1() (any, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onOperatorEqualNode1() +} + +func (c *current) onOperatorLessNode1() (any, error) { + return buildOperatorNode(c.text, c.pos) + +} + +func (p *parser) callonOperatorLessNode1() (any, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onOperatorLessNode1() +} + +func (c *current) onOperatorLessOrEqualNode1() (any, error) { + return buildOperatorNode(c.text, c.pos) + +} + +func (p *parser) callonOperatorLessOrEqualNode1() (any, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onOperatorLessOrEqualNode1() +} + +func (c *current) onOperatorGreaterNode1() (any, error) { + return buildOperatorNode(c.text, c.pos) + +} + +func (p *parser) callonOperatorGreaterNode1() (any, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onOperatorGreaterNode1() +} + +func (c *current) onOperatorGreaterOrEqualNode1() (any, error) { + return buildOperatorNode(c.text, c.pos) + +} + +func (p *parser) callonOperatorGreaterOrEqualNode1() (any, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onOperatorGreaterOrEqualNode1() +} + +func (c *current) onTimeYear1() (any, error) { return c.text, nil } -func (p *parser) callonColonOperator1() (any, error) { +func (p *parser) callonTimeYear1() (any, error) { stack := p.vstack[len(p.vstack)-1] _ = stack - return p.cur.onColonOperator1() + return p.cur.onTimeYear1() } -func (c *current) onEqualOperator1() (any, error) { +func (c *current) onTimeMonth1() (any, error) { return c.text, nil } -func (p *parser) callonEqualOperator1() (any, error) { +func (p *parser) callonTimeMonth1() (any, error) { stack := p.vstack[len(p.vstack)-1] _ = stack - return p.cur.onEqualOperator1() + return p.cur.onTimeMonth1() +} + +func (c *current) onTimeDay1() (any, error) { + return c.text, nil + +} + +func (p *parser) callonTimeDay1() (any, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onTimeDay1() +} + +func (c *current) onTimeHour1() (any, error) { + return c.text, nil + +} + +func (p *parser) callonTimeHour1() (any, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onTimeHour1() +} + +func (c *current) onTimeMinute1() 
(any, error) { + return c.text, nil + +} + +func (p *parser) callonTimeMinute1() (any, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onTimeMinute1() +} + +func (c *current) onTimeSecond1() (any, error) { + return c.text, nil + +} + +func (p *parser) callonTimeSecond1() (any, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onTimeSecond1() +} + +func (c *current) onFullDate1() (any, error) { + return c.text, nil + +} + +func (p *parser) callonFullDate1() (any, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onFullDate1() +} + +func (c *current) onFullTime1() (any, error) { + return c.text, nil + +} + +func (p *parser) callonFullTime1() (any, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onFullTime1() } func (c *current) onChar1() (any, error) { @@ -666,6 +1258,17 @@ func (p *parser) callonString1() (any, error) { return p.cur.onString1(stack["v"]) } +func (c *current) onDigit1() (any, error) { + return c.text, nil + +} + +func (p *parser) callonDigit1() (any, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onDigit1() +} + var ( // errNoRule is returned when the grammar to parse has no rule. errNoRule = errors.New("grammar has no rule") diff --git a/services/search/pkg/query/kql/dictionary_test.go b/services/search/pkg/query/kql/dictionary_test.go index b73ab7cacb6..0b56eba69de 100644 --- a/services/search/pkg/query/kql/dictionary_test.go +++ b/services/search/pkg/query/kql/dictionary_test.go @@ -3,6 +3,7 @@ package kql_test import ( "strings" "testing" + "time" tAssert "github.com/stretchr/testify/assert" @@ -11,6 +12,15 @@ import ( "github.com/owncloud/ocis/v2/services/search/pkg/query/kql" ) +var timeMustParse = func(t *testing.T, ts string) time.Time { + tp, err := time.Parse(time.RFC3339Nano, ts) + if err != nil { + t.Fatalf("time.Parse(...) 
error = %v", err) + } + + return tp +} + var FullDictionary = []string{ `federated search`, `federat* search`, @@ -242,6 +252,133 @@ func TestParse(t *testing.T) { }, }, }, + { + name: "unicode", + givenQuery: []string{ + ` 😂 "*😀 😁*" name:😂💁👌🎍😍 name:😂💁👌 😍`, + }, + expectedAst: &ast.Ast{ + Nodes: []ast.Node{ + &ast.StringNode{ + Value: "😂", + }, + &ast.StringNode{ + Value: "*😀 😁*", + }, + &ast.StringNode{ + Key: "name", + Value: "😂💁👌🎍😍", + }, + &ast.StringNode{ + Key: "name", + Value: "😂💁👌", + }, + &ast.StringNode{ + Value: "😍", + }, + }, + }, + }, + { + name: "DateTimeRestrictionNode", + givenQuery: []string{ + `Mtime:"2023-09-05T08:42:11.23554+02:00"`, + `Mtime:2023-09-05T08:42:11.23554+02:00`, + `Mtime="2023-09-05T08:42:11.23554+02:00"`, + `Mtime=2023-09-05T08:42:11.23554+02:00`, + `Mtime<"2023-09-05T08:42:11.23554+02:00"`, + `Mtime<2023-09-05T08:42:11.23554+02:00`, + `Mtime<="2023-09-05T08:42:11.23554+02:00"`, + `Mtime<=2023-09-05T08:42:11.23554+02:00`, + `Mtime>"2023-09-05T08:42:11.23554+02:00"`, + `Mtime>2023-09-05T08:42:11.23554+02:00`, + `Mtime>="2023-09-05T08:42:11.23554+02:00"`, + `Mtime>=2023-09-05T08:42:11.23554+02:00`, + }, + expectedAst: &ast.Ast{ + Nodes: []ast.Node{ + &ast.DateTimeNode{ + Key: "Mtime", + Operator: &ast.OperatorNode{Value: ":"}, + Value: timeMustParse(t, "2023-09-05T08:42:11.23554+02:00"), + }, + &ast.DateTimeNode{ + Key: "Mtime", + Operator: &ast.OperatorNode{Value: ":"}, + Value: timeMustParse(t, "2023-09-05T08:42:11.23554+02:00"), + }, + &ast.DateTimeNode{ + Key: "Mtime", + Operator: &ast.OperatorNode{Value: "="}, + Value: timeMustParse(t, "2023-09-05T08:42:11.23554+02:00"), + }, + &ast.DateTimeNode{ + Key: "Mtime", + Operator: &ast.OperatorNode{Value: "="}, + Value: timeMustParse(t, "2023-09-05T08:42:11.23554+02:00"), + }, + &ast.DateTimeNode{ + Key: "Mtime", + Operator: &ast.OperatorNode{Value: "<"}, + Value: timeMustParse(t, "2023-09-05T08:42:11.23554+02:00"), + }, + &ast.DateTimeNode{ + Key: "Mtime", + Operator: &ast.OperatorNode{Value: "<"}, + Value: timeMustParse(t, "2023-09-05T08:42:11.23554+02:00"), + }, + &ast.DateTimeNode{ + Key: "Mtime", + Operator: &ast.OperatorNode{Value: "<="}, + Value: timeMustParse(t, "2023-09-05T08:42:11.23554+02:00"), + }, + &ast.DateTimeNode{ + Key: "Mtime", + Operator: &ast.OperatorNode{Value: "<="}, + Value: timeMustParse(t, "2023-09-05T08:42:11.23554+02:00"), + }, + &ast.DateTimeNode{ + Key: "Mtime", + Operator: &ast.OperatorNode{Value: ">"}, + Value: timeMustParse(t, "2023-09-05T08:42:11.23554+02:00"), + }, + &ast.DateTimeNode{ + Key: "Mtime", + Operator: &ast.OperatorNode{Value: ">"}, + Value: timeMustParse(t, "2023-09-05T08:42:11.23554+02:00"), + }, + &ast.DateTimeNode{ + Key: "Mtime", + Operator: &ast.OperatorNode{Value: ">="}, + Value: timeMustParse(t, "2023-09-05T08:42:11.23554+02:00"), + }, + &ast.DateTimeNode{ + Key: "Mtime", + Operator: &ast.OperatorNode{Value: ">="}, + Value: timeMustParse(t, "2023-09-05T08:42:11.23554+02:00"), + }, + }, + }, + }, + { + name: "id", + givenQuery: []string{ + `id:b27d3bf1-b254-459f-92e8-bdba668d6d3f$d0648459-25fb-4ed8-8684-bc62c7dca29c!d0648459-25fb-4ed8-8684-bc62c7dca29c`, + `ID:b27d3bf1-b254-459f-92e8-bdba668d6d3f$d0648459-25fb-4ed8-8684-bc62c7dca29c!d0648459-25fb-4ed8-8684-bc62c7dca29c`, + }, + expectedAst: &ast.Ast{ + Nodes: []ast.Node{ + &ast.StringNode{ + Key: "id", + Value: "b27d3bf1-b254-459f-92e8-bdba668d6d3f$d0648459-25fb-4ed8-8684-bc62c7dca29c!d0648459-25fb-4ed8-8684-bc62c7dca29c", + }, + &ast.StringNode{ + Key: "ID", + Value: 
"b27d3bf1-b254-459f-92e8-bdba668d6d3f$d0648459-25fb-4ed8-8684-bc62c7dca29c!d0648459-25fb-4ed8-8684-bc62c7dca29c", + }, + }, + }, + }, } assert := tAssert.New(t) diff --git a/services/search/pkg/query/kql/factory.go b/services/search/pkg/query/kql/factory.go index fc30bde873f..b29a4ab61b9 100644 --- a/services/search/pkg/query/kql/factory.go +++ b/services/search/pkg/query/kql/factory.go @@ -33,7 +33,7 @@ func buildAST(n interface{}, text []byte, pos position) (*ast.Ast, error) { return nil, err } - nodes, err := toNodes(n) + nodes, err := toNodes[ast.Node](n) if err != nil { return nil, err } @@ -54,7 +54,7 @@ func buildNodes(e interface{}) ([]ast.Node, error) { nodes := make([]ast.Node, len(maybeNodesGroups)) for i, maybeNodesGroup := range maybeNodesGroups { - node, err := toNode(toIfaceSlice(maybeNodesGroup)[1]) + node, err := toNode[ast.Node](toIfaceSlice(maybeNodesGroup)[1]) if err != nil { return nil, err } @@ -88,6 +88,35 @@ func buildStringNode(k, v interface{}, text []byte, pos position) (*ast.StringNo }, nil } +func buildDateTimeNode(k, o, v interface{}, text []byte, pos position) (*ast.DateTimeNode, error) { + b, err := base(text, pos) + if err != nil { + return nil, err + } + + operator, err := toNode[*ast.OperatorNode](o) + if err != nil { + return nil, err + } + + key, err := toString(k) + if err != nil { + return nil, err + } + + value, err := toTime(v) + if err != nil { + return nil, err + } + + return &ast.DateTimeNode{ + Base: b, + Key: key, + Operator: operator, + Value: value, + }, nil +} + func buildBooleanNode(k, v interface{}, text []byte, pos position) (*ast.BooleanNode, error) { b, err := base(text, pos) if err != nil { @@ -117,9 +146,14 @@ func buildOperatorNode(text []byte, pos position) (*ast.OperatorNode, error) { return nil, err } + value, err := toString(text) + if err != nil { + return nil, err + } + return &ast.OperatorNode{ Base: b, - Value: string(text), + Value: value, }, nil } @@ -131,7 +165,7 @@ func buildGroupNode(k, n interface{}, text []byte, pos position) (*ast.GroupNode key, _ := toString(k) - nodes, err := toNodes(n) + nodes, err := toNodes[ast.Node](n) if err != nil { return nil, err } diff --git a/services/search/pkg/query/kql/kql.go b/services/search/pkg/query/kql/kql.go index 2422c6cdecb..c49d1fb2c2f 100644 --- a/services/search/pkg/query/kql/kql.go +++ b/services/search/pkg/query/kql/kql.go @@ -9,8 +9,8 @@ import ( type Builder struct{} // Build creates an ast.Ast based on a kql query -func (b Builder) Build(q string, opts ...Option) (*ast.Ast, error) { - f, err := Parse("", []byte(q), opts...) 
+func (b Builder) Build(q string) (*ast.Ast, error) { + f, err := Parse("", []byte(q)) if err != nil { return nil, err } diff --git a/services/search/pkg/query/kql/normalize.go b/services/search/pkg/query/kql/normalize.go index 7b7dfd3f8f7..9ee287c5b6b 100644 --- a/services/search/pkg/query/kql/normalize.go +++ b/services/search/pkg/query/kql/normalize.go @@ -46,6 +46,14 @@ func NormalizeNodes(nodes []ast.Node) ([]ast.Node, error) { } currentNode = n currentKey = &n.Key + case *ast.DateTimeNode: + if prevKey == nil { + prevKey = &n.Key + res = append(res, node) + continue + } + currentNode = n + currentKey = &n.Key case *ast.BooleanNode: if prevKey == nil { prevKey = &n.Key diff --git a/services/search/pkg/query/kql/normalize_test.go b/services/search/pkg/query/kql/normalize_test.go index 8d3f36ec27b..7beaf60e70a 100644 --- a/services/search/pkg/query/kql/normalize_test.go +++ b/services/search/pkg/query/kql/normalize_test.go @@ -2,6 +2,7 @@ package kql_test import ( "testing" + "time" tAssert "github.com/stretchr/testify/assert" @@ -10,6 +11,8 @@ import ( "github.com/owncloud/ocis/v2/services/search/pkg/query/kql" ) +var now = time.Now() + func TestNormalizeNodes(t *testing.T) { tests := []struct { name string @@ -85,11 +88,14 @@ func TestNormalizeNodes(t *testing.T) { givenNodes: []ast.Node{ &ast.StringNode{Key: "author", Value: "John Smith"}, &ast.StringNode{Key: "filetype", Value: "docx"}, + &ast.DateTimeNode{Key: "mtime", Operator: &ast.OperatorNode{Value: "="}, Value: now}, }, expectedNodes: []ast.Node{ &ast.StringNode{Key: "author", Value: "John Smith"}, &ast.OperatorNode{Value: "AND"}, &ast.StringNode{Key: "filetype", Value: "docx"}, + &ast.OperatorNode{Value: "AND"}, + &ast.DateTimeNode{Key: "mtime", Operator: &ast.OperatorNode{Value: "="}, Value: now}, }, }, } diff --git a/services/search/pkg/search/search.go b/services/search/pkg/search/search.go index 26213790ac8..77eb5b741b6 100644 --- a/services/search/pkg/search/search.go +++ b/services/search/pkg/search/search.go @@ -11,8 +11,8 @@ import ( rpc "github.com/cs3org/go-cs3apis/cs3/rpc/v1beta1" provider "github.com/cs3org/go-cs3apis/cs3/storage/provider/v1beta1" "github.com/cs3org/reva/v2/pkg/rgrpc/todo/pool" - "github.com/cs3org/reva/v2/pkg/storagespace" "github.com/cs3org/reva/v2/pkg/utils" + "github.com/owncloud/ocis/v2/ocis-pkg/log" searchmsg "github.com/owncloud/ocis/v2/protogen/gen/ocis/messages/search/v0" "github.com/owncloud/ocis/v2/services/search/pkg/engine" @@ -140,21 +140,6 @@ func convertToWebDAVPermissions(isShared, isMountpoint, isDir bool, p *provider. 
return b.String() } -func extractScope(path string) (*provider.Reference, error) { - ref, err := storagespace.ParseReference(path) - if err != nil { - return nil, err - } - return &provider.Reference{ - ResourceId: &provider.ResourceId{ - StorageId: ref.ResourceId.StorageId, - SpaceId: ref.ResourceId.SpaceId, - OpaqueId: ref.ResourceId.OpaqueId, - }, - Path: ref.GetPath(), - }, nil -} - // ParseScope extract a scope value from the query string and returns search, scope strings func ParseScope(query string) (string, string) { match := scopeRegex.FindStringSubmatch(query) diff --git a/services/search/pkg/search/service.go b/services/search/pkg/search/service.go index 3bddd8b5ffb..8b8ee1f0c66 100644 --- a/services/search/pkg/search/service.go +++ b/services/search/pkg/search/service.go @@ -97,16 +97,16 @@ func (s *Service) Search(ctx context.Context, req *searchsvc.SearchRequest) (*se } req.Query = query if len(scope) > 0 { - // if req.Ref != nil { - // return nil, errtypes.BadRequest("cannot scope a search that is limited to a resource") - // } - scopeRef, err := extractScope(scope) + scopedID, err := storagespace.ParseID(scope) if err != nil { - return nil, err + s.logger.Error().Err(err).Msg("failed to parse scope") } + // Stat the scope to get the resource id statRes, err := gatewayClient.Stat(ctx, &provider.StatRequest{ - Ref: scopeRef, + Ref: &provider.Reference{ + ResourceId: &scopedID, + }, FieldMask: &fieldmaskpb.FieldMask{Paths: []string{"space"}}, }) if err != nil { @@ -418,7 +418,7 @@ func (s *Service) IndexSpace(spaceID *provider.StorageSpaceId, uID *user.UserId) s.logger.Debug().Str("path", ref.Path).Msg("Walking tree") searchRes, err := s.engine.Search(ownerCtx, &searchsvc.SearchIndexRequest{ - Query: "+ID:" + storagespace.FormatResourceID(*info.Id) + ` +Mtime:>="` + utils.TSToTime(info.Mtime).Format(time.RFC3339Nano) + `"`, + Query: "id:" + storagespace.FormatResourceID(*info.Id) + ` mtime>=` + utils.TSToTime(info.Mtime).Format(time.RFC3339Nano), }) if err == nil && len(searchRes.Matches) >= 1 { diff --git a/services/search/pkg/service/grpc/v0/service.go b/services/search/pkg/service/grpc/v0/service.go index 75f4e22925e..559264156aa 100644 --- a/services/search/pkg/service/grpc/v0/service.go +++ b/services/search/pkg/service/grpc/v0/service.go @@ -49,7 +49,7 @@ func NewHandler(opts ...Option) (searchsvc.SearchProviderHandler, func(), error) _ = idx.Close() } - eng = engine.NewBleveEngine(idx, bleve.LegacyCreator) + eng = engine.NewBleveEngine(idx, bleve.DefaultCreator) default: return nil, teardown, fmt.Errorf("unknown search engine: %s", cfg.Engine.Type) } diff --git a/services/web/Makefile b/services/web/Makefile index 46e1d4e3382..142ed1e5acb 100644 --- a/services/web/Makefile +++ b/services/web/Makefile @@ -1,6 +1,6 @@ SHELL := bash NAME := web -WEB_ASSETS_VERSION = v8.0.0-alpha.1 +WEB_ASSETS_VERSION = v8.0.0-alpha.2 include ../../.make/recursion.mk diff --git a/services/webdav/pkg/service/v0/service.go b/services/webdav/pkg/service/v0/service.go index 56f4e2513e2..4c1535f3fec 100644 --- a/services/webdav/pkg/service/v0/service.go +++ b/services/webdav/pkg/service/v0/service.go @@ -17,6 +17,10 @@ import ( "github.com/cs3org/reva/v2/pkg/storage/utils/templates" "github.com/go-chi/chi/v5" "github.com/go-chi/render" + "github.com/riandyrn/otelchi" + merrors "go-micro.dev/v4/errors" + grpcmetadata "google.golang.org/grpc/metadata" + "github.com/owncloud/ocis/v2/ocis-pkg/log" "github.com/owncloud/ocis/v2/ocis-pkg/registry" "github.com/owncloud/ocis/v2/ocis-pkg/tracing" @@ -26,9 
+30,6 @@ import ( "github.com/owncloud/ocis/v2/services/webdav/pkg/config" "github.com/owncloud/ocis/v2/services/webdav/pkg/constants" "github.com/owncloud/ocis/v2/services/webdav/pkg/dav/requests" - "github.com/riandyrn/otelchi" - merrors "go-micro.dev/v4/errors" - "google.golang.org/grpc/metadata" ) func init() { @@ -113,6 +114,7 @@ func NewService(opts ...Option) (Service, error) { r.Get("/remote.php/dav/files/{id}/*", svc.Thumbnail) r.Get("/dav/files/{id}", svc.Thumbnail) r.Get("/dav/files/{id}/*", svc.Thumbnail) + r.MethodFunc("REPORT", "/remote.php/dav/files*", svc.Search) r.MethodFunc("REPORT", "/dav/files*", svc.Search) }) @@ -309,7 +311,7 @@ func (g Webdav) Thumbnail(w http.ResponseWriter, r *http.Request) { user = userRes.GetUser() } else { // look up user from URL via GetUserByClaim - ctx := metadata.AppendToOutgoingContext(r.Context(), TokenHeader, t) + ctx := grpcmetadata.AppendToOutgoingContext(r.Context(), TokenHeader, t) userRes, err := gatewayClient.GetUserByClaim(ctx, &userv1beta1.GetUserByClaimRequest{ Claim: "username", Value: tr.Identifier, diff --git a/tests/acceptance/features/apiContract/spacesReport.feature b/tests/acceptance/features/apiContract/spacesReport.feature index 9333ee09ca0..568e8c11944 100644 --- a/tests/acceptance/features/apiContract/spacesReport.feature +++ b/tests/acceptance/features/apiContract/spacesReport.feature @@ -74,7 +74,7 @@ Feature: REPORT request to project space Scenario: check the response of the searched sub-folder Given user "Alice" has created a folder "folderMain/sub-folder" in space "findData" And using new DAV path - When user "Alice" searches for "sub" using the WebDAV API + When user "Alice" searches for "*sub*" using the WebDAV API Then the HTTP status code should be "207" And the search result of user "Alice" should contain only these entries: | /folderMain/sub-folder | diff --git a/tests/acceptance/features/apiSearch/apiSpaceSearch.feature b/tests/acceptance/features/apiSearch/apiSpaceSearch.feature index 129030484c8..fdbd277a705 100644 --- a/tests/acceptance/features/apiSearch/apiSpaceSearch.feature +++ b/tests/acceptance/features/apiSearch/apiSpaceSearch.feature @@ -20,7 +20,7 @@ Feature: Search Scenario: user can find data from the project space - When user "Alice" searches for "fol" using the WebDAV API + When user "Alice" searches for "*fol*" using the WebDAV API Then the HTTP status code should be "207" And the search result should contain "4" entries And the search result of user "Alice" should contain these entries: @@ -31,7 +31,7 @@ Feature: Search Scenario: user can only find data that they searched for from the project space - When user "Alice" searches for "SUB" using the WebDAV API + When user "Alice" searches for "*SUB*" using the WebDAV API Then the HTTP status code should be "207" And the search result should contain "2" entries And the search result of user "Alice" should contain these entries: @@ -48,7 +48,7 @@ Feature: Search | shareWith | Brian | | role | viewer | And user "Brian" has accepted share "/folderMain" offered by user "Alice" - When user "Brian" searches for "folder" using the WebDAV API + When user "Brian" searches for "*folder*" using the WebDAV API Then the HTTP status code should be "207" And the search result should contain "4" entries And the search result of user "Brian" should contain these entries: @@ -60,7 +60,7 @@ Feature: Search Scenario: user can find hidden file Given user "Alice" has created a folder ".space" in space "find data" - When user "Alice" searches for ".sp" using the 
WebDAV API + When user "Alice" searches for "*.sp*" using the WebDAV API Then the HTTP status code should be "207" And the search result should contain "1" entries And the search result of user "Alice" should contain these entries: @@ -72,7 +72,7 @@ Feature: Search | path | folderMain | | shareWith | Brian | | role | viewer | - When user "Brian" searches for "folder" using the WebDAV API + When user "Brian" searches for "*folder*" using the WebDAV API Then the HTTP status code should be "207" And the search result should contain "0" entries And the search result of user "Brian" should not contain these entries: @@ -87,7 +87,7 @@ Feature: Search | shareWith | Brian | | role | viewer | And user "Brian" has declined share "/folderMain" offered by user "Alice" - When user "Brian" searches for "folder" using the WebDAV API + When user "Brian" searches for "*folder*" using the WebDAV API Then the HTTP status code should be "207" And the search result should contain "0" entries And the search result of user "Brian" should not contain these entries: @@ -98,20 +98,20 @@ Feature: Search Scenario: user cannot find deleted folder Given user "Alice" has removed the folder "folderMain" from space "find data" - When user "Alice" searches for "folderMain" using the WebDAV API + When user "Alice" searches for "*folderMain*" using the WebDAV API Then the HTTP status code should be "207" And the search result should contain "0" entries Scenario: user can find project space by name - When user "Alice" searches for "find data" using the WebDAV API + When user "Alice" searches for '"*find data*"' using the WebDAV API Then the HTTP status code should be "207" And the search result should contain "1" entries And for user "Alice" the search result should contain space "find data" Scenario: user can search inside folder in space - When user "Alice" searches for "folder" inside folder "/folderMain" in space "find data" using the WebDAV API + When user "Alice" searches for "*folder*" inside folder "/folderMain" in space "find data" using the WebDAV API Then the HTTP status code should be "207" And the search result should contain "3" entries And the search result of user "Alice" should contain only these entries: @@ -128,7 +128,7 @@ Feature: Search | shareWith | Brian | | role | viewer | And user "Brian" has accepted share "/folderMain" offered by user "Alice" - When user "Brian" searches for "folder" inside folder "/folderMain" in space "Shares" using the WebDAV API + When user "Brian" searches for "*folder*" inside folder "/folderMain" in space "Shares" using the WebDAV API Then the HTTP status code should be "207" And the search result of user "Brian" should contain only these entries: | /SubFolder1 | @@ -136,7 +136,7 @@ Feature: Search | /SubFolder1/subFOLDER2/insideTheFolder.txt | But the search result of user "Brian" should not contain these entries: | /folderMain | - + @issue-enterprise-6000 Scenario: sharee cannot find resources that are not shared Given user "Alice" has created a folder "foo/sharedToBrian" in space "Alice Hansen" @@ -146,7 +146,7 @@ Feature: Search | shareWith | Brian | | role | viewer | And user "Brian" has accepted share "/foo" offered by user "Alice" - When user "Brian" searches for "shared" using the WebDAV API + When user "Brian" searches for "shared*" using the WebDAV API Then the HTTP status code should be "207" And the search result of user "Brian" should contain these entries: | /sharedToBrian | diff --git a/tests/acceptance/features/apiSearch/tagSearch.feature 
b/tests/acceptance/features/apiSearch/tagSearch.feature index 140d9b26467..239192a6200 100644 --- a/tests/acceptance/features/apiSearch/tagSearch.feature +++ b/tests/acceptance/features/apiSearch/tagSearch.feature @@ -243,7 +243,7 @@ Feature: tag search And user "Alice" has uploaded file with content "hello world inside folder" to "/Folder/file2.txt" And user "Alice" has created folder "/Folder/SubFolder" And user "Alice" has uploaded file with content "hello world inside sub-folder" to "/Folder/SubFolder/file3.txt" - When user "Alice" searches for "file" inside folder "/Folder" using the WebDAV API + When user "Alice" searches for "*file*" inside folder "/Folder" using the WebDAV API Then the HTTP status code should be "207" And the search result of user "Alice" should contain only these entries: | file2.txt | diff --git a/tests/acceptance/features/bootstrap/SearchContext.php b/tests/acceptance/features/bootstrap/SearchContext.php index f67a1d894d5..67d21ce2a8c 100644 --- a/tests/acceptance/features/bootstrap/SearchContext.php +++ b/tests/acceptance/features/bootstrap/SearchContext.php @@ -69,18 +69,10 @@ public function userSearchesUsingWebDavAPI( = "\n" . " \n" . " \n"; - if ($scope !== null && $spaceName !== null) { + if ($scope !== null) { $scope = \trim($scope, "/"); - $spaceId = $this->featureContext->spacesContext->getSpaceIdByName($user, $spaceName); - $pattern .= " scope:$spaceId/$scope"; - } elseif ($scope !== null) { - $scope = \trim($scope, "/"); - if ($this->featureContext->getDavPathVersion() === 3) { - $rootPath = $this->featureContext->getPersonalSpaceIdForUser($user); - } else { - $rootPath = $this->featureContext->getUserIdByUserName($user); - } - $pattern .= " scope:$rootPath/$scope"; + $resourceID = $this->featureContext->spacesContext->getResourceId($user, $spaceName ?? 
"Personal", $scope); + $pattern .= " scope:$resourceID"; } $body .= "$pattern\n"; if ($limit !== null) { diff --git a/tests/acceptance/features/coreApiWebdavOperations/search.feature b/tests/acceptance/features/coreApiWebdavOperations/search.feature index 3329cfc208e..568e0a1a61b 100644 --- a/tests/acceptance/features/coreApiWebdavOperations/search.feature +++ b/tests/acceptance/features/coreApiWebdavOperations/search.feature @@ -24,7 +24,7 @@ Feature: Search @smokeTest Scenario Outline: search for entry by pattern Given using DAV path - When user "Alice" searches for "upload" using the WebDAV API + When user "Alice" searches for "*upload*" using the WebDAV API Then the HTTP status code should be "207" And the search result of user "Alice" should contain these entries: | /upload.txt | @@ -51,7 +51,7 @@ Feature: Search Scenario Outline: search for entries by only some letters from the middle of the entry name Given using DAV path And user "Alice" has created folder "FOLDER" - When user "Alice" searches for "ol" using the WebDAV API + When user "Alice" searches for "*ol*" using the WebDAV API Then the HTTP status code should be "207" And the search result should contain "4" entries And the search result of user "Alice" should contain these entries: @@ -72,7 +72,7 @@ Feature: Search Scenario Outline: search for files by extension Given using DAV path - When user "Alice" searches for "png" using the WebDAV API + When user "Alice" searches for "*png*" using the WebDAV API Then the HTTP status code should be "207" And the search result of user "Alice" should contain these entries: | /a-image.png | @@ -110,7 +110,7 @@ Feature: Search Scenario Outline: limit returned search entries Given using DAV path - When user "Alice" searches for "upload" and limits the results to "3" items using the WebDAV API + When user "Alice" searches for "*upload*" and limits the results to "3" items using the WebDAV API Then the HTTP status code should be "207" And the search result of user "Alice" should contain any "3" of these entries: | /just-a-folder/upload.txt | @@ -134,7 +134,7 @@ Feature: Search Scenario Outline: limit returned search entries to only 1 entry Given using DAV path - When user "Alice" searches for "upload" and limits the results to "1" items using the WebDAV API + When user "Alice" searches for "*upload*" and limits the results to "1" items using the WebDAV API Then the HTTP status code should be "207" And the search result of user "Alice" should contain any "1" of these entries: | /just-a-folder/upload.txt | @@ -158,7 +158,7 @@ Feature: Search Scenario Outline: limit returned search entries to more entries than there are Given using DAV path - When user "Alice" searches for "upload" and limits the results to "100" items using the WebDAV API + When user "Alice" searches for "*upload*" and limits the results to "100" items using the WebDAV API Then the HTTP status code should be "207" And the search result should contain "8" entries And the search result of user "Alice" should contain these entries: @@ -183,7 +183,7 @@ Feature: Search @issue-4712 Scenario Outline: report extra properties in search entries for a file Given using DAV path - When user "Alice" searches for "upload" using the WebDAV API requesting these properties: + When user "Alice" searches for "*upload*" using the WebDAV API requesting these properties: | oc:fileid | | oc:permissions | | a:getlastmodified | @@ -216,7 +216,7 @@ Feature: Search @issue-4712 Scenario Outline: report extra properties in search entries for a folder Given 
using DAV path - When user "Alice" searches for "upload" using the WebDAV API requesting these properties: + When user "Alice" searches for "*upload*" using the WebDAV API requesting these properties: | oc:fileid | | oc:permissions | | a:getlastmodified | @@ -248,7 +248,7 @@ Feature: Search Scenario Outline: search for entry with emoji by pattern Given using DAV path - When user "Alice" searches for "😀 😁" using the WebDAV API + When user "Alice" searches for '"*😀 😁*"' using the WebDAV API Then the HTTP status code should be "207" And the search result of user "Alice" should contain these entries: | /upload😀 😁 |
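For illustration, here is a minimal sketch of how the KQL parser touched by the hunks above could be exercised. It is a sketch only, not part of this change: the exported kql.Builder, the ast node types and the DateTimeNode operator handling are taken from kql.go, factory.go and dictionary_test.go in this diff, while the import path assumed for the ast package (services/search/pkg/query/ast), the main-package wrapper and the exact query string (a quoted wildcard name restriction plus an Mtime restriction) are assumptions made for the example.

package main

import (
	"fmt"

	"github.com/owncloud/ocis/v2/services/search/pkg/query/ast"
	"github.com/owncloud/ocis/v2/services/search/pkg/query/kql"
)

func main() {
	// An illustrative query mixing a quoted wildcard name restriction with a
	// date-time restriction, modelled on the DateTimeRestrictionNode cases in
	// dictionary_test.go (RFC3339Nano timestamps, ":", "=", "<", "<=", ">", ">=").
	q := `name:"*report*" Mtime>="2023-09-05T08:42:11.23554+02:00"`

	// Builder.Build parses the query string into an *ast.Ast.
	a, err := kql.Builder{}.Build(q)
	if err != nil {
		panic(err)
	}

	// Walk the parsed nodes. NormalizeNodes (normalize.go above) is the step
	// that inserts the implicit AND between adjacent key/value restrictions,
	// including the new DateTimeNode case.
	for _, n := range a.Nodes {
		switch node := n.(type) {
		case *ast.StringNode:
			fmt.Printf("string restriction: %s -> %s\n", node.Key, node.Value)
		case *ast.DateTimeNode:
			fmt.Printf("date-time restriction: %s %s %v\n", node.Key, node.Operator.Value, node.Value)
		case *ast.OperatorNode:
			fmt.Printf("operator: %s\n", node.Value)
		}
	}
}

Presumably the same AST is then compiled into an engine query by the creator wired up in services/search/pkg/service/grpc/v0/service.go (engine.NewBleveEngine(idx, bleve.DefaultCreator)), which is how restrictions such as the id/mtime query assembled in IndexSpace reach the index.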