From 2a3365f67d398c62ea90df43820dadcc8e54fcc5 Mon Sep 17 00:00:00 2001
From: NoodleSamaChan
Date: Tue, 2 Jul 2024 20:30:22 +0200
Subject: [PATCH 1/2] started working on similar documents

---
 src/indexes.rs |  90 ++++++++++-
 src/search.rs  | 399 ++++++++++++++++++++++++++++---------------------
 2 files changed, 316 insertions(+), 173 deletions(-)

diff --git a/src/indexes.rs b/src/indexes.rs
index 05c728f1..c443ba50 100644
--- a/src/indexes.rs
+++ b/src/indexes.rs
@@ -226,7 +226,7 @@ impl<Http: HttpClient> Index<Http> {
     /// ```
     pub async fn execute_query<T: 'static + DeserializeOwned + Send + Sync>(
         &self,
-        body: &SearchQuery<'_, Http>,
+        body: &SearchQuery<'_, Http, Search<'_>>,
     ) -> Result<SearchResults<T>, Error> {
         self.client
             .http_client
@@ -238,6 +238,52 @@ impl<Http: HttpClient> Index<Http> {
             .await
     }
 
+    /// The /similar route uses AI-powered search to return a number of documents similar to a target document.
+    ///
+    /// See also [`Index::search_similar_documents`].
+    ///
+    /// # Example
+    ///
+    /// ```no_run
+    /// # use serde::{Serialize, Deserialize};
+    /// # use meilisearch_sdk::{client::*, indexes::*, search::*};
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// #[derive(Serialize, Deserialize, Debug)]
+    /// struct Movie {
+    ///     name: String,
+    ///     description: String,
+    /// }
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// let movies = client.index("execute_query_similar");
+    ///
+    /// // add some documents
+    /// # movies.add_or_replace(&[Movie{name:String::from("Interstellar"), description:String::from("Interstellar chronicles the adventures of a group of explorers who make use of a newly discovered wormhole to surpass the limitations on human space travel and conquer the vast distances involved in an interstellar voyage.")},Movie{name:String::from("Unknown"), description:String::from("Unknown")}], Some("name")).await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+    ///
+    /// let query = SearchQuery::new_similar(&movies, "Interstellar").with_limit(5).build();
+    /// let results = movies.execute_query_similar::<Movie>(&query).await.unwrap();
+    ///
+    /// assert!(results.hits.len() > 0);
+    /// # movies.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+    /// # });
+    /// ```
+    pub async fn execute_query_similar<T: 'static + DeserializeOwned + Send + Sync>(
+        &self,
+        body: &SearchQuery<'_, Http, Similar<'_>>,
+    ) -> Result<SearchResults<T>, Error> {
+        self.client
+            .http_client
+            .request::<(), &SearchQuery<Http, Similar>, SearchResults<T>>(
+                &format!("{}/indexes/{}/similar", self.client.host, self.uid),
+                Method::Post { body, query: () },
+                200,
+            )
+            .await
+    }
+
     /// Search for documents matching a specific query in the index.
     ///
     /// See also [`Index::execute_query`].
@@ -279,6 +325,48 @@ impl<Http: HttpClient> Index<Http> {
         SearchQuery::new(self)
     }
 
+    /// Uses AI-powered search to return a number of documents similar to a target document.
+    ///
+    /// See also [`Index::execute_query`].
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use serde::{Serialize, Deserialize};
+    /// # use meilisearch_sdk::{client::*, indexes::*, search::*};
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// #[derive(Serialize, Deserialize, Debug)]
+    /// struct Movie {
+    ///     name: String,
+    ///     description: String,
+    /// }
+    ///
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// meilisearch_sdk::features::ExperimentalFeatures::new(&client).set_vector_store(true).update().unwrap();
+    /// let mut movies = client.index("search_similar_documents");
+    ///
+    /// # // add some documents
+    /// # movies.add_or_replace(&[Movie{name:String::from("Interstellar"), description:String::from("Interstellar chronicles the adventures of a group of explorers who make use of a newly discovered wormhole to surpass the limitations on human space travel and conquer the vast distances involved in an interstellar voyage.")},Movie{name:String::from("Unknown"), description:String::from("Unknown")}], Some("name")).await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+    ///
+    /// let results = movies.search_similar_documents()
+    ///     .with_limit(5)
+    ///     .execute::<Movie>()
+    ///     .await
+    ///     .unwrap();
+    ///
+    /// assert!(results.hits.len() > 0);
+    /// # movies.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+    /// # });
+    /// ```
+    #[must_use]
+    pub fn search_similar_documents <'a>(&'a self, id: &'a str) -> SearchQuery<'a, Http, Similar<'a>> {
+        SearchQuery::new_similar(self, id)
+    }
+
     /// Get one document using its unique id.
     ///
     /// Serde is needed. Add `serde = {version="1.0", features=["derive"]}` in the dependencies section of your Cargo.toml.
diff --git a/src/search.rs b/src/search.rs
index f56c86b0..66175d8f 100644
--- a/src/search.rs
+++ b/src/search.rs
@@ -144,6 +144,108 @@ pub enum Selectors<T> {
 
 type AttributeToCrop<'a> = (&'a str, Option<usize>);
 
+#[derive(Debug, Clone, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct Similar<'a> {
+    id: &'a str,
+    embedder: Option<&'a str>,
+}
+
+#[derive(Debug, Clone, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct Search<'a> {
+    /// The text that will be searched for among the documents.
+    #[serde(skip_serializing_if = "Option::is_none")]
+    #[serde(rename = "q")]
+    pub query: Option<&'a str>,
+    /// The page number on which you paginate.
+    ///
+    /// Pagination starts at 1. If page is 0, no results are returned.
+    ///
+    /// **Default: None unless `hits_per_page` is defined, in which case page is `1`**
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub page: Option<usize>,
+    /// The maximum number of results in a page. A page can contain less results than the number of hits_per_page.
+    ///
+    /// **Default: None unless `page` is defined, in which case `20`**
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub hits_per_page: Option<usize>,
+    /// Facets for which to retrieve the matching count.
+    ///
+    /// Can be set to a [wildcard value](enum.Selectors.html#variant.All) that will select all existing attributes.
+    ///
+    /// **Default: all attributes found in the documents.**
+    #[serde(skip_serializing_if = "Option::is_none")]
+    #[serde(serialize_with = "serialize_with_wildcard")]
+    pub facets: Option<Selectors<&'a [&'a str]>>,
+    /// Attributes to sort.
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub sort: Option<&'a [&'a str]>,
+    /// Attributes to perform the search on.
+    ///
+    /// Specify the subset of searchableAttributes for a search without modifying Meilisearch’s index settings.
+    ///
+    /// **Default: all searchable attributes found in the documents.**
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub attributes_to_search_on: Option<&'a [&'a str]>,
+    /// Attributes whose values have to be cropped.
+    ///
+    /// Attributes are composed by the attribute name and an optional `usize` that overwrites the `crop_length` parameter.
+    ///
+    /// Can be set to a [wildcard value](enum.Selectors.html#variant.All) that will select all existing attributes.
+    #[serde(skip_serializing_if = "Option::is_none")]
+    #[serde(serialize_with = "serialize_attributes_to_crop_with_wildcard")]
+    pub attributes_to_crop: Option<Selectors<&'a [AttributeToCrop<'a>]>>,
+    /// Maximum number of words including the matched query term(s) contained in the returned cropped value(s).
+    ///
+    /// See [attributes_to_crop](#structfield.attributes_to_crop).
+    ///
+    /// **Default: `10`**
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub crop_length: Option<usize>,
+    /// Marker at the start and the end of a cropped value.
+    ///
+    /// ex: `...middle of a crop...`
+    ///
+    /// **Default: `...`**
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub crop_marker: Option<&'a str>,
+    /// Attributes whose values will contain **highlighted matching terms**.
+    ///
+    /// Can be set to a [wildcard value](enum.Selectors.html#variant.All) that will select all existing attributes.
+    #[serde(skip_serializing_if = "Option::is_none")]
+    #[serde(serialize_with = "serialize_with_wildcard")]
+    pub attributes_to_highlight: Option<Selectors<&'a [&'a str]>>,
+    /// Tag in front of a highlighted term.
+    ///
+    /// ex: `<em>hello</em> world`
+    ///
+    /// **Default: `<em>`**
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub highlight_pre_tag: Option<&'a str>,
+    /// Tag after a highlighted term.
+    ///
+    /// ex: `<em>hello</em> world`
+    ///
+    /// **Default: `</em>`**
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub highlight_post_tag: Option<&'a str>,
+    /// Defines whether an object that contains information about the matches should be returned or not.
+    ///
+    /// **Default: `false`**
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub show_matches_position: Option<bool>,
+    /// Defines the strategy on how to handle queries containing multiple words.
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub matching_strategy: Option<MatchingStrategies>,
+
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub(crate) index_uid: Option<&'a str>,
+
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub(crate) distinct: Option<&'a str>,
+}
+
 /// A struct representing a query.
 ///
 /// You can add search parameters using the builder syntax.
@@ -204,13 +306,10 @@ type AttributeToCrop<'a> = (&'a str, Option<usize>);
 /// ```
 #[derive(Debug, Serialize, Clone)]
 #[serde(rename_all = "camelCase")]
-pub struct SearchQuery<'a, Http: HttpClient> {
+pub struct SearchQuery<'a, Http: HttpClient, SearchKind = Search<'a>> {
+    #[serde(skip_serializing)]
     index: &'a Index<Http>,
-    /// The text that will be searched for among the documents.
-    #[serde(skip_serializing_if = "Option::is_none")]
-    #[serde(rename = "q")]
-    pub query: Option<&'a str>,
     /// The number of documents to skip.
     ///
     /// If the value of the parameter `offset` is `n`, the `n` first documents (ordered by relevance) will not be returned.
@@ -229,41 +328,11 @@ pub struct SearchQuery<'a, Http: HttpClient> {
     /// **Default: `20`**
     #[serde(skip_serializing_if = "Option::is_none")]
     pub limit: Option<usize>,
-    /// The page number on which you paginate.
-    ///
-    /// Pagination starts at 1. If page is 0, no results are returned.
-    ///
-    /// **Default: None unless `hits_per_page` is defined, in which case page is `1`**
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub page: Option<usize>,
-    /// The maximum number of results in a page. A page can contain less results than the number of hits_per_page.
-    ///
-    /// **Default: None unless `page` is defined, in which case `20`**
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub hits_per_page: Option<usize>,
     /// Filter applied to documents.
     ///
     /// Read the [dedicated guide](https://www.meilisearch.com/docs/learn/advanced/filtering) to learn the syntax.
     #[serde(skip_serializing_if = "Option::is_none")]
     pub filter: Option<Filter<'a>>,
-    /// Facets for which to retrieve the matching count.
-    ///
-    /// Can be set to a [wildcard value](enum.Selectors.html#variant.All) that will select all existing attributes.
-    ///
-    /// **Default: all attributes found in the documents.**
-    #[serde(skip_serializing_if = "Option::is_none")]
-    #[serde(serialize_with = "serialize_with_wildcard")]
-    pub facets: Option<Selectors<&'a [&'a str]>>,
-    /// Attributes to sort.
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub sort: Option<&'a [&'a str]>,
-    /// Attributes to perform the search on.
-    ///
-    /// Specify the subset of searchableAttributes for a search without modifying Meilisearch’s index settings.
-    ///
-    /// **Default: all searchable attributes found in the documents.**
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub attributes_to_search_on: Option<&'a [&'a str]>,
     /// Attributes to display in the returned documents.
     ///
     /// Can be set to a [wildcard value](enum.Selectors.html#variant.All) that will select all existing attributes.
@@ -272,120 +341,135 @@ pub struct SearchQuery<'a, Http: HttpClient> {
     #[serde(skip_serializing_if = "Option::is_none")]
     #[serde(serialize_with = "serialize_with_wildcard")]
     pub attributes_to_retrieve: Option<Selectors<&'a [&'a str]>>,
-    /// Attributes whose values have to be cropped.
-    ///
-    /// Attributes are composed by the attribute name and an optional `usize` that overwrites the `crop_length` parameter.
-    ///
-    /// Can be set to a [wildcard value](enum.Selectors.html#variant.All) that will select all existing attributes.
-    #[serde(skip_serializing_if = "Option::is_none")]
-    #[serde(serialize_with = "serialize_attributes_to_crop_with_wildcard")]
-    pub attributes_to_crop: Option<Selectors<&'a [AttributeToCrop<'a>]>>,
-    /// Maximum number of words including the matched query term(s) contained in the returned cropped value(s).
-    ///
-    /// See [attributes_to_crop](#structfield.attributes_to_crop).
-    ///
-    /// **Default: `10`**
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub crop_length: Option<usize>,
-    /// Marker at the start and the end of a cropped value.
-    ///
-    /// ex: `...middle of a crop...`
-    ///
-    /// **Default: `...`**
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub crop_marker: Option<&'a str>,
-    /// Attributes whose values will contain **highlighted matching terms**.
-    ///
-    /// Can be set to a [wildcard value](enum.Selectors.html#variant.All) that will select all existing attributes.
-    #[serde(skip_serializing_if = "Option::is_none")]
-    #[serde(serialize_with = "serialize_with_wildcard")]
-    pub attributes_to_highlight: Option<Selectors<&'a [&'a str]>>,
-    /// Tag in front of a highlighted term.
-    ///
-    /// ex: `<em>hello</em> world`
-    ///
-    /// **Default: `<em>`**
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub highlight_pre_tag: Option<&'a str>,
-    /// Tag after a highlighted term.
-    ///
-    /// ex: `<em>hello</em> world`
-    ///
-    /// **Default: `</em>`**
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub highlight_post_tag: Option<&'a str>,
-    /// Defines whether an object that contains information about the matches should be returned or not.
-    ///
-    /// **Default: `false`**
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub show_matches_position: Option<bool>,
-
     /// Defines whether to show the relevancy score of the match.
     ///
     /// **Default: `false`**
     #[serde(skip_serializing_if = "Option::is_none")]
     pub show_ranking_score: Option<bool>,
-
     ///Adds a detailed global ranking score field to each document.
     ///
     /// **Default: `false`**
     #[serde(skip_serializing_if = "Option::is_none")]
     pub show_ranking_score_details: Option<bool>,
-    /// Defines the strategy on how to handle queries containing multiple words.
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub matching_strategy: Option<MatchingStrategies>,
+    #[serde (flatten)]
+    search_kind: SearchKind,
+}
 
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub(crate) index_uid: Option<&'a str>,
+#[allow(missing_docs)]
+impl<'a, Http: HttpClient, SearchKind: Clone> SearchQuery<'a, Http, SearchKind> {
+    pub fn with_offset<'b>(
+        &'b mut self,
+        offset: usize,
+    ) -> &'b mut SearchQuery<'a, Http, SearchKind> {
+        self.offset = Some(offset);
+        self
+    }
+    pub fn with_limit<'b>(&'b mut self, limit: usize) -> &'b mut SearchQuery<'a, Http, SearchKind> {
+        self.limit = Some(limit);
+        self
+    }
+    pub fn with_filter<'b>(
+        &'b mut self,
+        filter: &'a str,
+    ) -> &'b mut SearchQuery<'a, Http, SearchKind> {
+        self.filter = Some(Filter::new(Either::Left(filter)));
+        self
+    }
+    pub fn with_array_filter<'b>(
+        &'b mut self,
+        filter: Vec<&'a str>,
+    ) -> &'b mut SearchQuery<'a, Http, SearchKind> {
+        self.filter = Some(Filter::new(Either::Right(filter)));
+        self
+    }
+    pub fn with_attributes_to_retrieve<'b>(
+        &'b mut self,
+        attributes_to_retrieve: Selectors<&'a [&'a str]>,
+    ) -> &'b mut SearchQuery<'a, Http, SearchKind> {
+        self.attributes_to_retrieve = Some(attributes_to_retrieve);
+        self
+    }
+    pub fn with_show_ranking_score<'b>(
+        &'b mut self,
+        show_ranking_score: bool,
+    ) -> &'b mut SearchQuery<'a, Http, SearchKind> {
+        self.show_ranking_score = Some(show_ranking_score);
+        self
+    }
 
-    ///Defines one attribute in the filterableAttributes list as a distinct attribute.
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub(crate) distinct: Option<&'a str>,
+    pub fn with_show_ranking_score_details<'b>(
+        &'b mut self,
+        show_ranking_score_details: bool,
+    ) -> &'b mut SearchQuery<'a, Http, SearchKind> {
+        self.show_ranking_score_details = Some(show_ranking_score_details);
+        self
+    }
+    pub fn build(&mut self) -> SearchQuery<'a, Http, SearchKind> {
+        self.clone()
+    }
 }
 
-#[allow(missing_docs)]
-impl<'a, Http: HttpClient> SearchQuery<'a, Http> {
+impl<'a, Http: HttpClient> SearchQuery<'a, Http, Similar<'a>> {
     #[must_use]
-    pub fn new(index: &'a Index<Http>) -> SearchQuery<'a, Http> {
+    pub fn new_similar(index: &'a Index<Http>, id: &'a str) -> SearchQuery<'a, Http, Similar<'a>> {
         SearchQuery {
+            search_kind: Similar { id, embedder: None },
             index,
-            query: None,
             offset: None,
             limit: None,
-            page: None,
-            hits_per_page: None,
             filter: None,
-            sort: None,
-            facets: None,
-            attributes_to_search_on: None,
             attributes_to_retrieve: None,
-            attributes_to_crop: None,
-            crop_length: None,
-            crop_marker: None,
-            attributes_to_highlight: None,
-            highlight_pre_tag: None,
-            highlight_post_tag: None,
-            show_matches_position: None,
            show_ranking_score: None,
             show_ranking_score_details: None,
-            matching_strategy: None,
-            index_uid: None,
-            distinct: None,
         }
     }
 
-    pub fn with_query<'b>(&'b mut self, query: &'a str) -> &'b mut SearchQuery<'a, Http> {
-        self.query = Some(query);
+    pub fn with_embedder<'b>(
+        &'b mut self,
+        embedder: &'a str,
+    ) -> &'b mut SearchQuery<'a, Http, Similar<'a>> {
+        self.search_kind.embedder = Some(embedder);
         self
     }
 
-    pub fn with_offset<'b>(&'b mut self, offset: usize) -> &'b mut SearchQuery<'a, Http> {
-        self.offset = Some(offset);
-        self
+    /// Execute the query and fetch the results.
+    pub async fn execute<T: 'static + DeserializeOwned + Send + Sync>(
+        &'a self,
+    ) -> Result<SearchResults<T>, Error> {
+        self.index.execute_query_similar::<T>(self).await
     }
-    pub fn with_limit<'b>(&'b mut self, limit: usize) -> &'b mut SearchQuery<'a, Http> {
-        self.limit = Some(limit);
-        self
+}
+
+impl<'a, Http: HttpClient> SearchQuery<'a, Http, Search<'a>> {
+    #[must_use]
+    pub fn new(index: &'a Index<Http>) -> SearchQuery<'a, Http, Search> {
+        SearchQuery {
+            index,
+            offset: None,
+            limit: None,
+            filter: None,
+            attributes_to_retrieve: None,
+            show_ranking_score: None,
+            show_ranking_score_details: None,
+            search_kind: Search {
+                query: None,
+                page: None,
+                hits_per_page: None,
+                sort: None,
+                facets: None,
+                attributes_to_search_on: None,
+                attributes_to_crop: None,
+                crop_length: None,
+                crop_marker: None,
+                attributes_to_highlight: None,
+                highlight_pre_tag: None,
+                highlight_post_tag: None,
+                show_matches_position: None,
+                matching_strategy: None,
+                index_uid: None,
+                distinct: None,
+            },
+        }
     }
 
     /// Add the page number on which to paginate.
     ///
@@ -415,7 +499,12 @@ impl<'a, Http: HttpClient> SearchQuery<'a, Http> {
     /// # });
     /// ```
     pub fn with_page<'b>(&'b mut self, page: usize) -> &'b mut SearchQuery<'a, Http> {
-        self.page = Some(page);
+        self.search_kind.page = Some(page);
+        self
+    }
+
+    pub fn with_query<'b>(&'b mut self, query: &'a str) -> &'b mut SearchQuery<'a, Http> {
+        self.search_kind.query = Some(query);
         self
     }
 
@@ -450,29 +539,19 @@ impl<'a, Http: HttpClient> SearchQuery<'a, Http> {
         &'b mut self,
         hits_per_page: usize,
     ) -> &'b mut SearchQuery<'a, Http> {
-        self.hits_per_page = Some(hits_per_page);
-        self
-    }
-    pub fn with_filter<'b>(&'b mut self, filter: &'a str) -> &'b mut SearchQuery<'a, Http> {
-        self.filter = Some(Filter::new(Either::Left(filter)));
-        self
-    }
-    pub fn with_array_filter<'b>(
-        &'b mut self,
-        filter: Vec<&'a str>,
-    ) -> &'b mut SearchQuery<'a, Http> {
-        self.filter = Some(Filter::new(Either::Right(filter)));
+        self.search_kind.hits_per_page = Some(hits_per_page);
         self
     }
+
     pub fn with_facets<'b>(
         &'b mut self,
         facets: Selectors<&'a [&'a str]>,
     ) -> &'b mut SearchQuery<'a, Http> {
-        self.facets = Some(facets);
+        self.search_kind.facets = Some(facets);
         self
     }
     pub fn with_sort<'b>(&'b mut self, sort: &'a [&'a str]) -> &'b mut SearchQuery<'a, Http> {
-        self.sort = Some(sort);
+        self.search_kind.sort = Some(sort);
         self
     }
 
@@ -480,98 +559,74 @@ impl<'a, Http: HttpClient> SearchQuery<'a, Http> {
         &'b mut self,
         attributes_to_search_on: &'a [&'a str],
     ) -> &'b mut SearchQuery<'a, Http> {
-        self.attributes_to_search_on = Some(attributes_to_search_on);
-        self
-    }
-    pub fn with_attributes_to_retrieve<'b>(
-        &'b mut self,
-        attributes_to_retrieve: Selectors<&'a [&'a str]>,
-    ) -> &'b mut SearchQuery<'a, Http> {
-        self.attributes_to_retrieve = Some(attributes_to_retrieve);
+        self.search_kind.attributes_to_search_on = Some(attributes_to_search_on);
         self
     }
     pub fn with_attributes_to_crop<'b>(
         &'b mut self,
         attributes_to_crop: Selectors<&'a [(&'a str, Option<usize>)]>,
     ) -> &'b mut SearchQuery<'a, Http> {
-        self.attributes_to_crop = Some(attributes_to_crop);
+        self.search_kind.attributes_to_crop = Some(attributes_to_crop);
         self
     }
     pub fn with_crop_length<'b>(&'b mut self, crop_length: usize) -> &'b mut SearchQuery<'a, Http> {
-        self.crop_length = Some(crop_length);
+        self.search_kind.crop_length = Some(crop_length);
         self
     }
     pub fn with_crop_marker<'b>(
         &'b mut self,
         crop_marker: &'a str,
     ) -> &'b mut SearchQuery<'a, Http> {
-        self.crop_marker = Some(crop_marker);
+        self.search_kind.crop_marker = Some(crop_marker);
         self
     }
     pub fn with_attributes_to_highlight<'b>(
         &'b mut self,
         attributes_to_highlight: Selectors<&'a [&'a str]>,
     ) -> &'b mut SearchQuery<'a, Http> {
-        self.attributes_to_highlight = Some(attributes_to_highlight);
+        self.search_kind.attributes_to_highlight = Some(attributes_to_highlight);
         self
     }
     pub fn with_highlight_pre_tag<'b>(
         &'b mut self,
         highlight_pre_tag: &'a str,
     ) -> &'b mut SearchQuery<'a, Http> {
-        self.highlight_pre_tag = Some(highlight_pre_tag);
+        self.search_kind.highlight_pre_tag = Some(highlight_pre_tag);
         self
     }
     pub fn with_highlight_post_tag<'b>(
         &'b mut self,
         highlight_post_tag: &'a str,
     ) -> &'b mut SearchQuery<'a, Http> {
-        self.highlight_post_tag = Some(highlight_post_tag);
+        self.search_kind.highlight_post_tag = Some(highlight_post_tag);
         self
     }
     pub fn with_show_matches_position<'b>(
         &'b mut self,
         show_matches_position: bool,
     ) -> &'b mut SearchQuery<'a, Http> {
-        self.show_matches_position = Some(show_matches_position);
-        self
-    }
-
-    pub fn with_show_ranking_score<'b>(
-        &'b mut self,
-        show_ranking_score: bool,
-    ) -> &'b mut SearchQuery<'a, Http> {
-        self.show_ranking_score = Some(show_ranking_score);
-        self
-    }
-
-    pub fn with_show_ranking_score_details<'b>(
-        &'b mut self,
-        show_ranking_score_details: bool,
-    ) -> &'b mut SearchQuery<'a, Http> {
-        self.show_ranking_score_details = Some(show_ranking_score_details);
+        self.search_kind.show_matches_position = Some(show_matches_position);
         self
     }
-
     pub fn with_matching_strategy<'b>(
         &'b mut self,
         matching_strategy: MatchingStrategies,
     ) -> &'b mut SearchQuery<'a, Http> {
-        self.matching_strategy = Some(matching_strategy);
+        self.search_kind.matching_strategy = Some(matching_strategy);
         self
     }
 
     pub fn with_index_uid<'b>(&'b mut self) -> &'b mut SearchQuery<'a, Http> {
-        self.index_uid = Some(&self.index.uid);
+        self.search_kind.index_uid = Some(&self.index.uid);
         self
     }
+
     pub fn with_distinct<'b>(&'b mut self, distinct: &'a str) -> &'b mut SearchQuery<'a, Http> {
-        self.distinct = Some(distinct);
+        self.search_kind.distinct = Some(distinct);
         self
     }
-    pub fn build(&mut self) -> SearchQuery<'a, Http> {
-        self.clone()
-    }
+    /// Execute the query and fetch the results.
+    ///
     pub async fn execute<T: 'static + DeserializeOwned + Send + Sync>(
         &'a self,
     ) -> Result<SearchResults<T>, Error> {

From 71f402b21e781eca47f2dab9bfa14d0e6efb86c0 Mon Sep 17 00:00:00 2001
From: NoodleSamaChan
Date: Tue, 2 Jul 2024 23:24:43 +0200
Subject: [PATCH 2/2] fmt

---
 src/indexes.rs | 7 +++++--
 src/search.rs  | 7 +++----
 2 files changed, 8 insertions(+), 6 deletions(-)

diff --git a/src/indexes.rs b/src/indexes.rs
index c443ba50..5c09a967 100644
--- a/src/indexes.rs
+++ b/src/indexes.rs
@@ -348,7 +348,7 @@ impl<Http: HttpClient> Index<Http> {
     /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
     /// meilisearch_sdk::features::ExperimentalFeatures::new(&client).set_vector_store(true).update().unwrap();
     /// let mut movies = client.index("search_similar_documents");
-    /// 
+    ///
     /// # // add some documents
     /// # movies.add_or_replace(&[Movie{name:String::from("Interstellar"), description:String::from("Interstellar chronicles the adventures of a group of explorers who make use of a newly discovered wormhole to surpass the limitations on human space travel and conquer the vast distances involved in an interstellar voyage.")},Movie{name:String::from("Unknown"), description:String::from("Unknown")}], Some("name")).await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
     ///
@@ -363,7 +363,10 @@ impl<Http: HttpClient> Index<Http> {
     /// # });
     /// ```
     #[must_use]
-    pub fn search_similar_documents <'a>(&'a self, id: &'a str) -> SearchQuery<'a, Http, Similar<'a>> {
+    pub fn search_similar_documents<'a>(
+        &'a self,
+        id: &'a str,
+    ) -> SearchQuery<'a, Http, Similar<'a>> {
         SearchQuery::new_similar(self, id)
     }
 
diff --git a/src/search.rs b/src/search.rs
index 66175d8f..0224ba79 100644
--- a/src/search.rs
+++ b/src/search.rs
@@ -307,7 +306,6 @@ pub struct Search<'a> {
 #[derive(Debug, Serialize, Clone)]
 #[serde(rename_all = "camelCase")]
 pub struct SearchQuery<'a, Http: HttpClient, SearchKind = Search<'a>> {
-    #[serde(skip_serializing)]
     index: &'a Index<Http>,
 
     /// The number of documents to skip.
@@ -352,7 +351,7 @@ pub struct SearchQuery<'a, Http: HttpClient, SearchKind = Search<'a>> {
     #[serde(skip_serializing_if = "Option::is_none")]
     pub show_ranking_score_details: Option<bool>,
 
-    #[serde (flatten)]
+    #[serde(flatten)]
     search_kind: SearchKind,
 }
 
@@ -624,9 +623,9 @@ impl<'a, Http: HttpClient> SearchQuery<'a, Http, Search<'a>> {
         self.search_kind.distinct = Some(distinct);
         self
     }
-
+
     /// Execute the query and fetch the results.
-    /// 
+    ///
     pub async fn execute<T: 'static + DeserializeOwned + Send + Sync>(
         &'a self,
     ) -> Result<SearchResults<T>, Error> {
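
Usage sketch (not part of either patch): assuming the API lands as shown above, calling code could drive the new similar-documents query through `Index::search_similar_documents`. The `Movie` type, index uid, target document id, and embedder name below are illustrative placeholders, and the Meilisearch instance is assumed to have the experimental vector store feature enabled.

```rust
use meilisearch_sdk::client::Client;
use serde::{Deserialize, Serialize};

// Placeholder document type; any deserializable type stored in the index works.
#[derive(Serialize, Deserialize, Debug)]
struct Movie {
    name: String,
    description: String,
}

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Connection details are placeholders.
    let client = Client::new("http://localhost:7700", Some("masterKey"))?;
    let movies = client.index("movies");

    // Fetch up to 5 documents similar to the document whose primary key is
    // "Interstellar", using the builder introduced by this patch.
    let similar = movies
        .search_similar_documents("Interstellar")
        .with_embedder("default") // embedder name is an assumption
        .with_limit(5)
        .execute::<Movie>()
        .await?;

    for hit in similar.hits {
        println!("{:?}", hit.result);
    }
    Ok(())
}
```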