Rename AnalyzeRequest to AnalyzeTextOptions #12489

Merged: 5 commits, merged Jun 5, 2020
Changes from 1 commit
sdk/search/Azure.Search.Documents/CHANGELOG.md: 2 changes (1 addition, 1 deletion)
@@ -6,7 +6,7 @@

- Moved models for managing indexes, indexers, and skillsets to `Azure.Search.Documents.Indexes.Models`.
- Split `SearchServiceClient` into `SearchIndexClient` for managing indexes, and `SearchIndexerClient` for managing indexers, both of which are now in `Azure.Search.Documents.Indexes`.
- Renamed `AnalyzeRequest` to `AnalyzeTextRequest`.
- Renamed `AnalyzeRequest` to `AnalyzeTextOptions`, and overloaded constructors to make constructing it easier.
- Renamed `SearchIndexerDataSource` to `SearchIndexerDataSourceConnection`.
- Renamed methods on `SearchIndexerClient` matching "\*DataSource" to "\*DataSourceConnection".
- Made collection- and dictionary-type properties read-only, i.e. has only get-accessors, based on [.NET Guidelines][net-guidelines-collection-properties].
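For illustration (not part of the diff), a minimal sketch of constructing the renamed type through the new overloads; the `StandardLucene` analyzer name is an assumed choice among the built-in `LexicalAnalyzerName` values, while the whitespace tokenizer matches the updated tests in this PR:

```csharp
using Azure.Search.Documents.Indexes.Models;

// Break text with a named analyzer...
AnalyzeTextOptions withAnalyzer = new AnalyzeTextOptions(
    "The quick brown fox jumped over the lazy dog.",
    LexicalAnalyzerName.StandardLucene);

// ...or with a named tokenizer; analyzer and tokenizer are mutually exclusive.
AnalyzeTextOptions withTokenizer = new AnalyzeTextOptions(
    "The quick brown fox jumped over the lazy dog.",
    LexicalTokenizerName.Whitespace);
```

Taking the analyzer or tokenizer as a constructor argument keeps the two mutually exclusive settings from being mixed after construction, which matches the get-only `Analyzer` and `Tokenizer` properties in the API listing below.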
@@ -109,8 +109,8 @@ public SearchIndexClient(System.Uri endpoint, Azure.AzureKeyCredential credentia
public SearchIndexClient(System.Uri endpoint, Azure.AzureKeyCredential credential, Azure.Search.Documents.SearchClientOptions options) { }
public virtual System.Uri Endpoint { get { throw null; } }
public virtual string ServiceName { get { throw null; } }
public virtual Azure.Response<System.Collections.Generic.IReadOnlyList<Azure.Search.Documents.Indexes.Models.AnalyzedTokenInfo>> AnalyzeText(string indexName, Azure.Search.Documents.Indexes.Models.AnalyzeTextRequest request, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Response<System.Collections.Generic.IReadOnlyList<Azure.Search.Documents.Indexes.Models.AnalyzedTokenInfo>>> AnalyzeTextAsync(string indexName, Azure.Search.Documents.Indexes.Models.AnalyzeTextRequest request, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.Response<System.Collections.Generic.IReadOnlyList<Azure.Search.Documents.Indexes.Models.AnalyzedTokenInfo>> AnalyzeText(string indexName, Azure.Search.Documents.Indexes.Models.AnalyzeTextOptions request, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Response<System.Collections.Generic.IReadOnlyList<Azure.Search.Documents.Indexes.Models.AnalyzedTokenInfo>>> AnalyzeTextAsync(string indexName, Azure.Search.Documents.Indexes.Models.AnalyzeTextOptions request, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.Response<Azure.Search.Documents.Indexes.Models.SearchIndex> CreateIndex(Azure.Search.Documents.Indexes.Models.SearchIndex index, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Response<Azure.Search.Documents.Indexes.Models.SearchIndex>> CreateIndexAsync(Azure.Search.Documents.Indexes.Models.SearchIndex index, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.Response<Azure.Search.Documents.Indexes.Models.SearchIndex> CreateOrUpdateIndex(Azure.Search.Documents.Indexes.Models.SearchIndex index, bool allowIndexDowntime = false, bool onlyIfUnchanged = false, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
@@ -212,14 +212,16 @@ internal AnalyzedTokenInfo() { }
public int StartOffset { get { throw null; } }
public string Token { get { throw null; } }
}
public partial class AnalyzeTextRequest
public partial class AnalyzeTextOptions
{
public AnalyzeTextRequest(string text) { }
public Azure.Search.Documents.Indexes.Models.LexicalAnalyzerName? Analyzer { get { throw null; } set { } }
public AnalyzeTextOptions(string text) { }
public AnalyzeTextOptions(string text, Azure.Search.Documents.Indexes.Models.LexicalAnalyzerName analyzer) { }
public AnalyzeTextOptions(string text, Azure.Search.Documents.Indexes.Models.LexicalTokenizerName tokenizer) { }
public Azure.Search.Documents.Indexes.Models.LexicalAnalyzerName? Analyzer { get { throw null; } }
public System.Collections.Generic.IList<string> CharFilters { get { throw null; } }
public string Text { get { throw null; } }
public System.Collections.Generic.IList<Azure.Search.Documents.Indexes.Models.TokenFilterName> TokenFilters { get { throw null; } }
public Azure.Search.Documents.Indexes.Models.LexicalTokenizerName? Tokenizer { get { throw null; } set { } }
public Azure.Search.Documents.Indexes.Models.LexicalTokenizerName? Tokenizer { get { throw null; } }
}
public partial class AsciiFoldingTokenFilter : Azure.Search.Documents.Indexes.Models.TokenFilter
{

Some generated files are not rendered by default.

@@ -0,0 +1,59 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.

using System;
using System.Collections.Generic;
using Azure.Core;

namespace Azure.Search.Documents.Indexes.Models
{
    [CodeGenModel("AnalyzeRequest")]
    public partial class AnalyzeTextOptions
    {
        /// <summary>
        /// Initializes a new instance of AnalyzeTextOptions.
        /// </summary>
        /// <param name="text">Required text to break into tokens.</param>
        /// <param name="analyzer">The name of the analyzer to use to break the given <paramref name="text"/>.</param>
        /// <exception cref="ArgumentNullException"><paramref name="text"/> is null.</exception>
        public AnalyzeTextOptions(string text, LexicalAnalyzerName analyzer)
        {
            Text = text ?? throw new ArgumentNullException(nameof(text));
            Analyzer = analyzer;

            TokenFilters = new List<TokenFilterName>();
            CharFilters = new List<string>();
        }

        /// <summary>
        /// Initializes a new instance of AnalyzeTextOptions.
        /// </summary>
        /// <param name="text">Required text to break into tokens.</param>
        /// <param name="tokenizer">The name of the tokenizer to use to break the given <paramref name="text"/>.</param>
        /// <exception cref="ArgumentNullException"><paramref name="text"/> is null.</exception>
        public AnalyzeTextOptions(string text, LexicalTokenizerName tokenizer)
        {
            Text = text ?? throw new ArgumentNullException(nameof(text));
            Tokenizer = tokenizer;

            TokenFilters = new List<TokenFilterName>();
            CharFilters = new List<string>();
        }

        /// <summary> The name of the analyzer to use to break the given text. If this parameter is not specified, you must specify a tokenizer instead. The tokenizer and analyzer parameters are mutually exclusive. </summary>
        [CodeGenMember("Analyzer")]
        public LexicalAnalyzerName? Analyzer { get; }

        /// <summary> The name of the tokenizer to use to break the given text. If this parameter is not specified, you must specify an analyzer instead. The tokenizer and analyzer parameters are mutually exclusive. </summary>
        [CodeGenMember("Tokenizer")]
        public LexicalTokenizerName? Tokenizer { get; }

        /// <summary> An optional list of token filters to use when breaking the given text. This parameter can only be set when using the tokenizer parameter. </summary>
        [CodeGenMember(EmptyAsUndefined = true, Initialize = true)]
        public IList<TokenFilterName> TokenFilters { get; }

        /// <summary> An optional list of character filters to use when breaking the given text. This parameter can only be set when using the tokenizer parameter. </summary>
        [CodeGenMember(EmptyAsUndefined = true, Initialize = true)]
        public IList<string> CharFilters { get; }
    }
}
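As an aside (not part of the new file), a short usage sketch of the tokenizer-based constructor together with the get-only collection properties; the specific token filter names and the "html_strip" character filter are assumptions about the built-in filter names:

```csharp
using Azure.Search.Documents.Indexes.Models;

AnalyzeTextOptions options = new AnalyzeTextOptions(
    "The quick brown fox jumped over the lazy dog.",
    LexicalTokenizerName.Whitespace);

// TokenFilters and CharFilters are read-only properties initialized by the
// constructor, so entries are added to the existing collections in place.
options.TokenFilters.Add(TokenFilterName.Lowercase);
options.TokenFilters.Add(TokenFilterName.AsciiFolding);
options.CharFilters.Add("html_strip");
```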

This file was deleted.

@@ -227,7 +227,7 @@ public virtual async Task<Response<SearchServiceStatistics>> GetServiceStatistic
/// Shows how an analyzer breaks text into tokens.
/// </summary>
/// <param name="indexName">The name of the index used to test an analyzer.</param>
/// <param name="request">The <see cref="AnalyzeTextRequest"/> containing the text and analyzer or analyzer components to test.</param>
/// <param name="request">The <see cref="AnalyzeTextOptions"/> containing the text and analyzer or analyzer components to test.</param>
/// <param name="cancellationToken">Optional <see cref="CancellationToken"/> to propagate notifications that the operation should be canceled.</param>
/// <returns>
/// The <see cref="Response{T}"/> from the server containing a list of <see cref="AnalyzedTokenInfo"/> for analyzed text.
@@ -236,7 +236,7 @@ public virtual async Task<Response<SearchServiceStatistics>> GetServiceStatistic
/// <exception cref="RequestFailedException">Thrown when a failure is returned by the Search service.</exception>
public virtual Response<IReadOnlyList<AnalyzedTokenInfo>> AnalyzeText(
string indexName,
AnalyzeTextRequest request,
AnalyzeTextOptions request,
CancellationToken cancellationToken = default)
{
using DiagnosticScope scope = _clientDiagnostics.CreateScope($"{nameof(SearchIndexClient)}.{nameof(AnalyzeText)}");
@@ -261,7 +261,7 @@ public virtual Response<IReadOnlyList<AnalyzedTokenInfo>> AnalyzeText(
/// Shows how an analyzer breaks text into tokens.
/// </summary>
/// <param name="indexName">The name of the index used to test an analyzer.</param>
/// <param name="request">The <see cref="AnalyzeTextRequest"/> containing the text and analyzer or analyzer components to test.</param>
/// <param name="request">The <see cref="AnalyzeTextOptions"/> containing the text and analyzer or analyzer components to test.</param>
/// <param name="cancellationToken">Optional <see cref="CancellationToken"/> to propagate notifications that the operation should be canceled.</param>
/// <returns>
/// The <see cref="Response{T}"/> from the server containing a list of <see cref="AnalyzedTokenInfo"/> for analyzed text.
@@ -270,7 +270,7 @@ public virtual Response<IReadOnlyList<AnalyzedTokenInfo>> AnalyzeText(
/// <exception cref="RequestFailedException">Thrown when a failure is returned by the Search service.</exception>
public virtual async Task<Response<IReadOnlyList<AnalyzedTokenInfo>>> AnalyzeTextAsync(
string indexName,
AnalyzeTextRequest request,
AnalyzeTextOptions request,
CancellationToken cancellationToken = default)
{
using DiagnosticScope scope = _clientDiagnostics.CreateScope($"{nameof(SearchIndexClient)}.{nameof(AnalyzeText)}");
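To complement the method documentation above, a hedged sketch of calling the renamed overload end to end; the endpoint, API key, and index name are placeholders, and the call is assumed to run inside an async method:

```csharp
using System;
using System.Collections.Generic;
using Azure;
using Azure.Search.Documents.Indexes;
using Azure.Search.Documents.Indexes.Models;

SearchIndexClient client = new SearchIndexClient(
    new Uri("https://<service-name>.search.windows.net"),
    new AzureKeyCredential("<api-key>"));

AnalyzeTextOptions options = new AnalyzeTextOptions(
    "The quick brown fox jumped over the lazy dog.",
    LexicalTokenizerName.Whitespace);

// AnalyzeTextAsync returns the analyzed tokens for the given index.
Response<IReadOnlyList<AnalyzedTokenInfo>> result =
    await client.AnalyzeTextAsync("<index-name>", options);

foreach (AnalyzedTokenInfo token in result.Value)
{
    Console.WriteLine($"{token.Token} (starts at {token.StartOffset})");
}
```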
@@ -7,12 +7,12 @@

namespace Azure.Search.Documents.Tests.Models
{
public class AnalyzeTextRequestTests
public class AnalyzeTextOptionsTests
{
[Test]
public void RequiresText()
{
ArgumentNullException ex = Assert.Throws<ArgumentNullException>(() => new AnalyzeTextRequest(null));
ArgumentNullException ex = Assert.Throws<ArgumentNullException>(() => new AnalyzeTextOptions(null, LexicalTokenizerName.Whitespace));
Assert.AreEqual("text", ex.ParamName);
}
}
@@ -323,10 +323,7 @@ public async Task AnalyzeText()

SearchIndexClient client = resources.GetIndexClient();

AnalyzeTextRequest request = new AnalyzeTextRequest("The quick brown fox jumped over the lazy dog.")
{
Tokenizer = LexicalTokenizerName.Whitespace,
};
AnalyzeTextOptions request = new AnalyzeTextOptions("The quick brown fox jumped over the lazy dog.", LexicalTokenizerName.Whitespace);

Response<IReadOnlyList<AnalyzedTokenInfo>> result = await client.AnalyzeTextAsync(resources.IndexName, request);
IReadOnlyList<AnalyzedTokenInfo> tokens = result.Value;