Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions sdk/search/azure-search-documents/CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,10 @@
and uplifted the properties to `SearchIndexerDataSourceConnection` and `SearchResourceEncryptionKey` respectively.
- Removed `select` parameter from list service resource APIs.
- Added list names APIs for each search service resource. (e.g. `listSearchIndexNames`, `listSearchIndexerNames`, `listDataSourceNames`, `listSkillsetNames`, `listSynonymMapNames`)
- Removed deprecated versions and removed the V2 suffix. The SDK now provides `EdgeNGramTokenFilter`, `KeywordTokenizer`, `LuceneStandardTokenizer`,
`NGramTokenFilter`, and `PathHierarchyTokenizer`.
- Renamed `Similarity` to `SimilarityAlgorithm`.
- Renamed `Suggester` to `SearchSuggester`.

## 1.0.0-beta.3 (2020-05-05)
- Replaced `isRetrievable` API with `isHidden`, parameter name changed from `retrievable` to `hidden`.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,21 +3,21 @@

package com.azure.search.documents.implementation.converters;

import com.azure.search.documents.indexes.models.BM25Similarity;
import com.azure.search.documents.indexes.models.BM25SimilarityAlgorithm;

/**
* A converter between {@link com.azure.search.documents.indexes.implementation.models.BM25Similarity} and
* {@link BM25Similarity}.
* {@link BM25SimilarityAlgorithm}.
*/
public final class BM25SimilarityConverter {
/**
* Maps from {@link com.azure.search.documents.indexes.implementation.models.BM25Similarity} to {@link BM25Similarity}.
* Maps from {@link com.azure.search.documents.indexes.implementation.models.BM25Similarity} to {@link BM25SimilarityAlgorithm}.
*/
public static BM25Similarity map(com.azure.search.documents.indexes.implementation.models.BM25Similarity obj) {
public static BM25SimilarityAlgorithm map(com.azure.search.documents.indexes.implementation.models.BM25Similarity obj) {
if (obj == null) {
return null;
}
BM25Similarity bM25Similarity = new BM25Similarity();
BM25SimilarityAlgorithm bM25Similarity = new BM25SimilarityAlgorithm();

Double b = obj.getB();
bM25Similarity.setB(b);
Expand All @@ -28,9 +28,9 @@ public static BM25Similarity map(com.azure.search.documents.indexes.implementati
}

/**
* Maps from {@link BM25Similarity} to {@link com.azure.search.documents.indexes.implementation.models.BM25Similarity}.
* Maps from {@link BM25SimilarityAlgorithm} to {@link com.azure.search.documents.indexes.implementation.models.BM25Similarity}.
*/
public static com.azure.search.documents.indexes.implementation.models.BM25Similarity map(BM25Similarity obj) {
public static com.azure.search.documents.indexes.implementation.models.BM25Similarity map(BM25SimilarityAlgorithm obj) {
if (obj == null) {
return null;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,30 +3,30 @@

package com.azure.search.documents.implementation.converters;

import com.azure.search.documents.indexes.models.ClassicSimilarity;
import com.azure.search.documents.indexes.models.ClassicSimilarityAlgorithm;

/**
* A converter between {@link com.azure.search.documents.indexes.implementation.models.ClassicSimilarity} and
* {@link ClassicSimilarity}.
* {@link ClassicSimilarityAlgorithm}.
*/
public final class ClassicSimilarityConverter {
/**
* Maps from {@link com.azure.search.documents.indexes.implementation.models.ClassicSimilarity} to
* {@link ClassicSimilarity}.
* {@link ClassicSimilarityAlgorithm}.
*/
public static ClassicSimilarity map(com.azure.search.documents.indexes.implementation.models.ClassicSimilarity obj) {
public static ClassicSimilarityAlgorithm map(com.azure.search.documents.indexes.implementation.models.ClassicSimilarity obj) {
if (obj == null) {
return null;
}
ClassicSimilarity classicSimilarity = new ClassicSimilarity();
ClassicSimilarityAlgorithm classicSimilarity = new ClassicSimilarityAlgorithm();
return classicSimilarity;
}

/**
* Maps from {@link ClassicSimilarity} to
* Maps from {@link ClassicSimilarityAlgorithm} to
* {@link com.azure.search.documents.indexes.implementation.models.ClassicSimilarity}.
*/
public static com.azure.search.documents.indexes.implementation.models.ClassicSimilarity map(ClassicSimilarity obj) {
public static com.azure.search.documents.indexes.implementation.models.ClassicSimilarity map(ClassicSimilarityAlgorithm obj) {
if (obj == null) {
return null;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -37,16 +37,42 @@ public static EdgeNGramTokenFilter map(com.azure.search.documents.indexes.implem
return edgeNGramTokenFilter;
}

/**
 * Maps from {@link com.azure.search.documents.indexes.implementation.models.EdgeNGramTokenFilterV2} to
 * {@link EdgeNGramTokenFilter}.
 */
public static EdgeNGramTokenFilter map(com.azure.search.documents.indexes.implementation.models.EdgeNGramTokenFilterV2 obj) {
    if (obj == null) {
        return null;
    }

    // Copy each property from the service (V2) model onto the public model.
    EdgeNGramTokenFilter filter = new EdgeNGramTokenFilter();
    filter.setName(obj.getName());
    filter.setMaxGram(obj.getMaxGram());
    filter.setMinGram(obj.getMinGram());

    // The side enum has its own converter; only map it when present.
    if (obj.getSide() != null) {
        filter.setSide(EdgeNGramTokenFilterSideConverter.map(obj.getSide()));
    }
    return filter;
}

/**
* Maps from {@link EdgeNGramTokenFilter} to
* {@link com.azure.search.documents.indexes.implementation.models.EdgeNGramTokenFilter}.
* {@link com.azure.search.documents.indexes.implementation.models.EdgeNGramTokenFilterV2}.
*/
public static com.azure.search.documents.indexes.implementation.models.EdgeNGramTokenFilter map(EdgeNGramTokenFilter obj) {
public static com.azure.search.documents.indexes.implementation.models.EdgeNGramTokenFilterV2 map(EdgeNGramTokenFilter obj) {
if (obj == null) {
return null;
}
com.azure.search.documents.indexes.implementation.models.EdgeNGramTokenFilter edgeNGramTokenFilter =
new com.azure.search.documents.indexes.implementation.models.EdgeNGramTokenFilter();
com.azure.search.documents.indexes.implementation.models.EdgeNGramTokenFilterV2 edgeNGramTokenFilter =
new com.azure.search.documents.indexes.implementation.models.EdgeNGramTokenFilterV2();

String name = obj.getName();
edgeNGramTokenFilter.setName(name);
Expand Down

This file was deleted.

Original file line number Diff line number Diff line change
Expand Up @@ -6,14 +6,15 @@
import com.azure.search.documents.indexes.models.KeywordTokenizer;

/**
* A converter between {@link com.azure.search.documents.indexes.implementation.models.KeywordTokenizer} and
* A converter between {@link com.azure.search.documents.indexes.implementation.models.KeywordTokenizerV2} and
* {@link KeywordTokenizer}.
*/
public final class KeywordTokenizerConverter {
/**
* Maps from {@link com.azure.search.documents.indexes.implementation.models.KeywordTokenizer} to {@link KeywordTokenizer}.
* Maps from {@link com.azure.search.documents.indexes.implementation.models.KeywordTokenizerV2} to
* {@link KeywordTokenizer}.
*/
public static KeywordTokenizer map(com.azure.search.documents.indexes.implementation.models.KeywordTokenizer obj) {
public static KeywordTokenizer map(com.azure.search.documents.indexes.implementation.models.KeywordTokenizerV2 obj) {
if (obj == null) {
return null;
}
Expand All @@ -22,29 +23,48 @@ public static KeywordTokenizer map(com.azure.search.documents.indexes.implementa
String name = obj.getName();
keywordTokenizer.setName(name);

Integer bufferSize = obj.getBufferSize();
keywordTokenizer.setBufferSize(bufferSize);
Integer maxTokenLength = obj.getMaxTokenLength();
keywordTokenizer.setMaxTokenLength(maxTokenLength);
return keywordTokenizer;
}

/**
* Maps from {@link KeywordTokenizer} to {@link com.azure.search.documents.indexes.implementation.models.KeywordTokenizer}.
* Maps from {@link com.azure.search.documents.indexes.implementation.models.KeywordTokenizer} to
* {@link KeywordTokenizer}.
*/
public static com.azure.search.documents.indexes.implementation.models.KeywordTokenizer map(KeywordTokenizer obj) {
public static KeywordTokenizer map(com.azure.search.documents.indexes.implementation.models.KeywordTokenizer obj) {
if (obj == null) {
return null;
}
com.azure.search.documents.indexes.implementation.models.KeywordTokenizer keywordTokenizer =
new com.azure.search.documents.indexes.implementation.models.KeywordTokenizer();
KeywordTokenizer keywordTokenizer = new KeywordTokenizer();

String name = obj.getName();
keywordTokenizer.setName(name);

Integer bufferSize = obj.getBufferSize();
keywordTokenizer.setBufferSize(bufferSize);
keywordTokenizer.setMaxTokenLength(bufferSize);
return keywordTokenizer;
}

/**
 * Maps from {@link KeywordTokenizer} to
 * {@link com.azure.search.documents.indexes.implementation.models.KeywordTokenizerV2}.
 */
public static com.azure.search.documents.indexes.implementation.models.KeywordTokenizerV2 map(KeywordTokenizer obj) {
    if (obj == null) {
        return null;
    }

    // Build the service (V2) model and copy over the public model's properties.
    com.azure.search.documents.indexes.implementation.models.KeywordTokenizerV2 result =
        new com.azure.search.documents.indexes.implementation.models.KeywordTokenizerV2();
    result.setName(obj.getName());
    result.setMaxTokenLength(obj.getMaxTokenLength());
    return result;
}

private KeywordTokenizerConverter() {
}
}

This file was deleted.

Loading