Commit
* Adding L2 norm technique

Signed-off-by: Martin Gaievski <gaievski@amazon.com>
1 parent: fe72dbc
commit: 6ad641a
Showing 8 changed files with 559 additions and 20 deletions.
104 changes: 104 additions & 0 deletions
.../opensearch/neuralsearch/processor/combination/HarmonicMeanScoreCombinationTechnique.java
@@ -0,0 +1,104 @@
/*
 * Copyright OpenSearch Contributors
 * SPDX-License-Identifier: Apache-2.0
 */

package org.opensearch.neuralsearch.processor.combination;

import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;

/**
 * Abstracts combination of scores based on arithmetic mean method
 */
public class HarmonicMeanScoreCombinationTechnique implements ScoreCombinationTechnique {

    public static final String TECHNIQUE_NAME = "arithmetic_mean";
    public static final String PARAM_NAME_WEIGHTS = "weights";
    private static final Set<String> SUPPORTED_PARAMS = Set.of(PARAM_NAME_WEIGHTS);
    private static final Float ZERO_SCORE = 0.0f;
    private final List<Float> weights;

    public HarmonicMeanScoreCombinationTechnique(final Map<String, Object> params) {
        validateParams(params);
        weights = getWeights(params);
    }

    private List<Float> getWeights(final Map<String, Object> params) {
        if (Objects.isNull(params) || params.isEmpty()) {
            return List.of();
        }
        // get weights, we don't need to check for instance as it's done during validation
        return ((List<Double>) params.getOrDefault(PARAM_NAME_WEIGHTS, List.of())).stream()
            .map(Double::floatValue)
            .collect(Collectors.toUnmodifiableList());
    }

    /**
     * Arithmetic mean method for combining scores.
     * score = (weight1*score1 + weight2*score2 +...+ weightN*scoreN)/(weight1 + weight2 + ... + weightN)
     *
     * Zero (0.0) scores are excluded from number of scores N
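     *
     * Worked example with hypothetical values: for scores = [0.6, 0.2] and weights = [0.7, 0.3],
     * combined score = (0.7 * 0.6 + 0.3 * 0.2) / (0.7 + 0.3) = 0.48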
     */
    @Override
    public float combine(final float[] scores) {
        float combinedScore = 0.0f;
        float weights = 0;
        for (int indexOfSubQuery = 0; indexOfSubQuery < scores.length; indexOfSubQuery++) {
            float score = scores[indexOfSubQuery];
            if (score >= 0.0) {
                float weight = getWeightForSubQuery(indexOfSubQuery);
                score = score * weight;
                combinedScore += score;
                weights += weight;
            }
        }
        if (weights == 0.0f) {
            return ZERO_SCORE;
        }
        return combinedScore / weights;
    }

    private void validateParams(final Map<String, Object> params) {
        if (Objects.isNull(params) || params.isEmpty()) {
            return;
        }
        // check if only supported params are passed
        Optional<String> optionalNotSupportedParam = params.keySet()
            .stream()
            .filter(paramName -> !SUPPORTED_PARAMS.contains(paramName))
            .findFirst();
        if (optionalNotSupportedParam.isPresent()) {
            throw new IllegalArgumentException(
                String.format(
                    Locale.ROOT,
                    "provided parameter for combination technique is not supported. supported parameters are [%s]",
                    SUPPORTED_PARAMS.stream().collect(Collectors.joining(","))
                )
            );
        }

        // check param types
        if (params.keySet().stream().anyMatch(PARAM_NAME_WEIGHTS::equalsIgnoreCase)) {
            if (!(params.get(PARAM_NAME_WEIGHTS) instanceof List)) {
                throw new IllegalArgumentException(
                    String.format(Locale.ROOT, "parameter [%s] must be a collection of numbers", PARAM_NAME_WEIGHTS)
                );
            }
        }
    }

    /**
     * Get weight for sub-query based on its index in the hybrid search query. Use user provided weight or 1.0 otherwise
     * @param indexOfSubQuery 0-based index of sub-query in the Hybrid Search query
     * @return weight for sub-query, use one that is set in processor/pipeline definition or 1.0 as default
     */
    private float getWeightForSubQuery(int indexOfSubQuery) {
        return indexOfSubQuery < weights.size() ? weights.get(indexOfSubQuery) : 1.0f;
    }
}
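For reference, the combination technique above can be exercised on its own, outside of the search pipeline. The snippet below is a minimal sketch, assuming the class from this diff is on the classpath; the example class name, weights, and scores are hypothetical values chosen only to illustrate the weighted-mean formula documented in the combine() javadoc.

// Hypothetical standalone usage sketch; not part of the commit.
import java.util.List;
import java.util.Map;

import org.opensearch.neuralsearch.processor.combination.HarmonicMeanScoreCombinationTechnique;

public class CombinationTechniqueExample {
    public static void main(String[] args) {
        // weights for two sub-queries of a hybrid query, passed the same way a processor configuration would
        Map<String, Object> params = Map.of("weights", List.of(0.7, 0.3));
        HarmonicMeanScoreCombinationTechnique technique = new HarmonicMeanScoreCombinationTechnique(params);

        // scores from the two sub-queries (hypothetical values)
        float[] subQueryScores = new float[] { 0.6f, 0.2f };

        // (0.7 * 0.6 + 0.3 * 0.2) / (0.7 + 0.3) ≈ 0.48
        System.out.println(technique.combine(subQueryScores));
    }
}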
88 changes: 88 additions & 0 deletions
...va/org/opensearch/neuralsearch/processor/normalization/L2ScoreNormalizationTechnique.java
@@ -0,0 +1,88 @@
/*
 * Copyright OpenSearch Contributors
 * SPDX-License-Identifier: Apache-2.0
 */

package org.opensearch.neuralsearch.processor.normalization;

import java.util.ArrayList;
import java.util.List;
import java.util.Objects;

import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TopDocs;
import org.opensearch.neuralsearch.search.CompoundTopDocs;

/**
 * Abstracts normalization of scores based on L2 method
 */
public class L2ScoreNormalizationTechnique implements ScoreNormalizationTechnique {

    public static final String TECHNIQUE_NAME = "l2";
    private static final float MIN_SCORE = 0.001f;

    /**
     * L2 normalization method.
     * n_score_i = score_i/sqrt(score1^2 + score2^2 + ... + scoren^2)
     * Main algorithm steps:
     * - calculate sum of squares of all scores
     * - iterate over each result and update score as per formula above where "score" is raw score returned by Hybrid query
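     *
     * Worked example with hypothetical values: raw scores [3.0, 4.0] give an L2 norm of sqrt(9 + 16) = 5,
     * so the normalized scores are [0.6, 0.8]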
     */
    @Override
    public void normalize(final List<CompoundTopDocs> queryTopDocs) {
        // get l2 norms for each sub-query
        List<Float> normsPerSubquery = getL2Norm(queryTopDocs);

        // do normalization using actual score and l2 norm
        for (CompoundTopDocs compoundQueryTopDocs : queryTopDocs) {
            if (Objects.isNull(compoundQueryTopDocs)) {
                continue;
            }
            List<TopDocs> topDocsPerSubQuery = compoundQueryTopDocs.getCompoundTopDocs();
            for (int j = 0; j < topDocsPerSubQuery.size(); j++) {
                TopDocs subQueryTopDoc = topDocsPerSubQuery.get(j);
                for (ScoreDoc scoreDoc : subQueryTopDoc.scoreDocs) {
                    scoreDoc.score = normalizeSingleScore(scoreDoc.score, normsPerSubquery.get(j));
                }
            }
        }
    }

    private List<Float> getL2Norm(final List<CompoundTopDocs> queryTopDocs) {
        // find any non-empty compound top docs, it's either empty if shard does not have any results for all of sub-queries,
        // or it has results for all the sub-queries. In edge case of shard having results only for one sub-query, there will be
        // TopDocs for rest of sub-queries with zero total hits
        int numOfSubqueries = queryTopDocs.stream()
            .filter(Objects::nonNull)
            .filter(topDocs -> topDocs.getCompoundTopDocs().size() > 0)
            .findAny()
            .get()
            .getCompoundTopDocs()
            .size();
        float[] l2Norms = new float[numOfSubqueries];
        for (CompoundTopDocs compoundQueryTopDocs : queryTopDocs) {
            if (Objects.isNull(compoundQueryTopDocs)) {
                continue;
            }
            List<TopDocs> topDocsPerSubQuery = compoundQueryTopDocs.getCompoundTopDocs();
            int bound = topDocsPerSubQuery.size();
            for (int index = 0; index < bound; index++) {
                for (ScoreDoc scoreDocs : topDocsPerSubQuery.get(index).scoreDocs) {
                    l2Norms[index] += scoreDocs.score * scoreDocs.score;
                }
            }
        }
        for (int index = 0; index < l2Norms.length; index++) {
            l2Norms[index] = (float) Math.sqrt(l2Norms[index]);
        }
        List<Float> l2NormList = new ArrayList<>();
        for (int index = 0; index < numOfSubqueries; index++) {
            l2NormList.add(l2Norms[index]);
        }
        return l2NormList;
    }

    private float normalizeSingleScore(final float score, final float l2Norm) {
        return l2Norm == 0 ? MIN_SCORE : score / l2Norm;
    }
}
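The L2 normalization rule itself can be checked without the Lucene and OpenSearch types used above. The following is a self-contained sketch, not plugin code: the class and method names (L2NormalizationExample, l2Normalize) are hypothetical, and it simply applies the same score_i / sqrt(score1^2 + ... + scoren^2) formula to a plain float array, mirroring the zero-norm fallback constant from the class above.

// Hypothetical standalone illustration of the L2 normalization rule; not part of the commit.
import java.util.Arrays;

public class L2NormalizationExample {

    private static final float MIN_SCORE = 0.001f;

    // Divide each raw score by the L2 norm of all scores produced by one sub-query.
    static float[] l2Normalize(float[] scores) {
        float sumOfSquares = 0.0f;
        for (float score : scores) {
            sumOfSquares += score * score;
        }
        float l2Norm = (float) Math.sqrt(sumOfSquares);
        float[] normalized = new float[scores.length];
        for (int i = 0; i < scores.length; i++) {
            // same zero-norm fallback as normalizeSingleScore above
            normalized[i] = l2Norm == 0 ? MIN_SCORE : scores[i] / l2Norm;
        }
        return normalized;
    }

    public static void main(String[] args) {
        // L2 norm = sqrt(3^2 + 4^2) = 5, so the output is [0.6, 0.8]
        System.out.println(Arrays.toString(l2Normalize(new float[] { 3.0f, 4.0f })));
    }
}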