Skip to content

Commit 7c10174

Browse files
authored
OPENNLP-1285: Changing Math to StrictMath for Java 8 vs 11 test inconsistencies. (#377)
1 parent e4b331d commit 7c10174

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

41 files changed

+114
-114
lines changed

opennlp-morfologik-addon/src/main/java/opennlp/morfologik/cmdline/CLI.java

+1-1
Original file line numberDiff line numberDiff line change
@@ -80,7 +80,7 @@ private static void usage() {
8080

8181
System.out.print(" " + tool.getName());
8282

83-
for (int i = 0; i < Math.abs(tool.getName().length()
83+
for (int i = 0; i < StrictMath.abs(tool.getName().length()
8484
- numberOfSpaces); i++) {
8585
System.out.print(" ");
8686
}

opennlp-tools/src/main/java/opennlp/tools/cmdline/CLI.java

+1-1
Original file line numberDiff line numberDiff line change
@@ -203,7 +203,7 @@ private static void usage() {
203203

204204
System.out.print(" " + tool.getName());
205205

206-
for (int i = 0; i < Math.abs(tool.getName().length() - numberOfSpaces); i++) {
206+
for (int i = 0; i < StrictMath.abs(tool.getName().length() - numberOfSpaces); i++) {
207207
System.out.print(" ");
208208
}
209209

opennlp-tools/src/main/java/opennlp/tools/cmdline/FineGrainedReportListener.java

+1-1
Original file line numberDiff line numberDiff line change
@@ -723,7 +723,7 @@ private void increment(String column) {
723723
*/
724724
public double getAccuracy() {
725725
// we save the accuracy because it is frequently used by the comparator
726-
if (Math.abs(acc - 1.0d) < 0.0000000001) {
726+
if (StrictMath.abs(acc - 1.0d) < 0.0000000001) {
727727
if (total == 0)
728728
acc = 0.0d;
729729
acc = (double) correct / (double) total;

opennlp-tools/src/main/java/opennlp/tools/dictionary/Dictionary.java

+2-2
Original file line numberDiff line numberDiff line change
@@ -124,8 +124,8 @@ public Dictionary(InputStream in) throws IOException {
124124
*/
125125
public void put(StringList tokens) {
126126
entrySet.add(new StringListWrapper(tokens));
127-
minTokenCount = Math.min(minTokenCount, tokens.size());
128-
maxTokenCount = Math.max(maxTokenCount, tokens.size());
127+
minTokenCount = StrictMath.min(minTokenCount, tokens.size());
128+
maxTokenCount = StrictMath.max(maxTokenCount, tokens.size());
129129
}
130130

131131
/**

opennlp-tools/src/main/java/opennlp/tools/languagemodel/NGramLanguageModel.java

+4-4
Original file line numberDiff line numberDiff line change
@@ -62,13 +62,13 @@ public double calculateProbability(StringList tokens) {
6262
if (size() > 0) {
6363
for (StringList ngram : NGramUtils.getNGrams(tokens, n)) {
6464
double score = stupidBackoff(ngram);
65-
probability += Math.log(score);
65+
probability += StrictMath.log(score);
6666
if (Double.isNaN(probability)) {
6767
probability = 0d;
6868
break;
6969
}
7070
}
71-
probability = Math.exp(probability);
71+
probability = StrictMath.exp(probability);
7272
}
7373
return probability;
7474
}
@@ -79,13 +79,13 @@ public double calculateProbability(String... tokens) {
7979
if (size() > 0) {
8080
for (String[] ngram : NGramUtils.getNGrams(tokens, n)) {
8181
double score = stupidBackoff(new StringList(ngram));
82-
probability += Math.log(score);
82+
probability += StrictMath.log(score);
8383
if (Double.isNaN(probability)) {
8484
probability = 0d;
8585
break;
8686
}
8787
}
88-
probability = Math.exp(probability);
88+
probability = StrictMath.exp(probability);
8989
}
9090
return probability;
9191
}

opennlp-tools/src/main/java/opennlp/tools/lemmatizer/DefaultLemmatizerContextGenerator.java

+2-2
Original file line numberDiff line numberDiff line change
@@ -42,15 +42,15 @@ public DefaultLemmatizerContextGenerator() {
4242
protected static String[] getPrefixes(String lex) {
4343
String[] prefs = new String[PREFIX_LENGTH];
4444
for (int li = 1; li < PREFIX_LENGTH; li++) {
45-
prefs[li] = lex.substring(0, Math.min(li + 1, lex.length()));
45+
prefs[li] = lex.substring(0, StrictMath.min(li + 1, lex.length()));
4646
}
4747
return prefs;
4848
}
4949

5050
protected static String[] getSuffixes(String lex) {
5151
String[] suffs = new String[SUFFIX_LENGTH];
5252
for (int li = 1; li < SUFFIX_LENGTH; li++) {
53-
suffs[li] = lex.substring(Math.max(lex.length() - li - 1, 0));
53+
suffs[li] = lex.substring(StrictMath.max(lex.length() - li - 1, 0));
5454
}
5555
return suffs;
5656
}

opennlp-tools/src/main/java/opennlp/tools/ml/ArrayMath.java

+4-4
Original file line numberDiff line numberDiff line change
@@ -43,15 +43,15 @@ public static double innerProduct(double[] vecA, double[] vecB) {
4343
public static double l1norm(double[] v) {
4444
double norm = 0;
4545
for (int i = 0; i < v.length; i++)
46-
norm += Math.abs(v[i]);
46+
norm += StrictMath.abs(v[i]);
4747
return norm;
4848
}
4949

5050
/**
5151
* L2-norm
5252
*/
5353
public static double l2norm(double[] v) {
54-
return Math.sqrt(innerProduct(v, v));
54+
return StrictMath.sqrt(innerProduct(v, v));
5555
}
5656

5757
/**
@@ -73,9 +73,9 @@ public static double logSumOfExps(double[] x) {
7373
double sum = 0.0;
7474
for (int i = 0; i < x.length; i++) {
7575
if (x[i] != Double.NEGATIVE_INFINITY)
76-
sum += Math.exp(x[i] - max);
76+
sum += StrictMath.exp(x[i] - max);
7777
}
78-
return max + Math.log(sum);
78+
return max + StrictMath.log(sum);
7979
}
8080

8181
public static double max(double[] x) {

opennlp-tools/src/main/java/opennlp/tools/ml/BeamSearch.java

+3-3
Original file line numberDiff line numberDiff line change
@@ -96,7 +96,7 @@ public Sequence[] bestSequences(int numSequences, T[] sequence,
9696
}
9797

9898
for (int i = 0; i < sequence.length; i++) {
99-
int sz = Math.min(size, prev.size());
99+
int sz = StrictMath.min(size, prev.size());
100100

101101
for (int sc = 0; prev.size() > 0 && sc < sz; sc++) {
102102
Sequence top = prev.remove();
@@ -115,7 +115,7 @@ public Sequence[] bestSequences(int numSequences, T[] sequence,
115115

116116
Arrays.sort(temp_scores);
117117

118-
double min = temp_scores[Math.max(0,scores.length - size)];
118+
double min = temp_scores[StrictMath.max(0,scores.length - size)];
119119

120120
for (int p = 0; p < scores.length; p++) {
121121
if (scores[p] >= min) {
@@ -149,7 +149,7 @@ public Sequence[] bestSequences(int numSequences, T[] sequence,
149149
next = tmp;
150150
}
151151

152-
int numSeq = Math.min(numSequences, prev.size());
152+
int numSeq = StrictMath.min(numSequences, prev.size());
153153
Sequence[] topSequences = new Sequence[numSeq];
154154

155155
for (int seqIndex = 0; seqIndex < numSeq; seqIndex++) {

opennlp-tools/src/main/java/opennlp/tools/ml/maxent/GISModel.java

+1-1
Original file line numberDiff line numberDiff line change
@@ -188,7 +188,7 @@ static double[] eval(Context[] context, float[] values, double[] prior,
188188

189189
double normal = 0.0;
190190
for (int oid = 0; oid < model.getNumOutcomes(); oid++) {
191-
prior[oid] = Math.exp(prior[oid]);
191+
prior[oid] = StrictMath.exp(prior[oid]);
192192
normal += prior[oid];
193193
}
194194

opennlp-tools/src/main/java/opennlp/tools/ml/maxent/GISTrainer.java

+5-5
Original file line numberDiff line numberDiff line change
@@ -530,14 +530,14 @@ private double gaussianUpdate(int predicate, int oid, double correctionConstant)
530530
double modelValue = modelExpects[0][predicate].getParameters()[oid];
531531
double observedValue = observedExpects[predicate].getParameters()[oid];
532532
for (int i = 0; i < 50; i++) {
533-
double tmp = modelValue * Math.exp(correctionConstant * x0);
533+
double tmp = modelValue * StrictMath.exp(correctionConstant * x0);
534534
double f = tmp + (param + x0) / sigma - observedValue;
535535
double fp = tmp * correctionConstant + 1 / sigma;
536536
if (fp == 0) {
537537
break;
538538
}
539539
double x = x0 - f / fp;
540-
if (Math.abs(x - x0) < 0.000001) {
540+
if (StrictMath.abs(x - x0) < 0.000001) {
541541
x0 = x;
542542
break;
543543
}
@@ -623,8 +623,8 @@ private double nextIteration(double correctionConstant,
623623
if (model[aoi] == 0) {
624624
System.err.println("Model expects == 0 for " + predLabels[pi] + " " + outcomeLabels[aoi]);
625625
}
626-
//params[pi].updateParameter(aoi,(Math.log(observed[aoi]) - Math.log(model[aoi])));
627-
params[pi].updateParameter(aoi, ((Math.log(observed[aoi]) - Math.log(model[aoi]))
626+
//params[pi].updateParameter(aoi,(StrictMath.log(observed[aoi]) - StrictMath.log(model[aoi])));
627+
params[pi].updateParameter(aoi, ((StrictMath.log(observed[aoi]) - StrictMath.log(model[aoi]))
628628
/ correctionConstant));
629629
}
630630

@@ -695,7 +695,7 @@ public ModelExpectationComputeTask call() {
695695
}
696696
}
697697

698-
loglikelihood += Math.log(modelDistribution[outcomeList[ei]]) * numTimesEventsSeen[ei];
698+
loglikelihood += StrictMath.log(modelDistribution[outcomeList[ei]]) * numTimesEventsSeen[ei];
699699

700700
numEvents += numTimesEventsSeen[ei];
701701
if (printMessages) {

opennlp-tools/src/main/java/opennlp/tools/ml/maxent/quasinewton/NegLogLikelihood.java

+1-1
Original file line numberDiff line numberDiff line change
@@ -137,7 +137,7 @@ public double[] gradientAt(double[] x) {
137137
logSumOfExps = ArrayMath.logSumOfExps(expectation);
138138

139139
for (oi = 0; oi < numOutcomes; oi++) {
140-
expectation[oi] = Math.exp(expectation[oi] - logSumOfExps);
140+
expectation[oi] = StrictMath.exp(expectation[oi] - logSumOfExps);
141141
}
142142

143143
for (oi = 0; oi < numOutcomes; oi++) {

opennlp-tools/src/main/java/opennlp/tools/ml/maxent/quasinewton/ParallelNegLogLikelihood.java

+1-1
Original file line numberDiff line numberDiff line change
@@ -237,7 +237,7 @@ public GradientComputeTask call() {
237237
logSumOfExps = ArrayMath.logSumOfExps(expectation);
238238

239239
for (oi = 0; oi < numOutcomes; oi++) {
240-
expectation[oi] = Math.exp(expectation[oi] - logSumOfExps);
240+
expectation[oi] = StrictMath.exp(expectation[oi] - logSumOfExps);
241241
}
242242

243243
for (oi = 0; oi < numOutcomes; oi++) {

opennlp-tools/src/main/java/opennlp/tools/ml/maxent/quasinewton/QNMinimizer.java

+3-3
Original file line numberDiff line numberDiff line change
@@ -36,7 +36,7 @@
3636
*
3737
* {@literal @}Override
3838
* public double valueAt(double[] x) {
39-
* return Math.pow(x[0]-1, 2) + 10;
39+
* return StrictMath.pow(x[0]-1, 2) + 10;
4040
* }
4141
*
4242
* {@literal @}Override
@@ -279,7 +279,7 @@ else if (iter < 100)
279279
if (l1Cost > 0 && l2Cost > 0) {
280280
double[] x = lsr.getNextPoint();
281281
for (int i = 0; i < dimension; i++) {
282-
x[i] = Math.sqrt(1 + l2Cost) * x[i];
282+
x[i] = StrictMath.sqrt(1 + l2Cost) * x[i];
283283
}
284284
}
285285

@@ -375,7 +375,7 @@ private boolean isConverged(LineSearchResult lsr) {
375375
}
376376

377377
// Check gradient's norm using the criteria: ||g(x)|| / max(1, ||x||) < threshold
378-
double xNorm = Math.max(1, ArrayMath.l2norm(lsr.getNextPoint()));
378+
double xNorm = StrictMath.max(1, ArrayMath.l2norm(lsr.getNextPoint()));
379379
double gradNorm = l1Cost > 0 ?
380380
ArrayMath.l2norm(lsr.getPseudoGradAtNext()) : ArrayMath.l2norm(lsr.getGradAtNext());
381381
if (gradNorm / xNorm < REL_GRAD_NORM_TOL) {

opennlp-tools/src/main/java/opennlp/tools/ml/maxent/quasinewton/QNModel.java

+2-2
Original file line numberDiff line numberDiff line change
@@ -80,7 +80,7 @@ private double[] eval(String[] context, float[] values, double[] probs) {
8080

8181
double logSumExp = ArrayMath.logSumOfExps(probs);
8282
for (int oi = 0; oi < outcomeNames.length; oi++) {
83-
probs[oi] = Math.exp(probs[oi] - logSumExp);
83+
probs[oi] = StrictMath.exp(probs[oi] - logSumExp);
8484
}
8585
return probs;
8686
}
@@ -117,7 +117,7 @@ static double[] eval(int[] context, float[] values, double[] probs,
117117
double logSumExp = ArrayMath.logSumOfExps(probs);
118118

119119
for (int oi = 0; oi < nOutcomes; oi++) {
120-
probs[oi] = Math.exp(probs[oi] - logSumExp);
120+
probs[oi] = StrictMath.exp(probs[oi] - logSumExp);
121121
}
122122

123123
return probs;

opennlp-tools/src/main/java/opennlp/tools/ml/model/ComparableEvent.java

+1-1
Original file line numberDiff line numberDiff line change
@@ -48,7 +48,7 @@ public int compareTo(ComparableEvent ce) {
4848
return compareOutcome;
4949
}
5050

51-
int smallerLength = Math.min(predIndexes.length, ce.predIndexes.length);
51+
int smallerLength = StrictMath.min(predIndexes.length, ce.predIndexes.length);
5252

5353
for (int i = 0; i < smallerLength; i++) {
5454
int comparePredIndexes = Integer.compare(predIndexes[i], ce.predIndexes[i]);

opennlp-tools/src/main/java/opennlp/tools/ml/model/ComparablePredicate.java

+1-1
Original file line numberDiff line numberDiff line change
@@ -37,7 +37,7 @@ public ComparablePredicate(String n, int[] ocs, double[] ps) {
3737
}
3838

3939
public int compareTo(ComparablePredicate cp) {
40-
int smallerLength = Math.min(outcomes.length, cp.outcomes.length);
40+
int smallerLength = StrictMath.min(outcomes.length, cp.outcomes.length);
4141

4242
for (int i = 0; i < smallerLength; i++) {
4343
int compareOutcomes = Integer.compare(outcomes[i], cp.outcomes[i]);

opennlp-tools/src/main/java/opennlp/tools/ml/model/UniformPrior.java

+1-1
Original file line numberDiff line numberDiff line change
@@ -44,7 +44,7 @@ public void logPrior(double[] dist, int[] context) {
4444

4545
public void setLabels(String[] outcomeLabels, String[] contextLabels) {
4646
this.numOutcomes = outcomeLabels.length;
47-
r = Math.log(1.0 / numOutcomes);
47+
r = StrictMath.log(1.0 / numOutcomes);
4848
}
4949

5050
@Override

opennlp-tools/src/main/java/opennlp/tools/ml/naivebayes/LogProbabilities.java

+3-3
Original file line numberDiff line numberDiff line change
@@ -113,7 +113,7 @@ private Map<T, Double> normalize() {
113113
T t = entry.getKey();
114114
Double p = entry.getValue();
115115
if (p != null) {
116-
double temp_p = Math.exp(p - highestLogProbability);
116+
double temp_p = StrictMath.exp(p - highestLogProbability);
117117
if (!Double.isNaN(temp_p)) {
118118
sum += temp_p;
119119
temp.put(t, temp_p);
@@ -133,7 +133,7 @@ private Map<T, Double> normalize() {
133133
}
134134

135135
private double log(double prob) {
136-
return Math.log(prob);
136+
return StrictMath.log(prob);
137137
}
138138

139139
/**
@@ -163,7 +163,7 @@ public Double getLog(T t) {
163163
}
164164

165165
public void discardCountsBelow(double i) {
166-
i = Math.log(i);
166+
i = StrictMath.log(i);
167167
ArrayList<T> labelsToRemove = new ArrayList<>();
168168
for (Entry<T, Double> entry : map.entrySet()) {
169169
final T label = entry.getKey();

opennlp-tools/src/main/java/opennlp/tools/ml/naivebayes/LogProbability.java

+4-4
Original file line numberDiff line numberDiff line change
@@ -36,7 +36,7 @@ public LogProbability(T label) {
3636
* @param probability the probability to assign
3737
*/
3838
public void set(double probability) {
39-
this.probability = Math.log(probability);
39+
this.probability = StrictMath.log(probability);
4040
}
4141

4242
/**
@@ -55,7 +55,7 @@ public void set(Probability probability) {
5555
* @param probability the probability to assign
5656
*/
5757
public void setIfLarger(double probability) {
58-
double logP = Math.log(probability);
58+
double logP = StrictMath.log(probability);
5959
if (this.probability < logP) {
6060
this.probability = logP;
6161
}
@@ -98,7 +98,7 @@ public void setLog(double probability) {
9898
* @param probability the probability weight to add
9999
*/
100100
public void addIn(double probability) {
101-
setLog(this.probability + Math.log(probability));
101+
setLog(this.probability + StrictMath.log(probability));
102102
}
103103

104104
/**
@@ -107,7 +107,7 @@ public void addIn(double probability) {
107107
* @return the probability associated with the label
108108
*/
109109
public Double get() {
110-
return Math.exp(probability);
110+
return StrictMath.exp(probability);
111111
}
112112

113113
/**

opennlp-tools/src/main/java/opennlp/tools/ml/naivebayes/Probabilities.java

+3-3
Original file line numberDiff line numberDiff line change
@@ -82,7 +82,7 @@ public void setIfLarger(T t, double probability) {
8282
* @param probability the log probability to assign
8383
*/
8484
public void setLog(T t, double probability) {
85-
set(t, Math.exp(probability));
85+
set(t, StrictMath.exp(probability));
8686
}
8787

8888
/**
@@ -97,7 +97,7 @@ public void addIn(T t, double probability, int count) {
9797
Double p = map.get(t);
9898
if (p == null)
9999
p = 1.0;
100-
probability = Math.pow(probability, count);
100+
probability = StrictMath.pow(probability, count);
101101
map.put(t, p * probability);
102102
}
103103

@@ -121,7 +121,7 @@ public Double get(T t) {
121121
* @return the log probability associated with the label
122122
*/
123123
public Double getLog(T t) {
124-
return Math.log(get(t));
124+
return StrictMath.log(get(t));
125125
}
126126

127127
/**

opennlp-tools/src/main/java/opennlp/tools/ml/naivebayes/Probability.java

+2-2
Original file line numberDiff line numberDiff line change
@@ -88,7 +88,7 @@ public boolean isLarger(Probability probability) {
8888
* @param probability the log probability to assign
8989
*/
9090
public void setLog(double probability) {
91-
set(Math.exp(probability));
91+
set(StrictMath.exp(probability));
9292
}
9393

9494
/**
@@ -115,7 +115,7 @@ public Double get() {
115115
* @return the log probability associated with the label
116116
*/
117117
public Double getLog() {
118-
return Math.log(get());
118+
return StrictMath.log(get());
119119
}
120120

121121
/**

0 commit comments

Comments (0)