From a2525a2cdcc8cb02bf75ef0f2c96a79c9b1251d2 Mon Sep 17 00:00:00 2001 From: Bruno Roustant Date: Sun, 26 May 2024 20:14:59 +0200 Subject: [PATCH 01/10] Remove hppc dependency. --- .../apache/lucene/util/hppc/DoubleCursor.java | 35 + .../lucene/util/hppc/FloatArrayList.java | 444 +++++++++ .../apache/lucene/util/hppc/FloatCursor.java | 35 + .../apache/lucene/util/hppc/IntArrayList.java | 10 +- .../lucene/util/hppc/IntDoubleHashMap.java | 851 ++++++++++++++++++ .../lucene/util/hppc/IntFloatHashMap.java | 850 +++++++++++++++++ .../lucene/util/hppc/IntIntHashMap.java | 4 +- .../lucene/util/hppc/IntObjectHashMap.java | 2 +- .../lucene/util/hppc/LongArrayList.java | 10 +- .../lucene/util/hppc/LongFloatHashMap.java | 850 +++++++++++++++++ .../lucene/util/hppc/LongIntHashMap.java | 4 +- .../lucene/util/hppc/LongObjectHashMap.java | 2 +- .../lucene/util/hppc/TestFloatArrayList.java | 460 ++++++++++ .../lucene/util/hppc/TestIntArrayList.java | 5 +- .../util/hppc/TestIntDoubleHashMap.java | 654 ++++++++++++++ .../lucene/util/hppc/TestIntFloatHashMap.java | 654 ++++++++++++++ .../lucene/util/hppc/TestIntIntHashMap.java | 30 +- .../util/hppc/TestIntObjectHashMap.java | 44 +- .../lucene/util/hppc/TestLongArrayList.java | 5 +- .../util/hppc/TestLongFloatHashMap.java | 654 ++++++++++++++ .../lucene/util/hppc/TestLongIntHashMap.java | 30 +- .../util/hppc/TestLongObjectHashMap.java | 44 +- lucene/facet/build.gradle | 1 - lucene/facet/src/java/module-info.java | 2 - .../lucene/facet/LongValueFacetCounts.java | 11 +- .../lucene/facet/StringValueFacetCounts.java | 7 +- .../range/OverlappingLongRangeCounter.java | 11 +- .../facet/taxonomy/FloatTaxonomyFacets.java | 2 +- .../facet/taxonomy/IntTaxonomyFacets.java | 2 +- .../taxonomy/OrdinalMappingLeafReader.java | 2 +- .../lucene/facet/taxonomy/TaxonomyFacets.java | 9 +- .../directory/DirectoryTaxonomyReader.java | 4 +- .../directory/TaxonomyIndexArrays.java | 2 +- lucene/join/build.gradle | 1 - lucene/join/src/java/module-info.java | 2 - ...versifyingNearestChildrenKnnCollector.java | 2 +- .../apache/lucene/search/join/JoinUtil.java | 20 +- .../join/PointInSetIncludingScoreQuery.java | 4 +- lucene/spatial-extras/build.gradle | 2 - .../spatial-extras/src/java/module-info.java | 1 - .../util/CachingDoubleValueSource.java | 2 +- lucene/spatial3d/build.gradle | 2 - lucene/spatial3d/src/java/module-info.java | 2 - .../spatial3d/geom/GeoStandardPath.java | 6 +- .../spatial3d/geom/StandardObjects.java | 2 +- versions.lock | 1 - versions.props | 1 - 47 files changed, 5586 insertions(+), 192 deletions(-) create mode 100644 lucene/core/src/java/org/apache/lucene/util/hppc/DoubleCursor.java create mode 100644 lucene/core/src/java/org/apache/lucene/util/hppc/FloatArrayList.java create mode 100644 lucene/core/src/java/org/apache/lucene/util/hppc/FloatCursor.java create mode 100644 lucene/core/src/java/org/apache/lucene/util/hppc/IntDoubleHashMap.java create mode 100644 lucene/core/src/java/org/apache/lucene/util/hppc/IntFloatHashMap.java create mode 100644 lucene/core/src/java/org/apache/lucene/util/hppc/LongFloatHashMap.java create mode 100644 lucene/core/src/test/org/apache/lucene/util/hppc/TestFloatArrayList.java create mode 100644 lucene/core/src/test/org/apache/lucene/util/hppc/TestIntDoubleHashMap.java create mode 100644 lucene/core/src/test/org/apache/lucene/util/hppc/TestIntFloatHashMap.java create mode 100644 lucene/core/src/test/org/apache/lucene/util/hppc/TestLongFloatHashMap.java diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/DoubleCursor.java 
b/lucene/core/src/java/org/apache/lucene/util/hppc/DoubleCursor.java new file mode 100644 index 000000000000..20771b9e4df7 --- /dev/null +++ b/lucene/core/src/java/org/apache/lucene/util/hppc/DoubleCursor.java @@ -0,0 +1,35 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.lucene.util.hppc; + +/** Forked from HPPC, holding int index and double value */ +public final class DoubleCursor { + /** + * The current value's index in the container this cursor belongs to. The meaning of this index is + * defined by the container (usually it will be an index in the underlying storage buffer). + */ + public int index; + + /** The current value. */ + public double value; + + @Override + public String toString() { + return "[cursor, index: " + index + ", value: " + value + "]"; + } +} diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/FloatArrayList.java b/lucene/core/src/java/org/apache/lucene/util/hppc/FloatArrayList.java new file mode 100644 index 000000000000..35ad322f3102 --- /dev/null +++ b/lucene/core/src/java/org/apache/lucene/util/hppc/FloatArrayList.java @@ -0,0 +1,444 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.lucene.util.hppc; + +import static org.apache.lucene.util.hppc.HashContainers.DEFAULT_EXPECTED_ELEMENTS; + +import java.util.Arrays; +import java.util.Iterator; +import org.apache.lucene.util.Accountable; +import org.apache.lucene.util.ArrayUtil; +import org.apache.lucene.util.RamUsageEstimator; + +/** + * An array-backed list of {@code float}. + * + *

<p>Mostly forked and trimmed from com.carrotsearch.hppc.FloatArrayList + * + * <p>

github: https://github.com/carrotsearch/hppc release 0.9.0 + */ +public class FloatArrayList implements Iterable, Cloneable, Accountable { + private static final long BASE_RAM_BYTES_USED = + RamUsageEstimator.shallowSizeOfInstance(FloatArrayList.class); + + /** An immutable empty buffer (array). */ + public static final float[] EMPTY_ARRAY = new float[0]; + + /** + * Internal array for storing the list. The array may be larger than the current size ({@link + * #size()}). + */ + public float[] buffer; + + /** Current number of elements stored in {@link #buffer}. */ + public int elementsCount; + + /** New instance with sane defaults. */ + public FloatArrayList() { + this(DEFAULT_EXPECTED_ELEMENTS); + } + + /** + * New instance with sane defaults. + * + * @param expectedElements The expected number of elements guaranteed not to cause buffer + * expansion (inclusive). + */ + public FloatArrayList(int expectedElements) { + buffer = new float[expectedElements]; + } + + /** Creates a new list from the elements of another list in its iteration order. */ + public FloatArrayList(FloatArrayList list) { + this(list.size()); + addAll(list); + } + + public void add(float e1) { + ensureBufferSpace(1); + buffer[elementsCount++] = e1; + } + + /** Add all elements from a range of given array to the list. */ + public void add(float[] elements, int start, int length) { + assert length >= 0 : "Length must be >= 0"; + + ensureBufferSpace(length); + System.arraycopy(elements, start, buffer, elementsCount, length); + elementsCount += length; + } + + /** + * Vararg-signature method for adding elements at the end of the list. + * + *

This method is handy, but costly if used in tight loops (anonymous array passing) + */ + /* */ + public final void add(float... elements) { + add(elements, 0, elements.length); + } + + /** Adds all elements from another list. */ + public int addAll(FloatArrayList list) { + final int size = list.size(); + ensureBufferSpace(size); + + for (FloatCursor cursor : list) { + add(cursor.value); + } + + return size; + } + + /** Adds all elements from another iterable. */ + public int addAll(Iterable iterable) { + int size = 0; + for (FloatCursor cursor : iterable) { + add(cursor.value); + size++; + } + return size; + } + + public void insert(int index, float e1) { + assert (index >= 0 && index <= size()) + : "Index " + index + " out of bounds [" + 0 + ", " + size() + "]."; + + ensureBufferSpace(1); + System.arraycopy(buffer, index, buffer, index + 1, elementsCount - index); + buffer[index] = e1; + elementsCount++; + } + + public float get(int index) { + assert (index >= 0 && index < size()) + : "Index " + index + " out of bounds [" + 0 + ", " + size() + ")."; + + return buffer[index]; + } + + public float set(int index, float e1) { + assert (index >= 0 && index < size()) + : "Index " + index + " out of bounds [" + 0 + ", " + size() + ")."; + + final float v = buffer[index]; + buffer[index] = e1; + return v; + } + + /** Removes the element at the specified position in this container and returns it. */ + public float removeAt(int index) { + assert (index >= 0 && index < size()) + : "Index " + index + " out of bounds [" + 0 + ", " + size() + ")."; + + final float v = buffer[index]; + System.arraycopy(buffer, index + 1, buffer, index, --elementsCount - index); + return v; + } + + /** Removes and returns the last element of this list. */ + public float removeLast() { + assert !isEmpty() : "List is empty"; + + return buffer[--elementsCount]; + } + + /** + * Removes from this list all the elements with indexes between fromIndex, inclusive, + * and toIndex, exclusive. + */ + public void removeRange(int fromIndex, int toIndex) { + assert (fromIndex >= 0 && fromIndex <= size()) + : "Index " + fromIndex + " out of bounds [" + 0 + ", " + size() + ")."; + assert (toIndex >= 0 && toIndex <= size()) + : "Index " + toIndex + " out of bounds [" + 0 + ", " + size() + "]."; + assert fromIndex <= toIndex : "fromIndex must be <= toIndex: " + fromIndex + ", " + toIndex; + + System.arraycopy(buffer, toIndex, buffer, fromIndex, elementsCount - toIndex); + final int count = toIndex - fromIndex; + elementsCount -= count; + } + + /** + * Removes the first element that equals e, returning whether an element has been + * removed. + */ + public boolean removeElement(float e) { + return removeFirst(e) != -1; + } + + /** + * Removes the first element that equals e1, returning its deleted position or + * -1 if the element was not found. + */ + public int removeFirst(float e1) { + final int index = indexOf(e1); + if (index >= 0) removeAt(index); + return index; + } + + /** + * Removes the last element that equals e1, returning its deleted position or + * -1 if the element was not found. + */ + public int removeLast(float e1) { + final int index = lastIndexOf(e1); + if (index >= 0) removeAt(index); + return index; + } + + /** + * Removes all occurrences of e from this collection. + * + * @param e Element to be removed from this collection, if present. + * @return The number of removed elements as a result of this call. 
+ */ + public int removeAll(float e) { + int to = 0; + for (int from = 0; from < elementsCount; from++) { + if (((e) == (buffer[from]))) { + continue; + } + if (to != from) { + buffer[to] = buffer[from]; + } + to++; + } + final int deleted = elementsCount - to; + this.elementsCount = to; + return deleted; + } + + public boolean contains(float e1) { + return indexOf(e1) >= 0; + } + + public int indexOf(float e1) { + for (int i = 0; i < elementsCount; i++) { + if (((e1) == (buffer[i]))) { + return i; + } + } + + return -1; + } + + public int lastIndexOf(float e1) { + for (int i = elementsCount - 1; i >= 0; i--) { + if (((e1) == (buffer[i]))) { + return i; + } + } + + return -1; + } + + public boolean isEmpty() { + return elementsCount == 0; + } + + /** + * Ensure this container can hold at least the given number of elements without resizing its + * buffers. + * + * @param expectedElements The total number of elements, inclusive. + */ + public void ensureCapacity(int expectedElements) { + if (expectedElements > buffer.length) { + ensureBufferSpace(expectedElements - size()); + } + } + + /** + * Ensures the internal buffer has enough free slots to store expectedAdditions. + * Increases internal buffer size if needed. + */ + protected void ensureBufferSpace(int expectedAdditions) { + if (elementsCount + expectedAdditions > buffer.length) { + this.buffer = ArrayUtil.grow(buffer, elementsCount + expectedAdditions); + } + } + + /** + * Truncate or expand the list to the new size. If the list is truncated, the buffer will not be + * reallocated (use {@link #trimToSize()} if you need a truncated buffer), but the truncated + * values will be reset to the default value (zero). If the list is expanded, the elements beyond + * the current size are initialized with JVM-defaults (zero or null values). + */ + public void resize(int newSize) { + if (newSize <= buffer.length) { + if (newSize < elementsCount) { + Arrays.fill(buffer, newSize, elementsCount, 0L); + } else { + Arrays.fill(buffer, elementsCount, newSize, 0L); + } + } else { + ensureCapacity(newSize); + } + this.elementsCount = newSize; + } + + public int size() { + return elementsCount; + } + + /** Trim the internal buffer to the current size. */ + public void trimToSize() { + if (size() != this.buffer.length) { + this.buffer = toArray(); + } + } + + /** + * Sets the number of stored elements to zero. Releases and initializes the internal storage array + * to default values. To clear the list without cleaning the buffer, simply set the {@link + * #elementsCount} field to zero. + */ + public void clear() { + Arrays.fill(buffer, 0, elementsCount, 0); + this.elementsCount = 0; + } + + /** Sets the number of stored elements to zero and releases the internal storage array. */ + public void release() { + this.buffer = EMPTY_ARRAY; + this.elementsCount = 0; + } + + /** The returned array is sized to match exactly the number of elements of the stack. */ + public float[] toArray() { + + return ArrayUtil.copyOfSubArray(buffer, 0, elementsCount); + } + + /** + * Clone this object. The returned clone will reuse the same hash function and array resizing + * strategy. 
+ */ + @Override + public FloatArrayList clone() { + try { + final FloatArrayList cloned = (FloatArrayList) super.clone(); + cloned.buffer = buffer.clone(); + return cloned; + } catch (CloneNotSupportedException e) { + throw new RuntimeException(e); + } + } + + @Override + public int hashCode() { + int h = 1, max = elementsCount; + for (int i = 0; i < max; i++) { + h = 31 * h + BitMixer.mix(this.buffer[i]); + } + return h; + } + + /** + * Returns true only if the other object is an instance of the same class and with + * the same elements. + */ + @Override + public boolean equals(Object obj) { + return (this == obj) + || (obj != null && getClass() == obj.getClass() && equalElements(getClass().cast(obj))); + } + + /** Compare index-aligned elements against another {@link FloatArrayList}. */ + protected boolean equalElements(FloatArrayList other) { + int max = size(); + if (other.size() != max) { + return false; + } + + for (int i = 0; i < max; i++) { + if (!((get(i)) == (other.get(i)))) { + return false; + } + } + + return true; + } + + /** Convert the contents of this list to a human-friendly string. */ + @Override + public String toString() { + return Arrays.toString(this.toArray()); + } + + @Override + public long ramBytesUsed() { + return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(buffer); + } + + /** Sorts the elements in this list and returns this list. */ + public FloatArrayList sort() { + Arrays.sort(buffer, 0, elementsCount); + return this; + } + + /** Reverses the elements in this list and returns this list. */ + public FloatArrayList reverse() { + for (int i = 0, mid = elementsCount >> 1, j = elementsCount - 1; i < mid; i++, j--) { + float tmp = buffer[i]; + buffer[i] = buffer[j]; + buffer[j] = tmp; + } + return this; + } + + /** An iterator implementation for {@link FloatArrayList#iterator}. */ + static final class ValueIterator extends AbstractIterator { + private final FloatCursor cursor; + + private final float[] buffer; + private final int size; + + public ValueIterator(float[] buffer, int size) { + this.cursor = new FloatCursor(); + this.cursor.index = -1; + this.size = size; + this.buffer = buffer; + } + + @Override + protected FloatCursor fetch() { + if (cursor.index + 1 == size) return done(); + + cursor.value = buffer[++cursor.index]; + return cursor; + } + } + + @Override + public Iterator iterator() { + return new ValueIterator(buffer, size()); + } + + /** + * Create a list from a variable number of arguments or an array of int. The elements + * are copied from the argument to the internal buffer. + */ + /* */ + public static FloatArrayList from(float... elements) { + final FloatArrayList list = new FloatArrayList(elements.length); + list.add(elements); + return list; + } +} diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/FloatCursor.java b/lucene/core/src/java/org/apache/lucene/util/hppc/FloatCursor.java new file mode 100644 index 000000000000..0fdd7889b789 --- /dev/null +++ b/lucene/core/src/java/org/apache/lucene/util/hppc/FloatCursor.java @@ -0,0 +1,35 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
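(Reviewer note, not part of the patch: a minimal usage sketch of the forked FloatArrayList completed above; the class name FloatArrayListExample is illustrative, and the behavior shown follows the code in this diff.)

import org.apache.lucene.util.hppc.FloatArrayList;
import org.apache.lucene.util.hppc.FloatCursor;

public class FloatArrayListExample {
  public static void main(String[] args) {
    // Build a list from varargs, then mutate it.
    FloatArrayList list = FloatArrayList.from(1.5f, 2.5f, 2.5f, 3.5f);
    list.add(4.5f);                     // append a single element
    list.insert(0, 0.5f);               // shift-insert at index 0
    int removed = list.removeAll(2.5f); // removes both occurrences -> 2
    System.out.println(removed + " " + list); // prints: 2 [0.5, 1.5, 3.5, 4.5]

    // Cursor-based iteration avoids boxing; the cursor object is reused.
    for (FloatCursor c : list) {
      System.out.println(c.index + " -> " + c.value);
    }
  }
}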
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.lucene.util.hppc; + +/** Forked from HPPC, holding int index and float value */ +public final class FloatCursor { + /** + * The current value's index in the container this cursor belongs to. The meaning of this index is + * defined by the container (usually it will be an index in the underlying storage buffer). + */ + public int index; + + /** The current value. */ + public float value; + + @Override + public String toString() { + return "[cursor, index: " + index + ", value: " + value + "]"; + } +} diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/IntArrayList.java b/lucene/core/src/java/org/apache/lucene/util/hppc/IntArrayList.java index bc80e36463b0..42e2c9544ac8 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hppc/IntArrayList.java +++ b/lucene/core/src/java/org/apache/lucene/util/hppc/IntArrayList.java @@ -40,13 +40,11 @@ public class IntArrayList implements Iterable, Cloneable, Accountable /** An immutable empty buffer (array). */ public static final int[] EMPTY_ARRAY = new int[0]; - ; - /** * Internal array for storing the list. The array may be larger than the current size ({@link * #size()}). */ - public int[] buffer = EMPTY_ARRAY; + public int[] buffer; /** Current number of elements stored in {@link #buffer}. */ public int elementsCount; @@ -262,8 +260,7 @@ public boolean isEmpty() { * @param expectedElements The total number of elements, inclusive. */ public void ensureCapacity(int expectedElements) { - final int bufferLen = (buffer == null ? 0 : buffer.length); - if (expectedElements > bufferLen) { + if (expectedElements > buffer.length) { ensureBufferSpace(expectedElements - size()); } } @@ -273,8 +270,7 @@ public void ensureCapacity(int expectedElements) { * Increases internal buffer size if needed. */ protected void ensureBufferSpace(int expectedAdditions) { - final int bufferLen = (buffer == null ? 0 : buffer.length); - if (elementsCount + expectedAdditions > bufferLen) { + if (elementsCount + expectedAdditions > buffer.length) { this.buffer = ArrayUtil.grow(buffer, elementsCount + expectedAdditions); } } diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/IntDoubleHashMap.java b/lucene/core/src/java/org/apache/lucene/util/hppc/IntDoubleHashMap.java new file mode 100644 index 000000000000..19aac4e7b05e --- /dev/null +++ b/lucene/core/src/java/org/apache/lucene/util/hppc/IntDoubleHashMap.java @@ -0,0 +1,851 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.lucene.util.hppc; + +import static org.apache.lucene.util.hppc.HashContainers.DEFAULT_EXPECTED_ELEMENTS; +import static org.apache.lucene.util.hppc.HashContainers.DEFAULT_LOAD_FACTOR; +import static org.apache.lucene.util.hppc.HashContainers.ITERATION_SEED; +import static org.apache.lucene.util.hppc.HashContainers.MAX_LOAD_FACTOR; +import static org.apache.lucene.util.hppc.HashContainers.MIN_LOAD_FACTOR; +import static org.apache.lucene.util.hppc.HashContainers.checkLoadFactor; +import static org.apache.lucene.util.hppc.HashContainers.checkPowerOfTwo; +import static org.apache.lucene.util.hppc.HashContainers.expandAtCount; +import static org.apache.lucene.util.hppc.HashContainers.iterationIncrement; +import static org.apache.lucene.util.hppc.HashContainers.minBufferSize; +import static org.apache.lucene.util.hppc.HashContainers.nextBufferSize; + +import java.util.Arrays; +import java.util.Iterator; +import org.apache.lucene.util.Accountable; +import org.apache.lucene.util.RamUsageEstimator; + +/** + * A hash map of int to double, implemented using open addressing with + * linear probing for collision resolution. + * + *

<p>Mostly forked and trimmed from com.carrotsearch.hppc.IntDoubleHashMap + * + * <p>

github: https://github.com/carrotsearch/hppc release 0.9.0 + */ +public class IntDoubleHashMap + implements Iterable, Accountable, Cloneable { + + private static final long BASE_RAM_BYTES_USED = + RamUsageEstimator.shallowSizeOfInstance(IntDoubleHashMap.class); + + /** The array holding keys. */ + public int[] keys; + + /** The array holding values. */ + public double[] values; + + /** + * The number of stored keys (assigned key slots), excluding the special "empty" key, if any (use + * {@link #size()} instead). + * + * @see #size() + */ + protected int assigned; + + /** Mask for slot scans in {@link #keys}. */ + protected int mask; + + /** Expand (rehash) {@link #keys} when {@link #assigned} hits this value. */ + protected int resizeAt; + + /** Special treatment for the "empty slot" key marker. */ + protected boolean hasEmptyKey; + + /** The load factor for {@link #keys}. */ + protected double loadFactor; + + /** Seed used to ensure the hash iteration order is different from an iteration to another. */ + protected int iterationSeed; + + /** New instance with sane defaults. */ + public IntDoubleHashMap() { + this(DEFAULT_EXPECTED_ELEMENTS); + } + + /** + * New instance with sane defaults. + * + * @param expectedElements The expected number of elements guaranteed not to cause buffer + * expansion (inclusive). + */ + public IntDoubleHashMap(int expectedElements) { + this(expectedElements, DEFAULT_LOAD_FACTOR); + } + + /** + * New instance with the provided defaults. + * + * @param expectedElements The expected number of elements guaranteed not to cause a rehash + * (inclusive). + * @param loadFactor The load factor for internal buffers. Insane load factors (zero, full + * capacity) are rejected by {@link #verifyLoadFactor(double)}. + */ + public IntDoubleHashMap(int expectedElements, double loadFactor) { + this.loadFactor = verifyLoadFactor(loadFactor); + iterationSeed = ITERATION_SEED.incrementAndGet(); + ensureCapacity(expectedElements); + } + + /** Create a hash map from all key-value pairs of another map. */ + public IntDoubleHashMap(IntDoubleHashMap map) { + this(map.size()); + putAll(map); + } + + public double put(int key, double value) { + assert assigned < mask + 1; + + final int mask = this.mask; + if (((key) == 0)) { + double previousValue = hasEmptyKey ? values[mask + 1] : 0; + hasEmptyKey = true; + values[mask + 1] = value; + return previousValue; + } else { + final int[] keys = this.keys; + int slot = hashKey(key) & mask; + + int existing; + while (!((existing = keys[slot]) == 0)) { + if (((existing) == (key))) { + final double previousValue = values[slot]; + values[slot] = value; + return previousValue; + } + slot = (slot + 1) & mask; + } + + if (assigned == resizeAt) { + allocateThenInsertThenRehash(slot, key, value); + } else { + keys[slot] = key; + values[slot] = value; + } + + assigned++; + return 0; + } + } + + public int putAll(Iterable iterable) { + final int count = size(); + for (IntDoubleCursor c : iterable) { + put(c.key, c.value); + } + return size() - count; + } + + /** + * Trove-inspired API method. An equivalent of the + * following code: + * + *

+   * if (!map.containsKey(key)) map.put(key, value);
+   * 
+ * + * @param key The key of the value to check. + * @param value The value to put if key does not exist. + * @return true if key did not exist and value was placed + * in the map. + */ + public boolean putIfAbsent(int key, double value) { + int keyIndex = indexOf(key); + if (!indexExists(keyIndex)) { + indexInsert(keyIndex, key, value); + return true; + } else { + return false; + } + } + + /** + * If key exists, putValue is inserted into the map, otherwise any + * existing value is incremented by additionValue. + * + * @param key The key of the value to adjust. + * @param putValue The value to put if key does not exist. + * @param incrementValue The value to add to the existing value if key exists. + * @return Returns the current value associated with key (after changes). + */ + public double putOrAdd(int key, double putValue, double incrementValue) { + assert assigned < mask + 1; + + int keyIndex = indexOf(key); + if (indexExists(keyIndex)) { + putValue = values[keyIndex] + incrementValue; + indexReplace(keyIndex, putValue); + } else { + indexInsert(keyIndex, key, putValue); + } + return putValue; + } + + /** + * Adds incrementValue to any existing value for the given key or + * inserts incrementValue if key did not previously exist. + * + * @param key The key of the value to adjust. + * @param incrementValue The value to put or add to the existing value if key exists. + * @return Returns the current value associated with key (after changes). + */ + public double addTo(int key, double incrementValue) { + return putOrAdd(key, incrementValue, incrementValue); + } + + public double remove(int key) { + final int mask = this.mask; + if (((key) == 0)) { + if (!hasEmptyKey) { + return 0; + } + hasEmptyKey = false; + double previousValue = values[mask + 1]; + values[mask + 1] = 0; + return previousValue; + } else { + final int[] keys = this.keys; + int slot = hashKey(key) & mask; + + int existing; + while (!((existing = keys[slot]) == 0)) { + if (((existing) == (key))) { + final double previousValue = values[slot]; + shiftConflictingKeys(slot); + return previousValue; + } + slot = (slot + 1) & mask; + } + + return 0; + } + } + + public double get(int key) { + if (((key) == 0)) { + return hasEmptyKey ? values[mask + 1] : 0; + } else { + final int[] keys = this.keys; + final int mask = this.mask; + int slot = hashKey(key) & mask; + + int existing; + while (!((existing = keys[slot]) == 0)) { + if (((existing) == (key))) { + return values[slot]; + } + slot = (slot + 1) & mask; + } + + return 0; + } + } + + public double getOrDefault(int key, double defaultValue) { + if (((key) == 0)) { + return hasEmptyKey ? values[mask + 1] : defaultValue; + } else { + final int[] keys = this.keys; + final int mask = this.mask; + int slot = hashKey(key) & mask; + + int existing; + while (!((existing = keys[slot]) == 0)) { + if (((existing) == (key))) { + return values[slot]; + } + slot = (slot + 1) & mask; + } + + return defaultValue; + } + } + + public boolean containsKey(int key) { + if (((key) == 0)) { + return hasEmptyKey; + } else { + final int[] keys = this.keys; + final int mask = this.mask; + int slot = hashKey(key) & mask; + + int existing; + while (!((existing = keys[slot]) == 0)) { + if (((existing) == (key))) { + return true; + } + slot = (slot + 1) & mask; + } + + return false; + } + } + + public int indexOf(int key) { + final int mask = this.mask; + if (((key) == 0)) { + return hasEmptyKey ? 
mask + 1 : ~(mask + 1); + } else { + final int[] keys = this.keys; + int slot = hashKey(key) & mask; + + int existing; + while (!((existing = keys[slot]) == 0)) { + if (((existing) == (key))) { + return slot; + } + slot = (slot + 1) & mask; + } + + return ~slot; + } + } + + public boolean indexExists(int index) { + assert index < 0 || index <= mask || (index == mask + 1 && hasEmptyKey); + + return index >= 0; + } + + public double indexGet(int index) { + assert index >= 0 : "The index must point at an existing key."; + assert index <= mask || (index == mask + 1 && hasEmptyKey); + + return values[index]; + } + + public double indexReplace(int index, double newValue) { + assert index >= 0 : "The index must point at an existing key."; + assert index <= mask || (index == mask + 1 && hasEmptyKey); + + double previousValue = values[index]; + values[index] = newValue; + return previousValue; + } + + public void indexInsert(int index, int key, double value) { + assert index < 0 : "The index must not point at an existing key."; + + index = ~index; + if (((key) == 0)) { + assert index == mask + 1; + values[index] = value; + hasEmptyKey = true; + } else { + assert ((keys[index]) == 0); + + if (assigned == resizeAt) { + allocateThenInsertThenRehash(index, key, value); + } else { + keys[index] = key; + values[index] = value; + } + + assigned++; + } + } + + public double indexRemove(int index) { + assert index >= 0 : "The index must point at an existing key."; + assert index <= mask || (index == mask + 1 && hasEmptyKey); + + double previousValue = values[index]; + if (index > mask) { + assert index == mask + 1; + hasEmptyKey = false; + values[index] = 0; + } else { + shiftConflictingKeys(index); + } + return previousValue; + } + + public void clear() { + assigned = 0; + hasEmptyKey = false; + + Arrays.fill(keys, 0); + + /* */ + } + + public void release() { + assigned = 0; + hasEmptyKey = false; + + keys = null; + values = null; + ensureCapacity(DEFAULT_EXPECTED_ELEMENTS); + } + + public int size() { + return assigned + (hasEmptyKey ? 1 : 0); + } + + public boolean isEmpty() { + return size() == 0; + } + + @Override + public int hashCode() { + int h = hasEmptyKey ? 0xDEADBEEF : 0; + for (IntDoubleCursor c : this) { + h += BitMixer.mix(c.key) + BitMixer.mix(c.value); + } + return h; + } + + @Override + public boolean equals(Object obj) { + return (this == obj) + || (obj != null && getClass() == obj.getClass() && equalElements(getClass().cast(obj))); + } + + /** Return true if all keys of some other container exist in this container. */ + protected boolean equalElements(IntDoubleHashMap other) { + if (other.size() != size()) { + return false; + } + + for (IntDoubleCursor c : other) { + int key = c.key; + if (!containsKey(key) + || !(Double.doubleToLongBits(c.value) == Double.doubleToLongBits(get(key)))) { + return false; + } + } + + return true; + } + + /** + * Ensure this container can hold at least the given number of keys (entries) without resizing its + * buffers. + * + * @param expectedElements The total number of keys, inclusive. + */ + public void ensureCapacity(int expectedElements) { + if (expectedElements > resizeAt || keys == null) { + final int[] prevKeys = this.keys; + final double[] prevValues = this.values; + allocateBuffers(minBufferSize(expectedElements, loadFactor)); + if (prevKeys != null && !isEmpty()) { + rehash(prevKeys, prevValues); + } + } + } + + /** + * Provides the next iteration seed used to build the iteration starting slot and offset + * increment. 
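(Reviewer note, not part of the patch: a short sketch of the IntDoubleHashMap API shown above, including the two's-complement convention used by indexOf/indexInsert; the class name IntDoubleHashMapExample is illustrative.)

import org.apache.lucene.util.hppc.IntDoubleHashMap;

public class IntDoubleHashMapExample {
  public static void main(String[] args) {
    IntDoubleHashMap counts = new IntDoubleHashMap();
    counts.put(7, 1.0);
    counts.addTo(7, 0.5);                              // 7 -> 1.5
    counts.putIfAbsent(7, 99.0);                       // no-op, key already present
    System.out.println(counts.get(7));                 // 1.5
    System.out.println(counts.getOrDefault(8, -1.0));  // -1.0, key absent

    // indexOf returns a non-negative slot for an existing key and the two's
    // complement (~slot) of the insertion point for an absent key, so the
    // probe can be reused by indexReplace/indexInsert.
    int index = counts.indexOf(8);
    if (counts.indexExists(index)) {
      counts.indexReplace(index, 2.0);
    } else {
      counts.indexInsert(index, 8, 2.0);
    }
  }
}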
This method does not need to be synchronized, what matters is that each thread gets + * a sequence of varying seeds. + */ + protected int nextIterationSeed() { + return iterationSeed = BitMixer.mixPhi(iterationSeed); + } + + @Override + public long ramBytesUsed() { + return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(keys) + RamUsageEstimator.sizeOf(values); + } + + /** An iterator implementation for {@link #iterator}. */ + private final class EntryIterator extends AbstractIterator { + private final IntDoubleCursor cursor; + private final int increment; + private int index; + private int slot; + + public EntryIterator() { + cursor = new IntDoubleCursor(); + int seed = nextIterationSeed(); + increment = iterationIncrement(seed); + slot = seed & mask; + } + + @Override + protected IntDoubleCursor fetch() { + final int mask = IntDoubleHashMap.this.mask; + while (index <= mask) { + int existing; + index++; + slot = (slot + increment) & mask; + if (!((existing = keys[slot]) == 0)) { + cursor.index = slot; + cursor.key = existing; + cursor.value = values[slot]; + return cursor; + } + } + + if (index == mask + 1 && hasEmptyKey) { + cursor.index = index; + cursor.key = 0; + cursor.value = values[index++]; + return cursor; + } + + return done(); + } + } + + @Override + public Iterator iterator() { + return new EntryIterator(); + } + + /** Returns a specialized view of the keys of this associated container. */ + public KeysContainer keys() { + return new KeysContainer(); + } + + /** A view of the keys inside this hash map. */ + public final class KeysContainer implements Iterable { + + @Override + public Iterator iterator() { + return new KeysIterator(); + } + + public int size() { + return IntDoubleHashMap.this.size(); + } + + public int[] toArray() { + int[] array = new int[size()]; + int i = 0; + for (IntCursor cursor : this) { + array[i++] = cursor.value; + } + return array; + } + } + + /** An iterator over the set of assigned keys. */ + private final class KeysIterator extends AbstractIterator { + private final IntCursor cursor; + private final int increment; + private int index; + private int slot; + + public KeysIterator() { + cursor = new IntCursor(); + int seed = nextIterationSeed(); + increment = iterationIncrement(seed); + slot = seed & mask; + } + + @Override + protected IntCursor fetch() { + final int mask = IntDoubleHashMap.this.mask; + while (index <= mask) { + int existing; + index++; + slot = (slot + increment) & mask; + if (!((existing = keys[slot]) == 0)) { + cursor.index = slot; + cursor.value = existing; + return cursor; + } + } + + if (index == mask + 1 && hasEmptyKey) { + cursor.index = index++; + cursor.value = 0; + return cursor; + } + + return done(); + } + } + + /** + * @return Returns a container with all values stored in this map. + */ + public ValuesContainer values() { + return new ValuesContainer(); + } + + /** A view over the set of values of this map. */ + public final class ValuesContainer implements Iterable { + + @Override + public Iterator iterator() { + return new ValuesIterator(); + } + + public int size() { + return IntDoubleHashMap.this.size(); + } + + public double[] toArray() { + double[] array = new double[size()]; + int i = 0; + for (DoubleCursor cursor : this) { + array[i++] = cursor.value; + } + return array; + } + } + + /** An iterator over the set of assigned values. 
*/ + private final class ValuesIterator extends AbstractIterator { + private final DoubleCursor cursor; + private final int increment; + private int index; + private int slot; + + public ValuesIterator() { + cursor = new DoubleCursor(); + int seed = nextIterationSeed(); + increment = iterationIncrement(seed); + slot = seed & mask; + } + + @Override + protected DoubleCursor fetch() { + final int mask = IntDoubleHashMap.this.mask; + while (index <= mask) { + index++; + slot = (slot + increment) & mask; + if (!((keys[slot]) == 0)) { + cursor.index = slot; + cursor.value = values[slot]; + return cursor; + } + } + + if (index == mask + 1 && hasEmptyKey) { + cursor.index = index; + cursor.value = values[index++]; + return cursor; + } + + return done(); + } + } + + @Override + public IntDoubleHashMap clone() { + try { + /* */ + IntDoubleHashMap cloned = (IntDoubleHashMap) super.clone(); + cloned.keys = keys.clone(); + cloned.values = values.clone(); + cloned.hasEmptyKey = hasEmptyKey; + cloned.iterationSeed = ITERATION_SEED.incrementAndGet(); + return cloned; + } catch (CloneNotSupportedException e) { + throw new RuntimeException(e); + } + } + + /** Convert the contents of this map to a human-friendly string. */ + @Override + public String toString() { + final StringBuilder buffer = new StringBuilder(); + buffer.append("["); + + boolean first = true; + for (IntDoubleCursor cursor : this) { + if (!first) { + buffer.append(", "); + } + buffer.append(cursor.key); + buffer.append("=>"); + buffer.append(cursor.value); + first = false; + } + buffer.append("]"); + return buffer.toString(); + } + + /** Creates a hash map from two index-aligned arrays of key-value pairs. */ + public static IntDoubleHashMap from(int[] keys, double[] values) { + if (keys.length != values.length) { + throw new IllegalArgumentException( + "Arrays of keys and values must have an identical length."); + } + + IntDoubleHashMap map = new IntDoubleHashMap(keys.length); + for (int i = 0; i < keys.length; i++) { + map.put(keys[i], values[i]); + } + + return map; + } + + /** + * Returns a hash code for the given key. + * + *

The output from this function should evenly distribute keys across the entire integer range. + */ + protected int hashKey(int key) { + assert !((key) == 0); // Handled as a special case (empty slot marker). + return BitMixer.mixPhi(key); + } + + /** + * Validate load factor range and return it. Override and suppress if you need insane load + * factors. + */ + protected double verifyLoadFactor(double loadFactor) { + checkLoadFactor(loadFactor, MIN_LOAD_FACTOR, MAX_LOAD_FACTOR); + return loadFactor; + } + + /** Rehash from old buffers to new buffers. */ + protected void rehash(int[] fromKeys, double[] fromValues) { + assert fromKeys.length == fromValues.length && checkPowerOfTwo(fromKeys.length - 1); + + // Rehash all stored key/value pairs into the new buffers. + final int[] keys = this.keys; + final double[] values = this.values; + final int mask = this.mask; + int existing; + + // Copy the zero element's slot, then rehash everything else. + int from = fromKeys.length - 1; + keys[keys.length - 1] = fromKeys[from]; + values[values.length - 1] = fromValues[from]; + while (--from >= 0) { + if (!((existing = fromKeys[from]) == 0)) { + int slot = hashKey(existing) & mask; + while (!((keys[slot]) == 0)) { + slot = (slot + 1) & mask; + } + keys[slot] = existing; + values[slot] = fromValues[from]; + } + } + } + + /** + * Allocate new internal buffers. This method attempts to allocate and assign internal buffers + * atomically (either allocations succeed or not). + */ + protected void allocateBuffers(int arraySize) { + assert Integer.bitCount(arraySize) == 1; + + // Ensure no change is done if we hit an OOM. + int[] prevKeys = this.keys; + double[] prevValues = this.values; + try { + int emptyElementSlot = 1; + this.keys = (new int[arraySize + emptyElementSlot]); + this.values = (new double[arraySize + emptyElementSlot]); + } catch (OutOfMemoryError e) { + this.keys = prevKeys; + this.values = prevValues; + throw new BufferAllocationException( + "Not enough memory to allocate buffers for rehashing: %,d -> %,d", + e, this.mask + 1, arraySize); + } + + this.resizeAt = expandAtCount(arraySize, loadFactor); + this.mask = arraySize - 1; + } + + /** + * This method is invoked when there is a new key/ value pair to be inserted into the buffers but + * there is not enough empty slots to do so. + * + *

New buffers are allocated. If this succeeds, we know we can proceed with rehashing so we + * assign the pending element to the previous buffer (possibly violating the invariant of having + * at least one empty slot) and rehash all keys, substituting new buffers at the end. + */ + protected void allocateThenInsertThenRehash(int slot, int pendingKey, double pendingValue) { + assert assigned == resizeAt && ((keys[slot]) == 0) && !((pendingKey) == 0); + + // Try to allocate new buffers first. If we OOM, we leave in a consistent state. + final int[] prevKeys = this.keys; + final double[] prevValues = this.values; + allocateBuffers(nextBufferSize(mask + 1, size(), loadFactor)); + assert this.keys.length > prevKeys.length; + + // We have succeeded at allocating new data so insert the pending key/value at + // the free slot in the old arrays before rehashing. + prevKeys[slot] = pendingKey; + prevValues[slot] = pendingValue; + + // Rehash old keys, including the pending key. + rehash(prevKeys, prevValues); + } + + /** + * Shift all the slot-conflicting keys and values allocated to (and including) slot. + */ + protected void shiftConflictingKeys(int gapSlot) { + final int[] keys = this.keys; + final double[] values = this.values; + final int mask = this.mask; + + // Perform shifts of conflicting keys to fill in the gap. + int distance = 0; + while (true) { + final int slot = (gapSlot + (++distance)) & mask; + final int existing = keys[slot]; + if (((existing) == 0)) { + break; + } + + final int idealSlot = hashKey(existing); + final int shift = (slot - idealSlot) & mask; + if (shift >= distance) { + // Entry at this position was originally at or before the gap slot. + // Move the conflict-shifted entry to the gap's position and repeat the procedure + // for any entries to the right of the current position, treating it + // as the new gap. + keys[gapSlot] = existing; + values[gapSlot] = values[slot]; + gapSlot = slot; + distance = 0; + } + } + + // Mark the last found gap slot without a conflict as empty. + keys[gapSlot] = 0; + values[gapSlot] = 0; + assigned--; + } + + /** Forked from HPPC, holding int index,key and value */ + public static final class IntDoubleCursor { + /** + * The current key and value's index in the container this cursor beints to. The meaning of this + * index is defined by the container (usually it will be an index in the underlying storage + * buffer). + */ + public int index; + + /** The current key. */ + public int key; + + /** The current value. */ + public double value; + + @Override + public String toString() { + return "[cursor, index: " + index + ", key: " + key + ", value: " + value + "]"; + } + } +} diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/IntFloatHashMap.java b/lucene/core/src/java/org/apache/lucene/util/hppc/IntFloatHashMap.java new file mode 100644 index 000000000000..06224fb4a8a3 --- /dev/null +++ b/lucene/core/src/java/org/apache/lucene/util/hppc/IntFloatHashMap.java @@ -0,0 +1,850 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
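(Reviewer note, not part of the patch: iteration over the IntDoubleHashMap added above and over its keys()/values() views reuses cursor objects, and the starting slot comes from a per-instance iteration seed, so the order is deliberately unstable. A brief sketch; the class name IterationExample is illustrative.)

import org.apache.lucene.util.hppc.DoubleCursor;
import org.apache.lucene.util.hppc.IntCursor;
import org.apache.lucene.util.hppc.IntDoubleHashMap;

public class IterationExample {
  public static void main(String[] args) {
    IntDoubleHashMap map =
        IntDoubleHashMap.from(new int[] {1, 2, 3}, new double[] {0.1, 0.2, 0.3});

    // Entry iteration reuses one cursor instance; copy key/value out if needed later.
    for (IntDoubleHashMap.IntDoubleCursor c : map) {
      System.out.println(c.key + " => " + c.value);
    }

    // Keys and values views; do not rely on any particular ordering.
    for (IntCursor k : map.keys()) {
      System.out.println("key " + k.value);
    }
    for (DoubleCursor v : map.values()) {
      System.out.println("value " + v.value);
    }
    System.out.println(map.keys().toArray().length + " keys, "
        + map.values().toArray().length + " values");
  }
}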
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.lucene.util.hppc; + +import static org.apache.lucene.util.hppc.HashContainers.DEFAULT_EXPECTED_ELEMENTS; +import static org.apache.lucene.util.hppc.HashContainers.DEFAULT_LOAD_FACTOR; +import static org.apache.lucene.util.hppc.HashContainers.ITERATION_SEED; +import static org.apache.lucene.util.hppc.HashContainers.MAX_LOAD_FACTOR; +import static org.apache.lucene.util.hppc.HashContainers.MIN_LOAD_FACTOR; +import static org.apache.lucene.util.hppc.HashContainers.checkLoadFactor; +import static org.apache.lucene.util.hppc.HashContainers.checkPowerOfTwo; +import static org.apache.lucene.util.hppc.HashContainers.expandAtCount; +import static org.apache.lucene.util.hppc.HashContainers.iterationIncrement; +import static org.apache.lucene.util.hppc.HashContainers.minBufferSize; +import static org.apache.lucene.util.hppc.HashContainers.nextBufferSize; + +import java.util.Arrays; +import java.util.Iterator; +import org.apache.lucene.util.Accountable; +import org.apache.lucene.util.RamUsageEstimator; + +/** + * A hash map of int to float, implemented using open addressing with + * linear probing for collision resolution. + * + *

<p>Mostly forked and trimmed from com.carrotsearch.hppc.IntFloatHashMap + * + * <p>

github: https://github.com/carrotsearch/hppc release 0.9.0 + */ +public class IntFloatHashMap + implements Iterable, Accountable, Cloneable { + + private static final long BASE_RAM_BYTES_USED = + RamUsageEstimator.shallowSizeOfInstance(IntFloatHashMap.class); + + /** The array holding keys. */ + public int[] keys; + + /** The array holding values. */ + public float[] values; + + /** + * The number of stored keys (assigned key slots), excluding the special "empty" key, if any (use + * {@link #size()} instead). + * + * @see #size() + */ + protected int assigned; + + /** Mask for slot scans in {@link #keys}. */ + protected int mask; + + /** Expand (rehash) {@link #keys} when {@link #assigned} hits this value. */ + protected int resizeAt; + + /** Special treatment for the "empty slot" key marker. */ + protected boolean hasEmptyKey; + + /** The load factor for {@link #keys}. */ + protected double loadFactor; + + /** Seed used to ensure the hash iteration order is different from an iteration to another. */ + protected int iterationSeed; + + /** New instance with sane defaults. */ + public IntFloatHashMap() { + this(DEFAULT_EXPECTED_ELEMENTS); + } + + /** + * New instance with sane defaults. + * + * @param expectedElements The expected number of elements guaranteed not to cause buffer + * expansion (inclusive). + */ + public IntFloatHashMap(int expectedElements) { + this(expectedElements, DEFAULT_LOAD_FACTOR); + } + + /** + * New instance with the provided defaults. + * + * @param expectedElements The expected number of elements guaranteed not to cause a rehash + * (inclusive). + * @param loadFactor The load factor for internal buffers. Insane load factors (zero, full + * capacity) are rejected by {@link #verifyLoadFactor(double)}. + */ + public IntFloatHashMap(int expectedElements, double loadFactor) { + this.loadFactor = verifyLoadFactor(loadFactor); + iterationSeed = ITERATION_SEED.incrementAndGet(); + ensureCapacity(expectedElements); + } + + /** Create a hash map from all key-value pairs of another map. */ + public IntFloatHashMap(IntFloatHashMap map) { + this(map.size()); + putAll(map); + } + + public float put(int key, float value) { + assert assigned < mask + 1; + + final int mask = this.mask; + if (((key) == 0)) { + float previousValue = hasEmptyKey ? values[mask + 1] : 0; + hasEmptyKey = true; + values[mask + 1] = value; + return previousValue; + } else { + final int[] keys = this.keys; + int slot = hashKey(key) & mask; + + int existing; + while (!((existing = keys[slot]) == 0)) { + if (((existing) == (key))) { + final float previousValue = values[slot]; + values[slot] = value; + return previousValue; + } + slot = (slot + 1) & mask; + } + + if (assigned == resizeAt) { + allocateThenInsertThenRehash(slot, key, value); + } else { + keys[slot] = key; + values[slot] = value; + } + + assigned++; + return 0; + } + } + + public int putAll(Iterable iterable) { + final int count = size(); + for (IntFloatCursor c : iterable) { + put(c.key, c.value); + } + return size() - count; + } + + /** + * Trove-inspired API method. An equivalent of the + * following code: + * + *

+   * if (!map.containsKey(key)) map.put(key, value);
+   * 
+ * + * @param key The key of the value to check. + * @param value The value to put if key does not exist. + * @return true if key did not exist and value was placed + * in the map. + */ + public boolean putIfAbsent(int key, float value) { + int keyIndex = indexOf(key); + if (!indexExists(keyIndex)) { + indexInsert(keyIndex, key, value); + return true; + } else { + return false; + } + } + + /** + * If key exists, putValue is inserted into the map, otherwise any + * existing value is incremented by additionValue. + * + * @param key The key of the value to adjust. + * @param putValue The value to put if key does not exist. + * @param incrementValue The value to add to the existing value if key exists. + * @return Returns the current value associated with key (after changes). + */ + public float putOrAdd(int key, float putValue, float incrementValue) { + assert assigned < mask + 1; + + int keyIndex = indexOf(key); + if (indexExists(keyIndex)) { + putValue = values[keyIndex] + incrementValue; + indexReplace(keyIndex, putValue); + } else { + indexInsert(keyIndex, key, putValue); + } + return putValue; + } + + /** + * Adds incrementValue to any existing value for the given key or + * inserts incrementValue if key did not previously exist. + * + * @param key The key of the value to adjust. + * @param incrementValue The value to put or add to the existing value if key exists. + * @return Returns the current value associated with key (after changes). + */ + public float addTo(int key, float incrementValue) { + return putOrAdd(key, incrementValue, incrementValue); + } + + public float remove(int key) { + final int mask = this.mask; + if (((key) == 0)) { + if (!hasEmptyKey) { + return 0; + } + hasEmptyKey = false; + float previousValue = values[mask + 1]; + values[mask + 1] = 0; + return previousValue; + } else { + final int[] keys = this.keys; + int slot = hashKey(key) & mask; + + int existing; + while (!((existing = keys[slot]) == 0)) { + if (((existing) == (key))) { + final float previousValue = values[slot]; + shiftConflictingKeys(slot); + return previousValue; + } + slot = (slot + 1) & mask; + } + + return 0; + } + } + + public float get(int key) { + if (((key) == 0)) { + return hasEmptyKey ? values[mask + 1] : 0; + } else { + final int[] keys = this.keys; + final int mask = this.mask; + int slot = hashKey(key) & mask; + + int existing; + while (!((existing = keys[slot]) == 0)) { + if (((existing) == (key))) { + return values[slot]; + } + slot = (slot + 1) & mask; + } + + return 0; + } + } + + public float getOrDefault(int key, float defaultValue) { + if (((key) == 0)) { + return hasEmptyKey ? values[mask + 1] : defaultValue; + } else { + final int[] keys = this.keys; + final int mask = this.mask; + int slot = hashKey(key) & mask; + + int existing; + while (!((existing = keys[slot]) == 0)) { + if (((existing) == (key))) { + return values[slot]; + } + slot = (slot + 1) & mask; + } + + return defaultValue; + } + } + + public boolean containsKey(int key) { + if (((key) == 0)) { + return hasEmptyKey; + } else { + final int[] keys = this.keys; + final int mask = this.mask; + int slot = hashKey(key) & mask; + + int existing; + while (!((existing = keys[slot]) == 0)) { + if (((existing) == (key))) { + return true; + } + slot = (slot + 1) & mask; + } + + return false; + } + } + + public int indexOf(int key) { + final int mask = this.mask; + if (((key) == 0)) { + return hasEmptyKey ? 
mask + 1 : ~(mask + 1); + } else { + final int[] keys = this.keys; + int slot = hashKey(key) & mask; + + int existing; + while (!((existing = keys[slot]) == 0)) { + if (((existing) == (key))) { + return slot; + } + slot = (slot + 1) & mask; + } + + return ~slot; + } + } + + public boolean indexExists(int index) { + assert index < 0 || index <= mask || (index == mask + 1 && hasEmptyKey); + + return index >= 0; + } + + public float indexGet(int index) { + assert index >= 0 : "The index must point at an existing key."; + assert index <= mask || (index == mask + 1 && hasEmptyKey); + + return values[index]; + } + + public float indexReplace(int index, float newValue) { + assert index >= 0 : "The index must point at an existing key."; + assert index <= mask || (index == mask + 1 && hasEmptyKey); + + float previousValue = values[index]; + values[index] = newValue; + return previousValue; + } + + public void indexInsert(int index, int key, float value) { + assert index < 0 : "The index must not point at an existing key."; + + index = ~index; + if (((key) == 0)) { + assert index == mask + 1; + values[index] = value; + hasEmptyKey = true; + } else { + assert ((keys[index]) == 0); + + if (assigned == resizeAt) { + allocateThenInsertThenRehash(index, key, value); + } else { + keys[index] = key; + values[index] = value; + } + + assigned++; + } + } + + public float indexRemove(int index) { + assert index >= 0 : "The index must point at an existing key."; + assert index <= mask || (index == mask + 1 && hasEmptyKey); + + float previousValue = values[index]; + if (index > mask) { + assert index == mask + 1; + hasEmptyKey = false; + values[index] = 0; + } else { + shiftConflictingKeys(index); + } + return previousValue; + } + + public void clear() { + assigned = 0; + hasEmptyKey = false; + + Arrays.fill(keys, 0); + + /* */ + } + + public void release() { + assigned = 0; + hasEmptyKey = false; + + keys = null; + values = null; + ensureCapacity(DEFAULT_EXPECTED_ELEMENTS); + } + + public int size() { + return assigned + (hasEmptyKey ? 1 : 0); + } + + public boolean isEmpty() { + return size() == 0; + } + + @Override + public int hashCode() { + int h = hasEmptyKey ? 0xDEADBEEF : 0; + for (IntFloatCursor c : this) { + h += BitMixer.mix(c.key) + BitMixer.mix(c.value); + } + return h; + } + + @Override + public boolean equals(Object obj) { + return (this == obj) + || (obj != null && getClass() == obj.getClass() && equalElements(getClass().cast(obj))); + } + + /** Return true if all keys of some other container exist in this container. */ + protected boolean equalElements(IntFloatHashMap other) { + if (other.size() != size()) { + return false; + } + + for (IntFloatCursor c : other) { + int key = c.key; + if (!containsKey(key) || !(Float.floatToIntBits(c.value) == Float.floatToIntBits(get(key)))) { + return false; + } + } + + return true; + } + + /** + * Ensure this container can hold at least the given number of keys (entries) without resizing its + * buffers. + * + * @param expectedElements The total number of keys, inclusive. + */ + public void ensureCapacity(int expectedElements) { + if (expectedElements > resizeAt || keys == null) { + final int[] prevKeys = this.keys; + final float[] prevValues = this.values; + allocateBuffers(minBufferSize(expectedElements, loadFactor)); + if (prevKeys != null && !isEmpty()) { + rehash(prevKeys, prevValues); + } + } + } + + /** + * Provides the next iteration seed used to build the iteration starting slot and offset + * increment. 
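(Reviewer note, not part of the patch: IntFloatHashMap mirrors IntDoubleHashMap with float values, and equals() compares values via Float.floatToIntBits. A minimal sketch; the class name IntFloatHashMapExample is illustrative.)

import org.apache.lucene.util.hppc.IntFloatHashMap;

public class IntFloatHashMapExample {
  public static void main(String[] args) {
    IntFloatHashMap scores = IntFloatHashMap.from(new int[] {10, 20}, new float[] {1.0f, 2.0f});
    scores.addTo(10, 0.25f);                                // 10 -> 1.25f
    System.out.println(scores.get(10));                     // 1.25
    System.out.println(scores.getOrDefault(30, Float.NaN)); // NaN, key absent

    // Values are compared bitwise, so a clone compares equal to the original.
    System.out.println(scores.equals(scores.clone()));      // true
  }
}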
This method does not need to be synchronized, what matters is that each thread gets + * a sequence of varying seeds. + */ + protected int nextIterationSeed() { + return iterationSeed = BitMixer.mixPhi(iterationSeed); + } + + @Override + public long ramBytesUsed() { + return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(keys) + RamUsageEstimator.sizeOf(values); + } + + /** An iterator implementation for {@link #iterator}. */ + private final class EntryIterator extends AbstractIterator { + private final IntFloatCursor cursor; + private final int increment; + private int index; + private int slot; + + public EntryIterator() { + cursor = new IntFloatCursor(); + int seed = nextIterationSeed(); + increment = iterationIncrement(seed); + slot = seed & mask; + } + + @Override + protected IntFloatCursor fetch() { + final int mask = IntFloatHashMap.this.mask; + while (index <= mask) { + int existing; + index++; + slot = (slot + increment) & mask; + if (!((existing = keys[slot]) == 0)) { + cursor.index = slot; + cursor.key = existing; + cursor.value = values[slot]; + return cursor; + } + } + + if (index == mask + 1 && hasEmptyKey) { + cursor.index = index; + cursor.key = 0; + cursor.value = values[index++]; + return cursor; + } + + return done(); + } + } + + @Override + public Iterator iterator() { + return new EntryIterator(); + } + + /** Returns a specialized view of the keys of this associated container. */ + public KeysContainer keys() { + return new KeysContainer(); + } + + /** A view of the keys inside this hash map. */ + public final class KeysContainer implements Iterable { + + @Override + public Iterator iterator() { + return new KeysIterator(); + } + + public int size() { + return IntFloatHashMap.this.size(); + } + + public int[] toArray() { + int[] array = new int[size()]; + int i = 0; + for (IntCursor cursor : this) { + array[i++] = cursor.value; + } + return array; + } + } + + /** An iterator over the set of assigned keys. */ + private final class KeysIterator extends AbstractIterator { + private final IntCursor cursor; + private final int increment; + private int index; + private int slot; + + public KeysIterator() { + cursor = new IntCursor(); + int seed = nextIterationSeed(); + increment = iterationIncrement(seed); + slot = seed & mask; + } + + @Override + protected IntCursor fetch() { + final int mask = IntFloatHashMap.this.mask; + while (index <= mask) { + int existing; + index++; + slot = (slot + increment) & mask; + if (!((existing = keys[slot]) == 0)) { + cursor.index = slot; + cursor.value = existing; + return cursor; + } + } + + if (index == mask + 1 && hasEmptyKey) { + cursor.index = index++; + cursor.value = 0; + return cursor; + } + + return done(); + } + } + + /** + * @return Returns a container with all values stored in this map. + */ + public ValuesContainer values() { + return new ValuesContainer(); + } + + /** A view over the set of values of this map. */ + public final class ValuesContainer implements Iterable { + + @Override + public Iterator iterator() { + return new ValuesIterator(); + } + + public int size() { + return IntFloatHashMap.this.size(); + } + + public float[] toArray() { + float[] array = new float[size()]; + int i = 0; + for (FloatCursor cursor : this) { + array[i++] = cursor.value; + } + return array; + } + } + + /** An iterator over the set of assigned values. 
*/ + private final class ValuesIterator extends AbstractIterator { + private final FloatCursor cursor; + private final int increment; + private int index; + private int slot; + + public ValuesIterator() { + cursor = new FloatCursor(); + int seed = nextIterationSeed(); + increment = iterationIncrement(seed); + slot = seed & mask; + } + + @Override + protected FloatCursor fetch() { + final int mask = IntFloatHashMap.this.mask; + while (index <= mask) { + index++; + slot = (slot + increment) & mask; + if (!((keys[slot]) == 0)) { + cursor.index = slot; + cursor.value = values[slot]; + return cursor; + } + } + + if (index == mask + 1 && hasEmptyKey) { + cursor.index = index; + cursor.value = values[index++]; + return cursor; + } + + return done(); + } + } + + @Override + public IntFloatHashMap clone() { + try { + /* */ + IntFloatHashMap cloned = (IntFloatHashMap) super.clone(); + cloned.keys = keys.clone(); + cloned.values = values.clone(); + cloned.hasEmptyKey = hasEmptyKey; + cloned.iterationSeed = ITERATION_SEED.incrementAndGet(); + return cloned; + } catch (CloneNotSupportedException e) { + throw new RuntimeException(e); + } + } + + /** Convert the contents of this map to a human-friendly string. */ + @Override + public String toString() { + final StringBuilder buffer = new StringBuilder(); + buffer.append("["); + + boolean first = true; + for (IntFloatCursor cursor : this) { + if (!first) { + buffer.append(", "); + } + buffer.append(cursor.key); + buffer.append("=>"); + buffer.append(cursor.value); + first = false; + } + buffer.append("]"); + return buffer.toString(); + } + + /** Creates a hash map from two index-aligned arrays of key-value pairs. */ + public static IntFloatHashMap from(int[] keys, float[] values) { + if (keys.length != values.length) { + throw new IllegalArgumentException( + "Arrays of keys and values must have an identical length."); + } + + IntFloatHashMap map = new IntFloatHashMap(keys.length); + for (int i = 0; i < keys.length; i++) { + map.put(keys[i], values[i]); + } + + return map; + } + + /** + * Returns a hash code for the given key. + * + *

The output from this function should evenly distribute keys across the entire integer range. + */ + protected int hashKey(int key) { + assert !((key) == 0); // Handled as a special case (empty slot marker). + return BitMixer.mixPhi(key); + } + + /** + * Validate load factor range and return it. Override and suppress if you need insane load + * factors. + */ + protected double verifyLoadFactor(double loadFactor) { + checkLoadFactor(loadFactor, MIN_LOAD_FACTOR, MAX_LOAD_FACTOR); + return loadFactor; + } + + /** Rehash from old buffers to new buffers. */ + protected void rehash(int[] fromKeys, float[] fromValues) { + assert fromKeys.length == fromValues.length && checkPowerOfTwo(fromKeys.length - 1); + + // Rehash all stored key/value pairs into the new buffers. + final int[] keys = this.keys; + final float[] values = this.values; + final int mask = this.mask; + int existing; + + // Copy the zero element's slot, then rehash everything else. + int from = fromKeys.length - 1; + keys[keys.length - 1] = fromKeys[from]; + values[values.length - 1] = fromValues[from]; + while (--from >= 0) { + if (!((existing = fromKeys[from]) == 0)) { + int slot = hashKey(existing) & mask; + while (!((keys[slot]) == 0)) { + slot = (slot + 1) & mask; + } + keys[slot] = existing; + values[slot] = fromValues[from]; + } + } + } + + /** + * Allocate new internal buffers. This method attempts to allocate and assign internal buffers + * atomically (either allocations succeed or not). + */ + protected void allocateBuffers(int arraySize) { + assert Integer.bitCount(arraySize) == 1; + + // Ensure no change is done if we hit an OOM. + int[] prevKeys = this.keys; + float[] prevValues = this.values; + try { + int emptyElementSlot = 1; + this.keys = (new int[arraySize + emptyElementSlot]); + this.values = (new float[arraySize + emptyElementSlot]); + } catch (OutOfMemoryError e) { + this.keys = prevKeys; + this.values = prevValues; + throw new BufferAllocationException( + "Not enough memory to allocate buffers for rehashing: %,d -> %,d", + e, this.mask + 1, arraySize); + } + + this.resizeAt = expandAtCount(arraySize, loadFactor); + this.mask = arraySize - 1; + } + + /** + * This method is invoked when there is a new key/ value pair to be inserted into the buffers but + * there is not enough empty slots to do so. + * + *

New buffers are allocated. If this succeeds, we know we can proceed with rehashing so we + * assign the pending element to the previous buffer (possibly violating the invariant of having + * at least one empty slot) and rehash all keys, substituting new buffers at the end. + */ + protected void allocateThenInsertThenRehash(int slot, int pendingKey, float pendingValue) { + assert assigned == resizeAt && ((keys[slot]) == 0) && !((pendingKey) == 0); + + // Try to allocate new buffers first. If we OOM, we leave in a consistent state. + final int[] prevKeys = this.keys; + final float[] prevValues = this.values; + allocateBuffers(nextBufferSize(mask + 1, size(), loadFactor)); + assert this.keys.length > prevKeys.length; + + // We have succeeded at allocating new data so insert the pending key/value at + // the free slot in the old arrays before rehashing. + prevKeys[slot] = pendingKey; + prevValues[slot] = pendingValue; + + // Rehash old keys, including the pending key. + rehash(prevKeys, prevValues); + } + + /** + * Shift all the slot-conflicting keys and values allocated to (and including) slot. + */ + protected void shiftConflictingKeys(int gapSlot) { + final int[] keys = this.keys; + final float[] values = this.values; + final int mask = this.mask; + + // Perform shifts of conflicting keys to fill in the gap. + int distance = 0; + while (true) { + final int slot = (gapSlot + (++distance)) & mask; + final int existing = keys[slot]; + if (((existing) == 0)) { + break; + } + + final int idealSlot = hashKey(existing); + final int shift = (slot - idealSlot) & mask; + if (shift >= distance) { + // Entry at this position was originally at or before the gap slot. + // Move the conflict-shifted entry to the gap's position and repeat the procedure + // for any entries to the right of the current position, treating it + // as the new gap. + keys[gapSlot] = existing; + values[gapSlot] = values[slot]; + gapSlot = slot; + distance = 0; + } + } + + // Mark the last found gap slot without a conflict as empty. + keys[gapSlot] = 0; + values[gapSlot] = 0; + assigned--; + } + + /** Forked from HPPC, holding int index,key and value */ + public static final class IntFloatCursor { + /** + * The current key and value's index in the container this cursor beints to. The meaning of this + * index is defined by the container (usually it will be an index in the underlying storage + * buffer). + */ + public int index; + + /** The current key. */ + public int key; + + /** The current value. 
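A short iteration sketch using the IntFloatCursor defined above together with the keys() and values() views; entry order is deliberately randomized per map instance by the iteration seed, and the wrapper class name is illustrative:

import org.apache.lucene.util.hppc.FloatCursor;
import org.apache.lucene.util.hppc.IntCursor;
import org.apache.lucene.util.hppc.IntFloatHashMap;

public class IterationSketch {
  public static void main(String[] args) {
    IntFloatHashMap map =
        IntFloatHashMap.from(new int[] {1, 2, 3}, new float[] {0.1f, 0.2f, 0.3f});
    // Entry cursors expose the slot index, the key and the value.
    for (IntFloatHashMap.IntFloatCursor c : map) {
      System.out.println(c.key + " => " + c.value);
    }
    // Keys view.
    for (IntCursor k : map.keys()) {
      System.out.println("key " + k.value);
    }
    // Values view.
    for (FloatCursor v : map.values()) {
      System.out.println("value " + v.value);
    }
  }
}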
*/ + public float value; + + @Override + public String toString() { + return "[cursor, index: " + index + ", key: " + key + ", value: " + value + "]"; + } + } +} diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/IntIntHashMap.java b/lucene/core/src/java/org/apache/lucene/util/hppc/IntIntHashMap.java index bb9488ce9d54..b33024faf965 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hppc/IntIntHashMap.java +++ b/lucene/core/src/java/org/apache/lucene/util/hppc/IntIntHashMap.java @@ -310,7 +310,7 @@ public int indexOf(int key) { } public boolean indexExists(int index) { - assert index < 0 || (index >= 0 && index <= mask) || (index == mask + 1 && hasEmptyKey); + assert index < 0 || index <= mask || (index == mask + 1 && hasEmptyKey); return index >= 0; } @@ -806,7 +806,7 @@ protected void shiftConflictingKeys(int gapSlot) { } /** Forked from HPPC, holding int index,key and value */ - public final class IntIntCursor { + public static final class IntIntCursor { /** * The current key and value's index in the container this cursor belongs to. The meaning of * this index is defined by the container (usually it will be an index in the underlying storage diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/IntObjectHashMap.java b/lucene/core/src/java/org/apache/lucene/util/hppc/IntObjectHashMap.java index b493312115f1..ec4a99291dd1 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hppc/IntObjectHashMap.java +++ b/lucene/core/src/java/org/apache/lucene/util/hppc/IntObjectHashMap.java @@ -278,7 +278,7 @@ public int indexOf(int key) { } public boolean indexExists(int index) { - assert index < 0 || (index >= 0 && index <= mask) || (index == mask + 1 && hasEmptyKey); + assert index < 0 || index <= mask || (index == mask + 1 && hasEmptyKey); return index >= 0; } diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/LongArrayList.java b/lucene/core/src/java/org/apache/lucene/util/hppc/LongArrayList.java index a3503944bdd9..a8aa680443e0 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hppc/LongArrayList.java +++ b/lucene/core/src/java/org/apache/lucene/util/hppc/LongArrayList.java @@ -40,13 +40,11 @@ public class LongArrayList implements Iterable, Cloneable, Accountab /** An immutable empty buffer (array). */ public static final long[] EMPTY_ARRAY = new long[0]; - ; - /** * Internal array for storing the list. The array may be larger than the current size ({@link * #size()}). */ - public long[] buffer = EMPTY_ARRAY; + public long[] buffer; /** Current number of elements stored in {@link #buffer}. */ public int elementsCount; @@ -262,8 +260,7 @@ public boolean isEmpty() { * @param expectedElements The total number of elements, inclusive. */ public void ensureCapacity(int expectedElements) { - final int bufferLen = (buffer == null ? 0 : buffer.length); - if (expectedElements > bufferLen) { + if (expectedElements > buffer.length) { ensureBufferSpace(expectedElements - size()); } } @@ -273,8 +270,7 @@ public void ensureCapacity(int expectedElements) { * Increases internal buffer size if needed. */ protected void ensureBufferSpace(int expectedAdditions) { - final int bufferLen = (buffer == null ? 
0 : buffer.length); - if (elementsCount + expectedAdditions > bufferLen) { + if (elementsCount + expectedAdditions > buffer.length) { this.buffer = ArrayUtil.grow(buffer, elementsCount + expectedAdditions); } } diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/LongFloatHashMap.java b/lucene/core/src/java/org/apache/lucene/util/hppc/LongFloatHashMap.java new file mode 100644 index 000000000000..e86fd89f4844 --- /dev/null +++ b/lucene/core/src/java/org/apache/lucene/util/hppc/LongFloatHashMap.java @@ -0,0 +1,850 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.lucene.util.hppc; + +import static org.apache.lucene.util.hppc.HashContainers.DEFAULT_EXPECTED_ELEMENTS; +import static org.apache.lucene.util.hppc.HashContainers.DEFAULT_LOAD_FACTOR; +import static org.apache.lucene.util.hppc.HashContainers.ITERATION_SEED; +import static org.apache.lucene.util.hppc.HashContainers.MAX_LOAD_FACTOR; +import static org.apache.lucene.util.hppc.HashContainers.MIN_LOAD_FACTOR; +import static org.apache.lucene.util.hppc.HashContainers.checkLoadFactor; +import static org.apache.lucene.util.hppc.HashContainers.checkPowerOfTwo; +import static org.apache.lucene.util.hppc.HashContainers.expandAtCount; +import static org.apache.lucene.util.hppc.HashContainers.iterationIncrement; +import static org.apache.lucene.util.hppc.HashContainers.minBufferSize; +import static org.apache.lucene.util.hppc.HashContainers.nextBufferSize; + +import java.util.Arrays; +import java.util.Iterator; +import org.apache.lucene.util.Accountable; +import org.apache.lucene.util.RamUsageEstimator; + +/** + * A hash map of long to float, implemented using open addressing with + * linear probing for collision resolution. + * + *
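A minimal usage sketch for the class introduced here, assuming the put / addTo / putIfAbsent / get / getOrDefault methods declared further down in this file; the wrapper class name is illustrative:

import org.apache.lucene.util.hppc.LongFloatHashMap;

public class LongFloatSketch {
  public static void main(String[] args) {
    LongFloatHashMap scores = new LongFloatHashMap();
    scores.put(9L, 1.5f);                              // insert
    scores.addTo(9L, 0.5f);                            // increment the existing value -> 2.0
    scores.putIfAbsent(7L, 3.0f);                      // inserts only when the key is missing
    System.out.println(scores.get(9L));                // 2.0
    System.out.println(scores.getOrDefault(8L, -1f));  // -1.0 for a missing key
    System.out.println(scores.size());                 // 2
  }
}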

Mostly forked and trimmed from com.carrotsearch.hppc.LongFloatHashMap + * + *

github: https://github.com/carrotsearch/hppc release 0.9.0 + */ +public class LongFloatHashMap + implements Iterable, Accountable, Cloneable { + + private static final long BASE_RAM_BYTES_USED = + RamUsageEstimator.shallowSizeOfInstance(LongFloatHashMap.class); + + /** The array holding keys. */ + public long[] keys; + + /** The array holding values. */ + public float[] values; + + /** + * The number of stored keys (assigned key slots), excluding the special "empty" key, if any (use + * {@link #size()} instead). + * + * @see #size() + */ + protected int assigned; + + /** Mask for slot scans in {@link #keys}. */ + protected int mask; + + /** Expand (rehash) {@link #keys} when {@link #assigned} hits this value. */ + protected int resizeAt; + + /** Special treatment for the "empty slot" key marker. */ + protected boolean hasEmptyKey; + + /** The load factor for {@link #keys}. */ + protected double loadFactor; + + /** Seed used to ensure the hash iteration order is different from an iteration to another. */ + protected int iterationSeed; + + /** New instance with sane defaults. */ + public LongFloatHashMap() { + this(DEFAULT_EXPECTED_ELEMENTS); + } + + /** + * New instance with sane defaults. + * + * @param expectedElements The expected number of elements guaranteed not to cause buffer + * expansion (inclusive). + */ + public LongFloatHashMap(int expectedElements) { + this(expectedElements, DEFAULT_LOAD_FACTOR); + } + + /** + * New instance with the provided defaults. + * + * @param expectedElements The expected number of elements guaranteed not to cause a rehash + * (inclusive). + * @param loadFactor The load factor for internal buffers. Insane load factors (zero, full + * capacity) are rejected by {@link #verifyLoadFactor(double)}. + */ + public LongFloatHashMap(int expectedElements, double loadFactor) { + this.loadFactor = verifyLoadFactor(loadFactor); + iterationSeed = ITERATION_SEED.incrementAndGet(); + ensureCapacity(expectedElements); + } + + /** Create a hash map from all key-value pairs of another map. */ + public LongFloatHashMap(LongFloatHashMap map) { + this(map.size()); + putAll(map); + } + + public float put(long key, float value) { + assert assigned < mask + 1; + + final int mask = this.mask; + if (((key) == 0)) { + float previousValue = hasEmptyKey ? values[mask + 1] : 0; + hasEmptyKey = true; + values[mask + 1] = value; + return previousValue; + } else { + final long[] keys = this.keys; + int slot = hashKey(key) & mask; + + long existing; + while (!((existing = keys[slot]) == 0)) { + if (((existing) == (key))) { + final float previousValue = values[slot]; + values[slot] = value; + return previousValue; + } + slot = (slot + 1) & mask; + } + + if (assigned == resizeAt) { + allocateThenInsertThenRehash(slot, key, value); + } else { + keys[slot] = key; + values[slot] = value; + } + + assigned++; + return 0; + } + } + + public int putAll(Iterable iterable) { + final int count = size(); + for (LongFloatCursor c : iterable) { + put(c.key, c.value); + } + return size() - count; + } + + /** + * Trove-inspired API method. An equivalent of the + * following code: + * + *

+   * if (!map.containsKey(key)) map.put(key, value);
+   * 
+ * + * @param key The key of the value to check. + * @param value The value to put if key does not exist. + * @return true if key did not exist and value was placed + * in the map. + */ + public boolean putIfAbsent(long key, float value) { + int keyIndex = indexOf(key); + if (!indexExists(keyIndex)) { + indexInsert(keyIndex, key, value); + return true; + } else { + return false; + } + } + + /** + * If key exists, putValue is inserted into the map, otherwise any + * existing value is incremented by additionValue. + * + * @param key The key of the value to adjust. + * @param putValue The value to put if key does not exist. + * @param incrementValue The value to add to the existing value if key exists. + * @return Returns the current value associated with key (after changes). + */ + public float putOrAdd(long key, float putValue, float incrementValue) { + assert assigned < mask + 1; + + int keyIndex = indexOf(key); + if (indexExists(keyIndex)) { + putValue = values[keyIndex] + incrementValue; + indexReplace(keyIndex, putValue); + } else { + indexInsert(keyIndex, key, putValue); + } + return putValue; + } + + /** + * Adds incrementValue to any existing value for the given key or + * inserts incrementValue if key did not previously exist. + * + * @param key The key of the value to adjust. + * @param incrementValue The value to put or add to the existing value if key exists. + * @return Returns the current value associated with key (after changes). + */ + public float addTo(long key, float incrementValue) { + return putOrAdd(key, incrementValue, incrementValue); + } + + public float remove(long key) { + final int mask = this.mask; + if (((key) == 0)) { + if (!hasEmptyKey) { + return 0; + } + hasEmptyKey = false; + float previousValue = values[mask + 1]; + values[mask + 1] = 0; + return previousValue; + } else { + final long[] keys = this.keys; + int slot = hashKey(key) & mask; + + long existing; + while (!((existing = keys[slot]) == 0)) { + if (((existing) == (key))) { + final float previousValue = values[slot]; + shiftConflictingKeys(slot); + return previousValue; + } + slot = (slot + 1) & mask; + } + + return 0; + } + } + + public float get(long key) { + if (((key) == 0)) { + return hasEmptyKey ? values[mask + 1] : 0; + } else { + final long[] keys = this.keys; + final int mask = this.mask; + int slot = hashKey(key) & mask; + + long existing; + while (!((existing = keys[slot]) == 0)) { + if (((existing) == (key))) { + return values[slot]; + } + slot = (slot + 1) & mask; + } + + return 0; + } + } + + public float getOrDefault(long key, float defaultValue) { + if (((key) == 0)) { + return hasEmptyKey ? values[mask + 1] : defaultValue; + } else { + final long[] keys = this.keys; + final int mask = this.mask; + int slot = hashKey(key) & mask; + + long existing; + while (!((existing = keys[slot]) == 0)) { + if (((existing) == (key))) { + return values[slot]; + } + slot = (slot + 1) & mask; + } + + return defaultValue; + } + } + + public boolean containsKey(long key) { + if (((key) == 0)) { + return hasEmptyKey; + } else { + final long[] keys = this.keys; + final int mask = this.mask; + int slot = hashKey(key) & mask; + + long existing; + while (!((existing = keys[slot]) == 0)) { + if (((existing) == (key))) { + return true; + } + slot = (slot + 1) & mask; + } + + return false; + } + } + + public int indexOf(long key) { + final int mask = this.mask; + if (((key) == 0)) { + return hasEmptyKey ? 
mask + 1 : ~(mask + 1); + } else { + final long[] keys = this.keys; + int slot = hashKey(key) & mask; + + long existing; + while (!((existing = keys[slot]) == 0)) { + if (((existing) == (key))) { + return slot; + } + slot = (slot + 1) & mask; + } + + return ~slot; + } + } + + public boolean indexExists(int index) { + assert index < 0 || index <= mask || (index == mask + 1 && hasEmptyKey); + + return index >= 0; + } + + public float indexGet(int index) { + assert index >= 0 : "The index must point at an existing key."; + assert index <= mask || (index == mask + 1 && hasEmptyKey); + + return values[index]; + } + + public float indexReplace(int index, float newValue) { + assert index >= 0 : "The index must point at an existing key."; + assert index <= mask || (index == mask + 1 && hasEmptyKey); + + float previousValue = values[index]; + values[index] = newValue; + return previousValue; + } + + public void indexInsert(int index, long key, float value) { + assert index < 0 : "The index must not point at an existing key."; + + index = ~index; + if (((key) == 0)) { + assert index == mask + 1; + values[index] = value; + hasEmptyKey = true; + } else { + assert ((keys[index]) == 0); + + if (assigned == resizeAt) { + allocateThenInsertThenRehash(index, key, value); + } else { + keys[index] = key; + values[index] = value; + } + + assigned++; + } + } + + public float indexRemove(int index) { + assert index >= 0 : "The index must point at an existing key."; + assert index <= mask || (index == mask + 1 && hasEmptyKey); + + float previousValue = values[index]; + if (index > mask) { + assert index == mask + 1; + hasEmptyKey = false; + values[index] = 0; + } else { + shiftConflictingKeys(index); + } + return previousValue; + } + + public void clear() { + assigned = 0; + hasEmptyKey = false; + + Arrays.fill(keys, 0); + + /* */ + } + + public void release() { + assigned = 0; + hasEmptyKey = false; + + keys = null; + values = null; + ensureCapacity(DEFAULT_EXPECTED_ELEMENTS); + } + + public int size() { + return assigned + (hasEmptyKey ? 1 : 0); + } + + public boolean isEmpty() { + return size() == 0; + } + + @Override + public int hashCode() { + int h = hasEmptyKey ? 0xDEADBEEF : 0; + for (LongFloatCursor c : this) { + h += BitMixer.mix(c.key) + BitMixer.mix(c.value); + } + return h; + } + + @Override + public boolean equals(Object obj) { + return (this == obj) + || (obj != null && getClass() == obj.getClass() && equalElements(getClass().cast(obj))); + } + + /** Return true if all keys of some other container exist in this container. */ + protected boolean equalElements(LongFloatHashMap other) { + if (other.size() != size()) { + return false; + } + + for (LongFloatCursor c : other) { + long key = c.key; + if (!containsKey(key) || !(Float.floatToIntBits(c.value) == Float.floatToIntBits(get(key)))) { + return false; + } + } + + return true; + } + + /** + * Ensure this container can hold at least the given number of keys (entries) without resizing its + * buffers. + * + * @param expectedElements The total number of keys, inclusive. + */ + public void ensureCapacity(int expectedElements) { + if (expectedElements > resizeAt || keys == null) { + final long[] prevKeys = this.keys; + final float[] prevValues = this.values; + allocateBuffers(minBufferSize(expectedElements, loadFactor)); + if (prevKeys != null && !isEmpty()) { + rehash(prevKeys, prevValues); + } + } + } + + /** + * Provides the next iteration seed used to build the iteration starting slot and offset + * increment. 
This method does not need to be synchronized, what matters is that each thread gets + * a sequence of varying seeds. + */ + protected int nextIterationSeed() { + return iterationSeed = BitMixer.mixPhi(iterationSeed); + } + + @Override + public long ramBytesUsed() { + return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(keys) + RamUsageEstimator.sizeOf(values); + } + + /** An iterator implementation for {@link #iterator}. */ + private final class EntryIterator extends AbstractIterator { + private final LongFloatCursor cursor; + private final int increment; + private int index; + private int slot; + + public EntryIterator() { + cursor = new LongFloatCursor(); + int seed = nextIterationSeed(); + increment = iterationIncrement(seed); + slot = seed & mask; + } + + @Override + protected LongFloatCursor fetch() { + final int mask = LongFloatHashMap.this.mask; + while (index <= mask) { + long existing; + index++; + slot = (slot + increment) & mask; + if (!((existing = keys[slot]) == 0)) { + cursor.index = slot; + cursor.key = existing; + cursor.value = values[slot]; + return cursor; + } + } + + if (index == mask + 1 && hasEmptyKey) { + cursor.index = index; + cursor.key = 0; + cursor.value = values[index++]; + return cursor; + } + + return done(); + } + } + + @Override + public Iterator iterator() { + return new EntryIterator(); + } + + /** Returns a specialized view of the keys of this associated container. */ + public KeysContainer keys() { + return new KeysContainer(); + } + + /** A view of the keys inside this hash map. */ + public final class KeysContainer implements Iterable { + + @Override + public Iterator iterator() { + return new KeysIterator(); + } + + public int size() { + return LongFloatHashMap.this.size(); + } + + public long[] toArray() { + long[] array = new long[size()]; + int i = 0; + for (LongCursor cursor : this) { + array[i++] = cursor.value; + } + return array; + } + } + + /** An iterator over the set of assigned keys. */ + private final class KeysIterator extends AbstractIterator { + private final LongCursor cursor; + private final int increment; + private int index; + private int slot; + + public KeysIterator() { + cursor = new LongCursor(); + int seed = nextIterationSeed(); + increment = iterationIncrement(seed); + slot = seed & mask; + } + + @Override + protected LongCursor fetch() { + final int mask = LongFloatHashMap.this.mask; + while (index <= mask) { + long existing; + index++; + slot = (slot + increment) & mask; + if (!((existing = keys[slot]) == 0)) { + cursor.index = slot; + cursor.value = existing; + return cursor; + } + } + + if (index == mask + 1 && hasEmptyKey) { + cursor.index = index++; + cursor.value = 0; + return cursor; + } + + return done(); + } + } + + /** + * @return Returns a container with all values stored in this map. + */ + public ValuesContainer values() { + return new ValuesContainer(); + } + + /** A view over the set of values of this map. */ + public final class ValuesContainer implements Iterable { + + @Override + public Iterator iterator() { + return new ValuesIterator(); + } + + public int size() { + return LongFloatHashMap.this.size(); + } + + public float[] toArray() { + float[] array = new float[size()]; + int i = 0; + for (FloatCursor cursor : this) { + array[i++] = cursor.value; + } + return array; + } + } + + /** An iterator over the set of assigned values. 
*/ + private final class ValuesIterator extends AbstractIterator { + private final FloatCursor cursor; + private final int increment; + private int index; + private int slot; + + public ValuesIterator() { + cursor = new FloatCursor(); + int seed = nextIterationSeed(); + increment = iterationIncrement(seed); + slot = seed & mask; + } + + @Override + protected FloatCursor fetch() { + final int mask = LongFloatHashMap.this.mask; + while (index <= mask) { + index++; + slot = (slot + increment) & mask; + if (!((keys[slot]) == 0)) { + cursor.index = slot; + cursor.value = values[slot]; + return cursor; + } + } + + if (index == mask + 1 && hasEmptyKey) { + cursor.index = index; + cursor.value = values[index++]; + return cursor; + } + + return done(); + } + } + + @Override + public LongFloatHashMap clone() { + try { + /* */ + LongFloatHashMap cloned = (LongFloatHashMap) super.clone(); + cloned.keys = keys.clone(); + cloned.values = values.clone(); + cloned.hasEmptyKey = hasEmptyKey; + cloned.iterationSeed = ITERATION_SEED.incrementAndGet(); + return cloned; + } catch (CloneNotSupportedException e) { + throw new RuntimeException(e); + } + } + + /** Convert the contents of this map to a human-friendly string. */ + @Override + public String toString() { + final StringBuilder buffer = new StringBuilder(); + buffer.append("["); + + boolean first = true; + for (LongFloatCursor cursor : this) { + if (!first) { + buffer.append(", "); + } + buffer.append(cursor.key); + buffer.append("=>"); + buffer.append(cursor.value); + first = false; + } + buffer.append("]"); + return buffer.toString(); + } + + /** Creates a hash map from two index-aligned arrays of key-value pairs. */ + public static LongFloatHashMap from(long[] keys, float[] values) { + if (keys.length != values.length) { + throw new IllegalArgumentException( + "Arrays of keys and values must have an identical length."); + } + + LongFloatHashMap map = new LongFloatHashMap(keys.length); + for (int i = 0; i < keys.length; i++) { + map.put(keys[i], values[i]); + } + + return map; + } + + /** + * Returns a hash code for the given key. + * + *

The output from this function should evenly distribute keys across the entire integer range. + */ + protected int hashKey(long key) { + assert !((key) == 0); // Handled as a special case (empty slot marker). + return BitMixer.mixPhi(key); + } + + /** + * Validate load factor range and return it. Override and suppress if you need insane load + * factors. + */ + protected double verifyLoadFactor(double loadFactor) { + checkLoadFactor(loadFactor, MIN_LOAD_FACTOR, MAX_LOAD_FACTOR); + return loadFactor; + } + + /** Rehash from old buffers to new buffers. */ + protected void rehash(long[] fromKeys, float[] fromValues) { + assert fromKeys.length == fromValues.length && checkPowerOfTwo(fromKeys.length - 1); + + // Rehash all stored key/value pairs into the new buffers. + final long[] keys = this.keys; + final float[] values = this.values; + final int mask = this.mask; + long existing; + + // Copy the zero element's slot, then rehash everything else. + int from = fromKeys.length - 1; + keys[keys.length - 1] = fromKeys[from]; + values[values.length - 1] = fromValues[from]; + while (--from >= 0) { + if (!((existing = fromKeys[from]) == 0)) { + int slot = hashKey(existing) & mask; + while (!((keys[slot]) == 0)) { + slot = (slot + 1) & mask; + } + keys[slot] = existing; + values[slot] = fromValues[from]; + } + } + } + + /** + * Allocate new internal buffers. This method attempts to allocate and assign internal buffers + * atomically (either allocations succeed or not). + */ + protected void allocateBuffers(int arraySize) { + assert Integer.bitCount(arraySize) == 1; + + // Ensure no change is done if we hit an OOM. + long[] prevKeys = this.keys; + float[] prevValues = this.values; + try { + int emptyElementSlot = 1; + this.keys = (new long[arraySize + emptyElementSlot]); + this.values = (new float[arraySize + emptyElementSlot]); + } catch (OutOfMemoryError e) { + this.keys = prevKeys; + this.values = prevValues; + throw new BufferAllocationException( + "Not enough memory to allocate buffers for rehashing: %,d -> %,d", + e, this.mask + 1, arraySize); + } + + this.resizeAt = expandAtCount(arraySize, loadFactor); + this.mask = arraySize - 1; + } + + /** + * This method is invoked when there is a new key/ value pair to be inserted into the buffers but + * there is not enough empty slots to do so. + * + *

New buffers are allocated. If this succeeds, we know we can proceed with rehashing so we + * assign the pending element to the previous buffer (possibly violating the invariant of having + * at least one empty slot) and rehash all keys, substituting new buffers at the end. + */ + protected void allocateThenInsertThenRehash(int slot, long pendingKey, float pendingValue) { + assert assigned == resizeAt && ((keys[slot]) == 0) && !((pendingKey) == 0); + + // Try to allocate new buffers first. If we OOM, we leave in a consistent state. + final long[] prevKeys = this.keys; + final float[] prevValues = this.values; + allocateBuffers(nextBufferSize(mask + 1, size(), loadFactor)); + assert this.keys.length > prevKeys.length; + + // We have succeeded at allocating new data so insert the pending key/value at + // the free slot in the old arrays before rehashing. + prevKeys[slot] = pendingKey; + prevValues[slot] = pendingValue; + + // Rehash old keys, including the pending key. + rehash(prevKeys, prevValues); + } + + /** + * Shift all the slot-conflicting keys and values allocated to (and including) slot. + */ + protected void shiftConflictingKeys(int gapSlot) { + final long[] keys = this.keys; + final float[] values = this.values; + final int mask = this.mask; + + // Perform shifts of conflicting keys to fill in the gap. + int distance = 0; + while (true) { + final int slot = (gapSlot + (++distance)) & mask; + final long existing = keys[slot]; + if (((existing) == 0)) { + break; + } + + final int idealSlot = hashKey(existing); + final int shift = (slot - idealSlot) & mask; + if (shift >= distance) { + // Entry at this position was originally at or before the gap slot. + // Move the conflict-shifted entry to the gap's position and repeat the procedure + // for any entries to the right of the current position, treating it + // as the new gap. + keys[gapSlot] = existing; + values[gapSlot] = values[slot]; + gapSlot = slot; + distance = 0; + } + } + + // Mark the last found gap slot without a conflict as empty. + keys[gapSlot] = 0; + values[gapSlot] = 0; + assigned--; + } + + /** Forked from HPPC, holding int index,key and value */ + public static final class LongFloatCursor { + /** + * The current key and value's index in the container this cursor belongs to. The meaning of + * this index is defined by the container (usually it will be an index in the underlying storage + * buffer). + */ + public int index; + + /** The current key. */ + public long key; + + /** The current value. 
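A sketch of pre-sizing to stay off the allocateBuffers / rehash path described above, assuming the expectedElements constructor and ensureCapacity of this class; the element counts and the wrapper class name are illustrative:

import org.apache.lucene.util.hppc.LongFloatHashMap;

public class PresizeSketch {
  public static void main(String[] args) {
    int expected = 1_000_000;
    // Sizing up front avoids intermediate buffer growth while loading.
    LongFloatHashMap map = new LongFloatHashMap(expected);
    for (long i = 1; i <= expected; i++) {
      map.put(i, (float) i);
    }
    // The same guarantee can be requested later for an existing map.
    map.ensureCapacity(2 * expected);
    System.out.println(map.size() + " entries, ~" + map.ramBytesUsed() + " bytes");
  }
}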
*/ + public float value; + + @Override + public String toString() { + return "[cursor, index: " + index + ", key: " + key + ", value: " + value + "]"; + } + } +} diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/LongIntHashMap.java b/lucene/core/src/java/org/apache/lucene/util/hppc/LongIntHashMap.java index 5240beeecedf..14c3cf045a60 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hppc/LongIntHashMap.java +++ b/lucene/core/src/java/org/apache/lucene/util/hppc/LongIntHashMap.java @@ -311,7 +311,7 @@ public int indexOf(long key) { } public boolean indexExists(int index) { - assert index < 0 || (index >= 0 && index <= mask) || (index == mask + 1 && hasEmptyKey); + assert index < 0 || index <= mask || (index == mask + 1 && hasEmptyKey); return index >= 0; } @@ -818,7 +818,7 @@ protected void shiftConflictingKeys(int gapSlot) { } /** Forked from HPPC, holding int index,key and value */ - public final class LongIntCursor { + public static final class LongIntCursor { /** * The current key and value's index in the container this cursor belongs to. The meaning of * this index is defined by the container (usually it will be an index in the underlying storage diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/LongObjectHashMap.java b/lucene/core/src/java/org/apache/lucene/util/hppc/LongObjectHashMap.java index ae6c111789be..4cfbb63c0b2b 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hppc/LongObjectHashMap.java +++ b/lucene/core/src/java/org/apache/lucene/util/hppc/LongObjectHashMap.java @@ -278,7 +278,7 @@ public int indexOf(long key) { } public boolean indexExists(int index) { - assert index < 0 || (index >= 0 && index <= mask) || (index == mask + 1 && hasEmptyKey); + assert index < 0 || index <= mask || (index == mask + 1 && hasEmptyKey); return index >= 0; } diff --git a/lucene/core/src/test/org/apache/lucene/util/hppc/TestFloatArrayList.java b/lucene/core/src/test/org/apache/lucene/util/hppc/TestFloatArrayList.java new file mode 100644 index 000000000000..f6e156116934 --- /dev/null +++ b/lucene/core/src/test/org/apache/lucene/util/hppc/TestFloatArrayList.java @@ -0,0 +1,460 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.lucene.util.hppc; + +import java.util.Arrays; +import java.util.Iterator; +import org.apache.lucene.tests.util.LuceneTestCase; +import org.junit.Before; +import org.junit.Test; + +/** + * Tests for {@link FloatArrayList}. + * + *
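A short FloatArrayList sketch covering the operations these tests exercise (from, add, removeAt, sort, reverse, toArray), assuming the class as added by this patch; the wrapper class name is illustrative:

import java.util.Arrays;
import org.apache.lucene.util.hppc.FloatArrayList;

public class FloatArrayListSketch {
  public static void main(String[] args) {
    FloatArrayList list = FloatArrayList.from(3f, 1f, 2f);
    list.add(4f);       // append -> [3.0, 1.0, 2.0, 4.0]
    list.removeAt(0);   // drop the leading 3.0
    list.sort();        // in-place sort, returns the list itself
    list.reverse();     // in-place reverse
    System.out.println(Arrays.toString(list.toArray())); // [4.0, 2.0, 1.0]
  }
}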

Mostly forked and trimmed from com.carrotsearch.hppc.FloatArrayListTest + * + *

github: https://github.com/carrotsearch/hppc release: 0.9.0 + */ +public class TestFloatArrayList extends LuceneTestCase { + private final float key0 = cast(0); + private final float key1 = cast(1); + private final float key2 = cast(2); + private final float key3 = cast(3); + private final float key4 = cast(4); + private final float key5 = cast(5); + private final float key6 = cast(6); + private final float key7 = cast(7); + + /** Convert to target type from an integer used to test stuff. */ + private float cast(int v) { + return v; + } + + /** Per-test fresh initialized instance. */ + private FloatArrayList list; + + @Before + public void initialize() { + list = new FloatArrayList(); + } + + @Test + public void testInitiallyEmpty() { + assertEquals(0, list.size()); + } + + @Test + public void testAdd() { + list.add(key1, key2); + assertListEquals(list.toArray(), 1, 2); + } + + @Test + public void testAddTwoArgs() { + list.add(key1, key2); + list.add(key3, key4); + assertListEquals(list.toArray(), 1, 2, 3, 4); + } + + @Test + public void testAddArray() { + list.add(asArray(0, 1, 2, 3), 1, 2); + assertListEquals(list.toArray(), 1, 2); + } + + @Test + public void testAddVarArg() { + list.add(asArray(0, 1, 2, 3)); + list.add(key4, key5, key6, key7); + assertListEquals(list.toArray(), 0, 1, 2, 3, 4, 5, 6, 7); + } + + @Test + public void testAddAll() { + FloatArrayList list2 = new FloatArrayList(); + list2.add(asArray(0, 1, 2)); + + list.addAll(list2); + list.addAll(list2); + + assertListEquals(list.toArray(), 0, 1, 2, 0, 1, 2); + } + + @Test + public void testInsert() { + list.insert(0, key1); + list.insert(0, key2); + list.insert(2, key3); + list.insert(1, key4); + + assertListEquals(list.toArray(), 2, 4, 1, 3); + } + + @Test + public void testSet() { + list.add(asArray(0, 1, 2)); + + assertEquals(0, list.set(0, key3)); + assertEquals(1, list.set(1, key4)); + assertEquals(2, list.set(2, key5)); + + assertListEquals(list.toArray(), 3, 4, 5); + } + + @Test + public void testRemoveAt() { + list.add(asArray(0, 1, 2, 3, 4)); + + list.removeAt(0); + list.removeAt(2); + list.removeAt(1); + + assertListEquals(list.toArray(), 1, 4); + } + + @Test + public void testRemoveLast() { + list.add(asArray(0, 1, 2, 3, 4)); + + assertEquals(4, list.removeLast()); + assertEquals(4, list.size()); + assertListEquals(list.toArray(), 0, 1, 2, 3); + assertEquals(3, list.removeLast()); + assertEquals(3, list.size()); + assertListEquals(list.toArray(), 0, 1, 2); + assertEquals(2, list.removeLast()); + assertEquals(1, list.removeLast()); + assertEquals(0, list.removeLast()); + assertTrue(list.isEmpty()); + } + + @Test + public void testRemoveElement() { + list.add(asArray(0, 1, 2, 3, 3, 4)); + + assertTrue(list.removeElement(3)); + assertTrue(list.removeElement(2)); + assertFalse(list.removeElement(5)); + + assertListEquals(list.toArray(), 0, 1, 3, 4); + } + + @Test + public void testRemoveRange() { + list.add(asArray(0, 1, 2, 3, 4)); + + list.removeRange(0, 2); + assertListEquals(list.toArray(), 2, 3, 4); + + list.removeRange(2, 3); + assertListEquals(list.toArray(), 2, 3); + + list.removeRange(1, 1); + assertListEquals(list.toArray(), 2, 3); + + list.removeRange(0, 1); + assertListEquals(list.toArray(), 3); + } + + @Test + public void testRemoveFirstLast() { + list.add(asArray(0, 1, 2, 1, 0)); + + assertEquals(-1, list.removeFirst(key5)); + assertEquals(-1, list.removeLast(key5)); + assertListEquals(list.toArray(), 0, 1, 2, 1, 0); + + assertEquals(1, list.removeFirst(key1)); + assertListEquals(list.toArray(), 0, 2, 1, 
0); + assertEquals(3, list.removeLast(key0)); + assertListEquals(list.toArray(), 0, 2, 1); + assertEquals(0, list.removeLast(key0)); + assertListEquals(list.toArray(), 2, 1); + assertEquals(-1, list.removeLast(key0)); + } + + @Test + public void testRemoveAll() { + list.add(asArray(0, 1, 0, 1, 0)); + + assertEquals(0, list.removeAll(key2)); + assertEquals(3, list.removeAll(key0)); + assertListEquals(list.toArray(), 1, 1); + + assertEquals(2, list.removeAll(key1)); + assertTrue(list.isEmpty()); + } + + @Test + public void testIndexOf() { + list.add(asArray(0, 1, 2, 1, 0)); + + assertEquals(0, list.indexOf(key0)); + assertEquals(-1, list.indexOf(key3)); + assertEquals(2, list.indexOf(key2)); + } + + @Test + public void testLastIndexOf() { + list.add(asArray(0, 1, 2, 1, 0)); + + assertEquals(4, list.lastIndexOf(key0)); + assertEquals(-1, list.lastIndexOf(key3)); + assertEquals(2, list.lastIndexOf(key2)); + } + + @Test + public void testEnsureCapacity() { + FloatArrayList list = new FloatArrayList(0); + assertEquals(list.size(), list.buffer.length); + float[] buffer1 = list.buffer; + list.ensureCapacity(100); + assertNotSame(buffer1, list.buffer); + } + + @Test + public void testResizeAndCleanBuffer() { + list.ensureCapacity(20); + Arrays.fill(list.buffer, key1); + + list.resize(10); + assertEquals(10, list.size()); + for (int i = 0; i < list.size(); i++) { + assertEquals(0, list.get(i)); + } + + Arrays.fill(list.buffer, 0); + for (int i = 5; i < list.size(); i++) { + list.set(i, key1); + } + list.resize(5); + assertEquals(5, list.size()); + for (int i = list.size(); i < list.buffer.length; i++) { + assertEquals(0, list.buffer[i]); + } + } + + @Test + public void testTrimToSize() { + list.add(asArray(1, 2)); + list.trimToSize(); + assertEquals(2, list.buffer.length); + } + + @Test + public void testRelease() { + list.add(asArray(1, 2)); + list.release(); + assertEquals(0, list.size()); + list.add(asArray(1, 2)); + assertEquals(2, list.size()); + } + + @Test + public void testIterable() { + list.add(asArray(0, 1, 2, 3)); + int count = 0; + for (FloatCursor cursor : list) { + count++; + assertEquals(list.get(cursor.index), cursor.value); + assertEquals(list.buffer[cursor.index], cursor.value); + } + assertEquals(count, list.size()); + + count = 0; + list.resize(0); + for (@SuppressWarnings("unused") FloatCursor cursor : list) { + count++; + } + assertEquals(0, count); + } + + @Test + public void testIterator() { + list.add(asArray(0, 1, 2, 3)); + Iterator iterator = list.iterator(); + int count = 0; + while (iterator.hasNext()) { + iterator.hasNext(); + iterator.hasNext(); + iterator.hasNext(); + iterator.next(); + count++; + } + assertEquals(count, list.size()); + + list.resize(0); + assertFalse(list.iterator().hasNext()); + } + + @Test + public void testClear() { + list.add(asArray(1, 2, 3)); + list.clear(); + assertTrue(list.isEmpty()); + assertEquals(-1, list.indexOf(cast(1))); + } + + @Test + public void testFrom() { + list = FloatArrayList.from(key1, key2, key3); + assertEquals(3, list.size()); + assertListEquals(list.toArray(), 1, 2, 3); + assertEquals(list.size(), list.buffer.length); + } + + @Test + public void testCopyList() { + list.add(asArray(1, 2, 3)); + FloatArrayList copy = new FloatArrayList(list); + assertEquals(3, copy.size()); + assertListEquals(copy.toArray(), 1, 2, 3); + assertEquals(copy.size(), copy.buffer.length); + } + + @Test + public void testHashCodeEquals() { + FloatArrayList l0 = FloatArrayList.from(); + assertEquals(1, l0.hashCode()); + assertEquals(l0, 
FloatArrayList.from()); + + FloatArrayList l1 = FloatArrayList.from(key1, key2, key3); + FloatArrayList l2 = FloatArrayList.from(key1, key2); + l2.add(key3); + + assertEquals(l1.hashCode(), l2.hashCode()); + assertEquals(l1, l2); + } + + @Test + public void testEqualElements() { + FloatArrayList l1 = FloatArrayList.from(key1, key2, key3); + FloatArrayList l2 = FloatArrayList.from(key1, key2); + l2.add(key3); + + assertEquals(l1.hashCode(), l2.hashCode()); + assertTrue(l2.equalElements(l1)); + } + + @Test + public void testToArray() { + FloatArrayList l1 = FloatArrayList.from(key1, key2, key3); + l1.ensureCapacity(100); + float[] result = l1.toArray(); + assertArrayEquals(new float[] {key1, key2, key3}, result); + } + + @Test + public void testClone() { + list.add(key1, key2, key3); + + FloatArrayList cloned = list.clone(); + cloned.removeAt(cloned.indexOf(key1)); + + assertSortedListEquals(list.toArray(), key1, key2, key3); + assertSortedListEquals(cloned.toArray(), key2, key3); + } + + @Test + public void testToString() { + assertEquals( + "[" + key1 + ", " + key2 + ", " + key3 + "]", + FloatArrayList.from(key1, key2, key3).toString()); + } + + @Test + public void testEqualsSameClass() { + FloatArrayList l1 = FloatArrayList.from(key1, key2, key3); + FloatArrayList l2 = FloatArrayList.from(key1, key2, key3); + FloatArrayList l3 = FloatArrayList.from(key1, key3, key2); + + assertEquals(l1, l2); + assertEquals(l1.hashCode(), l2.hashCode()); + assertNotEquals(l1, l3); + } + + @Test + public void testEqualsSubClass() { + class Sub extends FloatArrayList {} + ; + + FloatArrayList l1 = FloatArrayList.from(key1, key2, key3); + FloatArrayList l2 = new Sub(); + FloatArrayList l3 = new Sub(); + l2.addAll(l1); + l3.addAll(l1); + + assertEquals(l2, l3); + assertNotEquals(l1, l3); + } + + @Test + public void testSort() { + list.add(key3, key1, key3, key2); + FloatArrayList list2 = new FloatArrayList(); + list2.ensureCapacity(100); + list2.addAll(list); + assertSame(list2, list2.sort()); + assertEquals(FloatArrayList.from(key1, key2, key3, key3), list2); + } + + @Test + public void testReverse() { + for (int i = 0; i < 10; i++) { + float[] elements = new float[i]; + for (int j = 0; j < i; j++) { + elements[j] = cast(j); + } + FloatArrayList list = new FloatArrayList(); + list.ensureCapacity(30); + list.add(elements); + assertSame(list, list.reverse()); + assertEquals(elements.length, list.size()); + int reverseIndex = elements.length - 1; + for (FloatCursor cursor : list) { + assertEquals(elements[reverseIndex--], cursor.value); + } + } + } + + /** Check if the array's content is identical to a given sequence of elements. */ + private static void assertListEquals(float[] array, float... elements) { + assertEquals(elements.length, array.length); + assertArrayEquals(elements, array); + } + + private static float[] asArray(float... elements) { + return elements; + } + + /** Check if the array's content is identical to a given sequence of elements. */ + private static void assertSortedListEquals(float[] array, float... 
elements) { + assertEquals(elements.length, array.length); + Arrays.sort(array); + Arrays.sort(elements); + assertArrayEquals(elements, array); + } + + private static void assertEquals(float f1, float f2) { + assertEquals(f1, f2, 0f); + } + + private static void assertArrayEquals(float[] f1, float[] f2) { + assertArrayEquals(f1, f2, 0f); + } +} diff --git a/lucene/core/src/test/org/apache/lucene/util/hppc/TestIntArrayList.java b/lucene/core/src/test/org/apache/lucene/util/hppc/TestIntArrayList.java index dd1371aea77a..cf1bed5e772d 100644 --- a/lucene/core/src/test/org/apache/lucene/util/hppc/TestIntArrayList.java +++ b/lucene/core/src/test/org/apache/lucene/util/hppc/TestIntArrayList.java @@ -418,13 +418,12 @@ public void testStream() { @Test public void testSort() { - list.add(key1, key3, key2); - + list.add(key3, key1, key3, key2); IntArrayList list2 = new IntArrayList(); list2.ensureCapacity(100); list2.addAll(list); assertSame(list2, list2.sort()); - assertEquals(IntArrayList.from(list.stream().sorted().toArray()), list2); + assertEquals(IntArrayList.from(key1, key2, key3, key3), list2); } @Test diff --git a/lucene/core/src/test/org/apache/lucene/util/hppc/TestIntDoubleHashMap.java b/lucene/core/src/test/org/apache/lucene/util/hppc/TestIntDoubleHashMap.java new file mode 100644 index 000000000000..7cf43175bef0 --- /dev/null +++ b/lucene/core/src/test/org/apache/lucene/util/hppc/TestIntDoubleHashMap.java @@ -0,0 +1,654 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.lucene.util.hppc; + +import com.carrotsearch.randomizedtesting.RandomizedTest; +import java.util.Arrays; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Random; +import java.util.concurrent.atomic.AtomicInteger; +import org.apache.lucene.tests.util.LuceneTestCase; +import org.junit.Test; + +/** + * Tests for {@link IntDoubleHashMap}. + * + *
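A counting sketch with IntDoubleHashMap, assuming putOrAdd / addTo as forked in this patch: putOrAdd stores putValue for a new key and otherwise adds incrementValue to the stored value; the wrapper class name is illustrative:

import org.apache.lucene.util.hppc.IntDoubleHashMap;

public class CountingSketch {
  public static void main(String[] args) {
    int[] ordinals = {3, 7, 3, 3, 7};
    IntDoubleHashMap weight = new IntDoubleHashMap();
    for (int ord : ordinals) {
      // First occurrence stores 1.0, later occurrences add 1.0.
      weight.putOrAdd(ord, 1d, 1d);
    }
    System.out.println(weight.get(3));        // 3.0
    System.out.println(weight.get(7));        // 2.0
    System.out.println(weight.addTo(7, 0.5)); // 2.5; addTo is putOrAdd with a single value
  }
}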

Mostly forked and trimmed from com.carrotsearch.hppc.IntDoubleHashMapTest + * + *

github: https://github.com/carrotsearch/hppc release: 0.9.0 + */ +public class TestIntDoubleHashMap extends LuceneTestCase { + /* Ready to use key values. */ + + private final int keyE = 0; + private final int key1 = cast(1); + private final int key2 = cast(2); + private final int key3 = cast(3); + private final int key4 = cast(4); + + /** Convert to target type from an integer used to test stuff. */ + private int cast(int v) { + return v; + } + + /** Create a new array of a given type and copy the arguments to this array. */ + private int[] newArray(int... elements) { + return elements; + } + + private static int randomIntBetween(int min, int max) { + return min + random().nextInt(max + 1 - min); + } + + /** Check if the array's content is identical to a given sequence of elements. */ + private static void assertSortedListEquals(int[] array, int... elements) { + assertEquals(elements.length, array.length); + Arrays.sort(array); + Arrays.sort(elements); + assertArrayEquals(elements, array); + } + + /** Check if the array's content is identical to a given sequence of elements. */ + private static void assertSortedListEquals(double[] array, double... elements) { + assertEquals(elements.length, array.length); + Arrays.sort(array); + Arrays.sort(elements); + assertArrayEquals(elements, array); + } + + private final int value0 = vcast(0); + private final int value1 = vcast(1); + private final int value2 = vcast(2); + private final int value3 = vcast(3); + private final int value4 = vcast(4); + + /** Per-test fresh initialized instance. */ + private IntDoubleHashMap map = newInstance(); + + private IntDoubleHashMap newInstance() { + return new IntDoubleHashMap(); + } + + /** Convert to target type from an integer used to test stuff. */ + private int vcast(int value) { + return value; + } + + /** Create a new array of a given type and copy the arguments to this array. */ + /* */ + private double[] newvArray(int... elements) { + double[] v = new double[elements.length]; + for (int i = 0; i < elements.length; i++) { + v[i] = elements[i]; + } + return v; + } + + private void assertSameMap(final IntDoubleHashMap c1, final IntDoubleHashMap c2) { + assertEquals(c1.size(), c2.size()); + + for (IntDoubleHashMap.IntDoubleCursor entry : c1) { + assertTrue(c2.containsKey(entry.key)); + assertEquals2(entry.value, c2.get(entry.key)); + } + } + + private static void assertEquals2(double v1, double v2) { + assertEquals(v1, v2, 0f); + } + + private static void assertArrayEquals(double[] v1, double[] v2) { + assertArrayEquals(v1, v2, 0f); + } + + /* */ + @Test + public void testEnsureCapacity() { + final AtomicInteger expands = new AtomicInteger(); + IntDoubleHashMap map = + new IntDoubleHashMap(0) { + @Override + protected void allocateBuffers(int arraySize) { + super.allocateBuffers(arraySize); + expands.incrementAndGet(); + } + }; + + // Add some elements. + final int max = rarely() ? 
0 : randomIntBetween(0, 250); + for (int i = 0; i < max; i++) { + map.put(cast(i), value0); + } + + final int additions = randomIntBetween(max, max + 5000); + map.ensureCapacity(additions + map.size()); + final int before = expands.get(); + for (int i = 0; i < additions; i++) { + map.put(cast(i), value0); + } + assertEquals(before, expands.get()); + } + + @Test + public void testCursorIndexIsValid() { + map.put(keyE, value1); + map.put(key1, value2); + map.put(key2, value3); + + for (IntDoubleHashMap.IntDoubleCursor c : map) { + assertTrue(map.indexExists(c.index)); + assertEquals2(c.value, map.indexGet(c.index)); + } + } + + @Test + public void testIndexMethods() { + map.put(keyE, value1); + map.put(key1, value2); + + assertTrue(map.indexOf(keyE) >= 0); + assertTrue(map.indexOf(key1) >= 0); + assertTrue(map.indexOf(key2) < 0); + + assertTrue(map.indexExists(map.indexOf(keyE))); + assertTrue(map.indexExists(map.indexOf(key1))); + assertFalse(map.indexExists(map.indexOf(key2))); + + assertEquals2(value1, map.indexGet(map.indexOf(keyE))); + assertEquals2(value2, map.indexGet(map.indexOf(key1))); + + expectThrows( + AssertionError.class, + () -> { + map.indexGet(map.indexOf(key2)); + }); + + assertEquals2(value1, map.indexReplace(map.indexOf(keyE), value3)); + assertEquals2(value2, map.indexReplace(map.indexOf(key1), value4)); + assertEquals2(value3, map.indexGet(map.indexOf(keyE))); + assertEquals2(value4, map.indexGet(map.indexOf(key1))); + + map.indexInsert(map.indexOf(key2), key2, value1); + assertEquals2(value1, map.indexGet(map.indexOf(key2))); + assertEquals(3, map.size()); + + assertEquals2(value3, map.indexRemove(map.indexOf(keyE))); + assertEquals(2, map.size()); + assertEquals2(value1, map.indexRemove(map.indexOf(key2))); + assertEquals(1, map.size()); + assertTrue(map.indexOf(keyE) < 0); + assertTrue(map.indexOf(key1) >= 0); + assertTrue(map.indexOf(key2) < 0); + } + + /* */ + @Test + public void testCloningConstructor() { + map.put(key1, value1); + map.put(key2, value2); + map.put(key3, value3); + + assertSameMap(map, new IntDoubleHashMap(map)); + } + + /* */ + @Test + public void testFromArrays() { + map.put(key1, value1); + map.put(key2, value2); + map.put(key3, value3); + + IntDoubleHashMap map2 = + IntDoubleHashMap.from(newArray(key1, key2, key3), newvArray(value1, value2, value3)); + + assertSameMap(map, map2); + } + + @Test + public void testGetOrDefault() { + map.put(key2, value2); + assertTrue(map.containsKey(key2)); + + map.put(key1, value1); + assertEquals2(value1, map.getOrDefault(key1, value3)); + assertEquals2(value3, map.getOrDefault(key3, value3)); + map.remove(key1); + assertEquals2(value3, map.getOrDefault(key1, value3)); + } + + /* */ + @Test + public void testPut() { + map.put(key1, value1); + + assertTrue(map.containsKey(key1)); + assertEquals2(value1, map.get(key1)); + } + + /* */ + @Test + public void testPutOverExistingKey() { + map.put(key1, value1); + assertEquals2(value1, map.put(key1, value3)); + assertEquals2(value3, map.get(key1)); + } + + /* */ + @Test + public void testPutWithExpansions() { + final int COUNT = 10000; + final Random rnd = new Random(random().nextInt()); + final HashSet values = new HashSet(); + + for (int i = 0; i < COUNT; i++) { + final int v = rnd.nextInt(); + final boolean hadKey = values.contains(cast(v)); + values.add(cast(v)); + + assertEquals(hadKey, map.containsKey(cast(v))); + map.put(cast(v), vcast(v)); + assertEquals(values.size(), map.size()); + } + assertEquals(values.size(), map.size()); + } + + /* */ + @Test + public 
void testPutAll() { + map.put(key1, value1); + map.put(key2, value1); + + IntDoubleHashMap map2 = newInstance(); + + map2.put(key2, value2); + map2.put(keyE, value1); + + // One new key (keyE). + assertEquals(1, map.putAll(map2)); + + // Assert the value under key2 has been replaced. + assertEquals2(value2, map.get(key2)); + + // And key3 has been added. + assertEquals2(value1, map.get(keyE)); + assertEquals(3, map.size()); + } + + /* */ + @Test + public void testPutIfAbsent() { + assertTrue(map.putIfAbsent(key1, value1)); + assertFalse(map.putIfAbsent(key1, value2)); + assertEquals2(value1, map.get(key1)); + } + + @Test + public void testPutOrAdd() { + assertEquals2(value1, map.putOrAdd(key1, value1, value2)); + assertEquals2(value3, map.putOrAdd(key1, value1, value2)); + } + + @Test + public void testAddTo() { + assertEquals2(value1, map.addTo(key1, value1)); + assertEquals2(value3, map.addTo(key1, value2)); + } + + /* */ + @Test + public void testRemove() { + map.put(key1, value1); + assertEquals2(value1, map.remove(key1)); + assertEquals2(0, map.remove(key1)); + assertEquals(0, map.size()); + + // These are internals, but perhaps worth asserting too. + assertEquals(0, map.assigned); + } + + /* */ + @Test + public void testEmptyKey() { + final int empty = 0; + + map.put(empty, value1); + assertEquals(1, map.size()); + assertFalse(map.isEmpty()); + assertEquals2(value1, map.get(empty)); + assertEquals2(value1, map.getOrDefault(empty, value2)); + assertTrue(map.iterator().hasNext()); + assertEquals(empty, map.iterator().next().key); + assertEquals2(value1, map.iterator().next().value); + + map.remove(empty); + assertEquals2(0, map.get(empty)); + assertEquals(0, map.size()); + + assertEquals2(0, map.put(empty, value1)); + assertEquals2(value1, map.put(empty, value2)); + map.clear(); + assertFalse(map.indexExists(map.indexOf(empty))); + assertEquals2(0, map.put(empty, value1)); + map.clear(); + assertEquals2(0, map.remove(empty)); + } + + /* */ + @Test + public void testMapKeySet() { + map.put(key1, value3); + map.put(key2, value2); + map.put(key3, value1); + + assertSortedListEquals(map.keys().toArray(), key1, key2, key3); + } + + /* */ + @Test + public void testMapKeySetIterator() { + map.put(key1, value3); + map.put(key2, value2); + map.put(key3, value1); + + int counted = 0; + for (IntCursor c : map.keys()) { + assertEquals(map.keys[c.index], c.value); + counted++; + } + assertEquals(counted, map.size()); + } + + /* */ + @Test + public void testClear() { + map.put(key1, value1); + map.put(key2, value1); + map.clear(); + assertEquals(0, map.size()); + + // These are internals, but perhaps worth asserting too. + assertEquals(0, map.assigned); + + // Check values are cleared. + assertEquals2(0, map.put(key1, value1)); + assertEquals2(0, map.remove(key2)); + map.clear(); + + // Check if the map behaves properly upon subsequent use. + testPutWithExpansions(); + } + + /* */ + @Test + public void testRelease() { + map.put(key1, value1); + map.put(key2, value1); + map.release(); + assertEquals(0, map.size()); + + // These are internals, but perhaps worth asserting too. + assertEquals(0, map.assigned); + + // Check if the map behaves properly upon subsequent use. 
+ testPutWithExpansions(); + } + + /* */ + @Test + public void testIterable() { + map.put(key1, value1); + map.put(key2, value2); + map.put(key3, value3); + map.remove(key2); + + int count = 0; + for (IntDoubleHashMap.IntDoubleCursor cursor : map) { + count++; + assertTrue(map.containsKey(cursor.key)); + assertEquals2(cursor.value, map.get(cursor.key)); + + assertEquals2(cursor.value, map.values[cursor.index]); + assertEquals(cursor.key, map.keys[cursor.index]); + } + assertEquals(count, map.size()); + + map.clear(); + assertFalse(map.iterator().hasNext()); + } + + /* */ + @Test + public void testBug_HPPC73_FullCapacityGet() { + final AtomicInteger reallocations = new AtomicInteger(); + final int elements = 0x7F; + map = + new IntDoubleHashMap(elements, 1f) { + @Override + protected double verifyLoadFactor(double loadFactor) { + // Skip load factor sanity range checking. + return loadFactor; + } + + @Override + protected void allocateBuffers(int arraySize) { + super.allocateBuffers(arraySize); + reallocations.incrementAndGet(); + } + }; + + int reallocationsBefore = reallocations.get(); + assertEquals(reallocationsBefore, 1); + for (int i = 1; i <= elements; i++) { + map.put(cast(i), value1); + } + + // Non-existent key. + int outOfSet = cast(elements + 1); + map.remove(outOfSet); + assertFalse(map.containsKey(outOfSet)); + assertEquals(reallocationsBefore, reallocations.get()); + + // Should not expand because we're replacing an existing element. + map.put(key1, value2); + assertEquals(reallocationsBefore, reallocations.get()); + + // Remove from a full map. + map.remove(key1); + assertEquals(reallocationsBefore, reallocations.get()); + map.put(key1, value2); + + // Check expand on "last slot of a full map" condition. + map.put(outOfSet, value1); + assertEquals(reallocationsBefore + 1, reallocations.get()); + } + + @Test + public void testHashCodeEquals() { + IntDoubleHashMap l0 = newInstance(); + assertEquals(0, l0.hashCode()); + assertEquals(l0, newInstance()); + + IntDoubleHashMap l1 = + IntDoubleHashMap.from(newArray(key1, key2, key3), newvArray(value1, value2, value3)); + + IntDoubleHashMap l2 = + IntDoubleHashMap.from(newArray(key2, key1, key3), newvArray(value2, value1, value3)); + + IntDoubleHashMap l3 = IntDoubleHashMap.from(newArray(key1, key2), newvArray(value2, value1)); + + assertEquals(l1.hashCode(), l2.hashCode()); + assertEquals(l1, l2); + + assertNotEquals(l1, l3); + assertNotEquals(l2, l3); + } + + @Test + public void testBug_HPPC37() { + IntDoubleHashMap l1 = IntDoubleHashMap.from(newArray(key1), newvArray(value1)); + + IntDoubleHashMap l2 = IntDoubleHashMap.from(newArray(key2), newvArray(value1)); + + assertNotEquals(l1, l2); + assertNotEquals(l2, l1); + } + + /** Runs random insertions/deletions/clearing and compares the results against {@link HashMap}. 
*/ + @Test + @SuppressWarnings({"rawtypes", "unchecked"}) + public void testAgainstHashMap() { + final Random rnd = RandomizedTest.getRandom(); + final HashMap other = new HashMap(); + + for (int size = 1000; size < 20000; size += 4000) { + other.clear(); + map.clear(); + + for (int round = 0; round < size * 20; round++) { + int key = cast(rnd.nextInt(size)); + if (rnd.nextInt(50) == 0) { + key = 0; + } + + double value = vcast(rnd.nextInt()); + + boolean hadOldValue = map.containsKey(key); + if (rnd.nextBoolean()) { + double previousValue; + if (rnd.nextBoolean()) { + int index = map.indexOf(key); + if (map.indexExists(index)) { + previousValue = map.indexReplace(index, value); + } else { + map.indexInsert(index, key, value); + previousValue = 0; + } + } else { + previousValue = map.put(key, value); + } + assertEquals( + other.put(key, value), ((previousValue) == 0) && !hadOldValue ? null : previousValue); + + assertEquals2(value, map.get(key)); + assertEquals2(value, map.indexGet(map.indexOf(key))); + assertTrue(map.containsKey(key)); + assertTrue(map.indexExists(map.indexOf(key))); + } else { + assertEquals(other.containsKey(key), map.containsKey(key)); + double previousValue = + map.containsKey(key) && rnd.nextBoolean() + ? map.indexRemove(map.indexOf(key)) + : map.remove(key); + assertEquals( + other.remove(key), ((previousValue) == 0) && !hadOldValue ? null : previousValue); + } + + assertEquals(other.size(), map.size()); + } + } + } + + /* + * + */ + @Test + public void testClone() { + this.map.put(key1, value1); + this.map.put(key2, value2); + this.map.put(key3, value3); + + IntDoubleHashMap cloned = map.clone(); + cloned.remove(key1); + + assertSortedListEquals(map.keys().toArray(), key1, key2, key3); + assertSortedListEquals(cloned.keys().toArray(), key2, key3); + } + + /* */ + @Test + public void testMapValues() { + map.put(key1, value3); + map.put(key2, value2); + map.put(key3, value1); + assertSortedListEquals(map.values().toArray(), value1, value2, value3); + + map.clear(); + map.put(key1, value1); + map.put(key2, value2); + map.put(key3, value2); + assertSortedListEquals(map.values().toArray(), value1, value2, value2); + } + + /* */ + @Test + public void testMapValuesIterator() { + map.put(key1, value3); + map.put(key2, value2); + map.put(key3, value1); + + int counted = 0; + for (DoubleCursor c : map.values()) { + assertEquals2(map.values[c.index], c.value); + counted++; + } + assertEquals(counted, map.size()); + } + + /* */ + @Test + public void testEqualsSameClass() { + IntDoubleHashMap l1 = newInstance(); + l1.put(key1, value0); + l1.put(key2, value1); + l1.put(key3, value2); + + IntDoubleHashMap l2 = new IntDoubleHashMap(l1); + l2.putAll(l1); + + IntDoubleHashMap l3 = new IntDoubleHashMap(l2); + l3.putAll(l2); + l3.put(key4, value0); + + assertEquals(l2, l1); + assertEquals(l2.hashCode(), l1.hashCode()); + assertNotEquals(l1, l3); + } + + /* */ + @Test + public void testEqualsSubClass() { + class Sub extends IntDoubleHashMap {} + + IntDoubleHashMap l1 = newInstance(); + l1.put(key1, value0); + l1.put(key2, value1); + l1.put(key3, value2); + + IntDoubleHashMap l2 = new Sub(); + l2.putAll(l1); + l2.put(key4, value3); + + IntDoubleHashMap l3 = new Sub(); + l3.putAll(l2); + + assertNotEquals(l1, l2); + assertEquals(l3.hashCode(), l2.hashCode()); + assertEquals(l3, l2); + } +} diff --git a/lucene/core/src/test/org/apache/lucene/util/hppc/TestIntFloatHashMap.java b/lucene/core/src/test/org/apache/lucene/util/hppc/TestIntFloatHashMap.java new file mode 100644 index 
000000000000..491be681f97b --- /dev/null +++ b/lucene/core/src/test/org/apache/lucene/util/hppc/TestIntFloatHashMap.java @@ -0,0 +1,654 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.lucene.util.hppc; + +import com.carrotsearch.randomizedtesting.RandomizedTest; +import java.util.Arrays; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Random; +import java.util.concurrent.atomic.AtomicInteger; +import org.apache.lucene.tests.util.LuceneTestCase; +import org.junit.Test; + +/** + * Tests for {@link IntFloatHashMap}. + * + *
<p>
Mostly forked and trimmed from com.carrotsearch.hppc.IntFloatHashMapTest + * + *
<p>
github: https://github.com/carrotsearch/hppc release: 0.9.0 + */ +public class TestIntFloatHashMap extends LuceneTestCase { + /* Ready to use key values. */ + + private final int keyE = 0; + private final int key1 = cast(1); + private final int key2 = cast(2); + private final int key3 = cast(3); + private final int key4 = cast(4); + + /** Convert to target type from an integer used to test stuff. */ + private int cast(int v) { + return v; + } + + /** Create a new array of a given type and copy the arguments to this array. */ + private int[] newArray(int... elements) { + return elements; + } + + private static int randomIntBetween(int min, int max) { + return min + random().nextInt(max + 1 - min); + } + + /** Check if the array's content is identical to a given sequence of elements. */ + private static void assertSortedListEquals(int[] array, int... elements) { + assertEquals(elements.length, array.length); + Arrays.sort(array); + Arrays.sort(elements); + assertArrayEquals(elements, array); + } + + /** Check if the array's content is identical to a given sequence of elements. */ + private static void assertSortedListEquals(float[] array, float... elements) { + assertEquals(elements.length, array.length); + Arrays.sort(array); + Arrays.sort(elements); + assertArrayEquals(elements, array); + } + + private final int value0 = vcast(0); + private final int value1 = vcast(1); + private final int value2 = vcast(2); + private final int value3 = vcast(3); + private final int value4 = vcast(4); + + /** Per-test fresh initialized instance. */ + private IntFloatHashMap map = newInstance(); + + private IntFloatHashMap newInstance() { + return new IntFloatHashMap(); + } + + /** Convert to target type from an integer used to test stuff. */ + private int vcast(int value) { + return value; + } + + /** Create a new array of a given type and copy the arguments to this array. */ + /* */ + private float[] newvArray(int... elements) { + float[] v = new float[elements.length]; + for (int i = 0; i < elements.length; i++) { + v[i] = elements[i]; + } + return v; + } + + private void assertSameMap(final IntFloatHashMap c1, final IntFloatHashMap c2) { + assertEquals(c1.size(), c2.size()); + + for (IntFloatHashMap.IntFloatCursor entry : c1) { + assertTrue(c2.containsKey(entry.key)); + assertEquals2(entry.value, c2.get(entry.key)); + } + } + + private static void assertEquals2(float v1, float v2) { + assertEquals(v1, v2, 0f); + } + + private static void assertArrayEquals(float[] v1, float[] v2) { + assertArrayEquals(v1, v2, 0f); + } + + /* */ + @Test + public void testEnsureCapacity() { + final AtomicInteger expands = new AtomicInteger(); + IntFloatHashMap map = + new IntFloatHashMap(0) { + @Override + protected void allocateBuffers(int arraySize) { + super.allocateBuffers(arraySize); + expands.incrementAndGet(); + } + }; + + // Add some elements. + final int max = rarely() ? 
0 : randomIntBetween(0, 250); + for (int i = 0; i < max; i++) { + map.put(cast(i), value0); + } + + final int additions = randomIntBetween(max, max + 5000); + map.ensureCapacity(additions + map.size()); + final int before = expands.get(); + for (int i = 0; i < additions; i++) { + map.put(cast(i), value0); + } + assertEquals(before, expands.get()); + } + + @Test + public void testCursorIndexIsValid() { + map.put(keyE, value1); + map.put(key1, value2); + map.put(key2, value3); + + for (IntFloatHashMap.IntFloatCursor c : map) { + assertTrue(map.indexExists(c.index)); + assertEquals2(c.value, map.indexGet(c.index)); + } + } + + @Test + public void testIndexMethods() { + map.put(keyE, value1); + map.put(key1, value2); + + assertTrue(map.indexOf(keyE) >= 0); + assertTrue(map.indexOf(key1) >= 0); + assertTrue(map.indexOf(key2) < 0); + + assertTrue(map.indexExists(map.indexOf(keyE))); + assertTrue(map.indexExists(map.indexOf(key1))); + assertFalse(map.indexExists(map.indexOf(key2))); + + assertEquals2(value1, map.indexGet(map.indexOf(keyE))); + assertEquals2(value2, map.indexGet(map.indexOf(key1))); + + expectThrows( + AssertionError.class, + () -> { + map.indexGet(map.indexOf(key2)); + }); + + assertEquals2(value1, map.indexReplace(map.indexOf(keyE), value3)); + assertEquals2(value2, map.indexReplace(map.indexOf(key1), value4)); + assertEquals2(value3, map.indexGet(map.indexOf(keyE))); + assertEquals2(value4, map.indexGet(map.indexOf(key1))); + + map.indexInsert(map.indexOf(key2), key2, value1); + assertEquals2(value1, map.indexGet(map.indexOf(key2))); + assertEquals(3, map.size()); + + assertEquals2(value3, map.indexRemove(map.indexOf(keyE))); + assertEquals(2, map.size()); + assertEquals2(value1, map.indexRemove(map.indexOf(key2))); + assertEquals(1, map.size()); + assertTrue(map.indexOf(keyE) < 0); + assertTrue(map.indexOf(key1) >= 0); + assertTrue(map.indexOf(key2) < 0); + } + + /* */ + @Test + public void testCloningConstructor() { + map.put(key1, value1); + map.put(key2, value2); + map.put(key3, value3); + + assertSameMap(map, new IntFloatHashMap(map)); + } + + /* */ + @Test + public void testFromArrays() { + map.put(key1, value1); + map.put(key2, value2); + map.put(key3, value3); + + IntFloatHashMap map2 = + IntFloatHashMap.from(newArray(key1, key2, key3), newvArray(value1, value2, value3)); + + assertSameMap(map, map2); + } + + @Test + public void testGetOrDefault() { + map.put(key2, value2); + assertTrue(map.containsKey(key2)); + + map.put(key1, value1); + assertEquals2(value1, map.getOrDefault(key1, value3)); + assertEquals2(value3, map.getOrDefault(key3, value3)); + map.remove(key1); + assertEquals2(value3, map.getOrDefault(key1, value3)); + } + + /* */ + @Test + public void testPut() { + map.put(key1, value1); + + assertTrue(map.containsKey(key1)); + assertEquals2(value1, map.get(key1)); + } + + /* */ + @Test + public void testPutOverExistingKey() { + map.put(key1, value1); + assertEquals2(value1, map.put(key1, value3)); + assertEquals2(value3, map.get(key1)); + } + + /* */ + @Test + public void testPutWithExpansions() { + final int COUNT = 10000; + final Random rnd = new Random(random().nextInt()); + final HashSet values = new HashSet(); + + for (int i = 0; i < COUNT; i++) { + final int v = rnd.nextInt(); + final boolean hadKey = values.contains(cast(v)); + values.add(cast(v)); + + assertEquals(hadKey, map.containsKey(cast(v))); + map.put(cast(v), vcast(v)); + assertEquals(values.size(), map.size()); + } + assertEquals(values.size(), map.size()); + } + + /* */ + @Test + public void 
testPutAll() { + map.put(key1, value1); + map.put(key2, value1); + + IntFloatHashMap map2 = newInstance(); + + map2.put(key2, value2); + map2.put(keyE, value1); + + // One new key (keyE). + assertEquals(1, map.putAll(map2)); + + // Assert the value under key2 has been replaced. + assertEquals2(value2, map.get(key2)); + + // And key3 has been added. + assertEquals2(value1, map.get(keyE)); + assertEquals(3, map.size()); + } + + /* */ + @Test + public void testPutIfAbsent() { + assertTrue(map.putIfAbsent(key1, value1)); + assertFalse(map.putIfAbsent(key1, value2)); + assertEquals2(value1, map.get(key1)); + } + + @Test + public void testPutOrAdd() { + assertEquals2(value1, map.putOrAdd(key1, value1, value2)); + assertEquals2(value3, map.putOrAdd(key1, value1, value2)); + } + + @Test + public void testAddTo() { + assertEquals2(value1, map.addTo(key1, value1)); + assertEquals2(value3, map.addTo(key1, value2)); + } + + /* */ + @Test + public void testRemove() { + map.put(key1, value1); + assertEquals2(value1, map.remove(key1)); + assertEquals2(0, map.remove(key1)); + assertEquals(0, map.size()); + + // These are internals, but perhaps worth asserting too. + assertEquals(0, map.assigned); + } + + /* */ + @Test + public void testEmptyKey() { + final int empty = 0; + + map.put(empty, value1); + assertEquals(1, map.size()); + assertFalse(map.isEmpty()); + assertEquals2(value1, map.get(empty)); + assertEquals2(value1, map.getOrDefault(empty, value2)); + assertTrue(map.iterator().hasNext()); + assertEquals(empty, map.iterator().next().key); + assertEquals2(value1, map.iterator().next().value); + + map.remove(empty); + assertEquals2(0, map.get(empty)); + assertEquals(0, map.size()); + + assertEquals2(0, map.put(empty, value1)); + assertEquals2(value1, map.put(empty, value2)); + map.clear(); + assertFalse(map.indexExists(map.indexOf(empty))); + assertEquals2(0, map.put(empty, value1)); + map.clear(); + assertEquals2(0, map.remove(empty)); + } + + /* */ + @Test + public void testMapKeySet() { + map.put(key1, value3); + map.put(key2, value2); + map.put(key3, value1); + + assertSortedListEquals(map.keys().toArray(), key1, key2, key3); + } + + /* */ + @Test + public void testMapKeySetIterator() { + map.put(key1, value3); + map.put(key2, value2); + map.put(key3, value1); + + int counted = 0; + for (IntCursor c : map.keys()) { + assertEquals(map.keys[c.index], c.value); + counted++; + } + assertEquals(counted, map.size()); + } + + /* */ + @Test + public void testClear() { + map.put(key1, value1); + map.put(key2, value1); + map.clear(); + assertEquals(0, map.size()); + + // These are internals, but perhaps worth asserting too. + assertEquals(0, map.assigned); + + // Check values are cleared. + assertEquals2(0, map.put(key1, value1)); + assertEquals2(0, map.remove(key2)); + map.clear(); + + // Check if the map behaves properly upon subsequent use. + testPutWithExpansions(); + } + + /* */ + @Test + public void testRelease() { + map.put(key1, value1); + map.put(key2, value1); + map.release(); + assertEquals(0, map.size()); + + // These are internals, but perhaps worth asserting too. + assertEquals(0, map.assigned); + + // Check if the map behaves properly upon subsequent use. 
+ testPutWithExpansions(); + } + + /* */ + @Test + public void testIterable() { + map.put(key1, value1); + map.put(key2, value2); + map.put(key3, value3); + map.remove(key2); + + int count = 0; + for (IntFloatHashMap.IntFloatCursor cursor : map) { + count++; + assertTrue(map.containsKey(cursor.key)); + assertEquals2(cursor.value, map.get(cursor.key)); + + assertEquals2(cursor.value, map.values[cursor.index]); + assertEquals(cursor.key, map.keys[cursor.index]); + } + assertEquals(count, map.size()); + + map.clear(); + assertFalse(map.iterator().hasNext()); + } + + /* */ + @Test + public void testBug_HPPC73_FullCapacityGet() { + final AtomicInteger reallocations = new AtomicInteger(); + final int elements = 0x7F; + map = + new IntFloatHashMap(elements, 1f) { + @Override + protected double verifyLoadFactor(double loadFactor) { + // Skip load factor sanity range checking. + return loadFactor; + } + + @Override + protected void allocateBuffers(int arraySize) { + super.allocateBuffers(arraySize); + reallocations.incrementAndGet(); + } + }; + + int reallocationsBefore = reallocations.get(); + assertEquals(reallocationsBefore, 1); + for (int i = 1; i <= elements; i++) { + map.put(cast(i), value1); + } + + // Non-existent key. + int outOfSet = cast(elements + 1); + map.remove(outOfSet); + assertFalse(map.containsKey(outOfSet)); + assertEquals(reallocationsBefore, reallocations.get()); + + // Should not expand because we're replacing an existing element. + map.put(key1, value2); + assertEquals(reallocationsBefore, reallocations.get()); + + // Remove from a full map. + map.remove(key1); + assertEquals(reallocationsBefore, reallocations.get()); + map.put(key1, value2); + + // Check expand on "last slot of a full map" condition. + map.put(outOfSet, value1); + assertEquals(reallocationsBefore + 1, reallocations.get()); + } + + @Test + public void testHashCodeEquals() { + IntFloatHashMap l0 = newInstance(); + assertEquals(0, l0.hashCode()); + assertEquals(l0, newInstance()); + + IntFloatHashMap l1 = + IntFloatHashMap.from(newArray(key1, key2, key3), newvArray(value1, value2, value3)); + + IntFloatHashMap l2 = + IntFloatHashMap.from(newArray(key2, key1, key3), newvArray(value2, value1, value3)); + + IntFloatHashMap l3 = IntFloatHashMap.from(newArray(key1, key2), newvArray(value2, value1)); + + assertEquals(l1.hashCode(), l2.hashCode()); + assertEquals(l1, l2); + + assertNotEquals(l1, l3); + assertNotEquals(l2, l3); + } + + @Test + public void testBug_HPPC37() { + IntFloatHashMap l1 = IntFloatHashMap.from(newArray(key1), newvArray(value1)); + + IntFloatHashMap l2 = IntFloatHashMap.from(newArray(key2), newvArray(value1)); + + assertNotEquals(l1, l2); + assertNotEquals(l2, l1); + } + + /** Runs random insertions/deletions/clearing and compares the results against {@link HashMap}. 
*/ + @Test + @SuppressWarnings({"rawtypes", "unchecked"}) + public void testAgainstHashMap() { + final Random rnd = RandomizedTest.getRandom(); + final HashMap other = new HashMap(); + + for (int size = 1000; size < 20000; size += 4000) { + other.clear(); + map.clear(); + + for (int round = 0; round < size * 20; round++) { + int key = cast(rnd.nextInt(size)); + if (rnd.nextInt(50) == 0) { + key = 0; + } + + float value = vcast(rnd.nextInt()); + + boolean hadOldValue = map.containsKey(key); + if (rnd.nextBoolean()) { + float previousValue; + if (rnd.nextBoolean()) { + int index = map.indexOf(key); + if (map.indexExists(index)) { + previousValue = map.indexReplace(index, value); + } else { + map.indexInsert(index, key, value); + previousValue = 0; + } + } else { + previousValue = map.put(key, value); + } + assertEquals( + other.put(key, value), ((previousValue) == 0) && !hadOldValue ? null : previousValue); + + assertEquals2(value, map.get(key)); + assertEquals2(value, map.indexGet(map.indexOf(key))); + assertTrue(map.containsKey(key)); + assertTrue(map.indexExists(map.indexOf(key))); + } else { + assertEquals(other.containsKey(key), map.containsKey(key)); + float previousValue = + map.containsKey(key) && rnd.nextBoolean() + ? map.indexRemove(map.indexOf(key)) + : map.remove(key); + assertEquals( + other.remove(key), ((previousValue) == 0) && !hadOldValue ? null : previousValue); + } + + assertEquals(other.size(), map.size()); + } + } + } + + /* + * + */ + @Test + public void testClone() { + this.map.put(key1, value1); + this.map.put(key2, value2); + this.map.put(key3, value3); + + IntFloatHashMap cloned = map.clone(); + cloned.remove(key1); + + assertSortedListEquals(map.keys().toArray(), key1, key2, key3); + assertSortedListEquals(cloned.keys().toArray(), key2, key3); + } + + /* */ + @Test + public void testMapValues() { + map.put(key1, value3); + map.put(key2, value2); + map.put(key3, value1); + assertSortedListEquals(map.values().toArray(), value1, value2, value3); + + map.clear(); + map.put(key1, value1); + map.put(key2, value2); + map.put(key3, value2); + assertSortedListEquals(map.values().toArray(), value1, value2, value2); + } + + /* */ + @Test + public void testMapValuesIterator() { + map.put(key1, value3); + map.put(key2, value2); + map.put(key3, value1); + + int counted = 0; + for (FloatCursor c : map.values()) { + assertEquals2(map.values[c.index], c.value); + counted++; + } + assertEquals(counted, map.size()); + } + + /* */ + @Test + public void testEqualsSameClass() { + IntFloatHashMap l1 = newInstance(); + l1.put(key1, value0); + l1.put(key2, value1); + l1.put(key3, value2); + + IntFloatHashMap l2 = new IntFloatHashMap(l1); + l2.putAll(l1); + + IntFloatHashMap l3 = new IntFloatHashMap(l2); + l3.putAll(l2); + l3.put(key4, value0); + + assertEquals(l2, l1); + assertEquals(l2.hashCode(), l1.hashCode()); + assertNotEquals(l1, l3); + } + + /* */ + @Test + public void testEqualsSubClass() { + class Sub extends IntFloatHashMap {} + + IntFloatHashMap l1 = newInstance(); + l1.put(key1, value0); + l1.put(key2, value1); + l1.put(key3, value2); + + IntFloatHashMap l2 = new Sub(); + l2.putAll(l1); + l2.put(key4, value3); + + IntFloatHashMap l3 = new Sub(); + l3.putAll(l2); + + assertNotEquals(l1, l2); + assertEquals(l3.hashCode(), l2.hashCode()); + assertEquals(l3, l2); + } +} diff --git a/lucene/core/src/test/org/apache/lucene/util/hppc/TestIntIntHashMap.java b/lucene/core/src/test/org/apache/lucene/util/hppc/TestIntIntHashMap.java index f1c036d24581..a0ffdd9c451e 100644 --- 
a/lucene/core/src/test/org/apache/lucene/util/hppc/TestIntIntHashMap.java +++ b/lucene/core/src/test/org/apache/lucene/util/hppc/TestIntIntHashMap.java @@ -24,7 +24,6 @@ import java.util.Random; import java.util.concurrent.atomic.AtomicInteger; import org.apache.lucene.tests.util.LuceneTestCase; -import org.junit.After; import org.junit.Test; /** @@ -78,23 +77,6 @@ private IntIntHashMap newInstance() { return new IntIntHashMap(); } - @After - public void checkEmptySlotsUninitialized() { - if (map != null) { - int occupied = 0; - for (int i = 0; i <= map.mask; i++) { - if (((map.keys[i]) == 0)) { - - } else { - occupied++; - } - } - assertEquals(occupied, map.assigned); - - if (!map.hasEmptyKey) {} - } - } - /** Convert to target type from an integer used to test stuff. */ private int vcast(int value) { return value; @@ -326,10 +308,10 @@ public void testEmptyKey() { map.put(empty, value1); assertEquals(1, map.size()); - assertEquals(false, map.isEmpty()); + assertFalse(map.isEmpty()); assertEquals(value1, map.get(empty)); assertEquals(value1, map.getOrDefault(empty, value2)); - assertEquals(true, map.iterator().hasNext()); + assertTrue(map.iterator().hasNext()); assertEquals(empty, map.iterator().next().key); assertEquals(value1, map.iterator().next().value); @@ -492,8 +474,8 @@ public void testHashCodeEquals() { assertEquals(l1.hashCode(), l2.hashCode()); assertEquals(l1, l2); - assertFalse(l1.equals(l3)); - assertFalse(l2.equals(l3)); + assertNotEquals(l1, l3); + assertNotEquals(l2, l3); } @Test @@ -502,8 +484,8 @@ public void testBug_HPPC37() { IntIntHashMap l2 = IntIntHashMap.from(newArray(key2), newvArray(value1)); - assertFalse(l1.equals(l2)); - assertFalse(l2.equals(l1)); + assertNotEquals(l1, l2); + assertNotEquals(l2, l1); } /** Runs random insertions/deletions/clearing and compares the results against {@link HashMap}. */ diff --git a/lucene/core/src/test/org/apache/lucene/util/hppc/TestIntObjectHashMap.java b/lucene/core/src/test/org/apache/lucene/util/hppc/TestIntObjectHashMap.java index e450e17f7cb0..9dfaaff713ba 100644 --- a/lucene/core/src/test/org/apache/lucene/util/hppc/TestIntObjectHashMap.java +++ b/lucene/core/src/test/org/apache/lucene/util/hppc/TestIntObjectHashMap.java @@ -24,7 +24,6 @@ import java.util.Random; import java.util.concurrent.atomic.AtomicInteger; import org.apache.lucene.tests.util.LuceneTestCase; -import org.junit.After; import org.junit.Test; /** @@ -86,23 +85,6 @@ private IntObjectHashMap newInstance() { return new IntObjectHashMap(); } - @After - public void checkEmptySlotsUninitialized() { - if (map != null) { - int occupied = 0; - for (int i = 0; i <= map.mask; i++) { - if (((map.keys[i]) == 0)) { - - } else { - occupied++; - } - } - assertEquals(occupied, map.assigned); - - if (!map.hasEmptyKey) {} - } - } - /** Convert to target type from an integer used to test stuff. */ private int vcast(int value) { return value; @@ -324,7 +306,7 @@ public void testPutIfAbsent() { public void testRemove() { map.put(key1, value1); assertEquals(value1, map.remove(key1)); - assertEquals(null, map.remove(key1)); + assertNull(map.remove(key1)); assertEquals(0, map.size()); // These are internals, but perhaps worth asserting too. 
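A minimal sketch, for reviewers, of the iteration API the forked maps expose, as exercised by the tests above; the class and variable names here are illustrative only, and the sketch assumes the forked classes keep the upstream HPPC 0.9.0 behavior.

import org.apache.lucene.util.hppc.IntCursor;
import org.apache.lucene.util.hppc.IntIntHashMap;

public class ForkedHppcIterationSketch {
  public static void main(String[] args) {
    IntIntHashMap counts = new IntIntHashMap();
    counts.put(1, 10);
    counts.put(2, 20);

    // Absent keys fall back to the supplied default.
    System.out.println(counts.getOrDefault(3, -1)); // prints -1

    // Entry iteration uses the nested cursor type (IntIntHashMap.IntIntCursor)
    // rather than the former standalone com.carrotsearch.hppc.cursors.IntIntCursor.
    for (IntIntHashMap.IntIntCursor c : counts) {
      System.out.println(c.key + " -> " + c.value + " (slot " + c.index + ")");
    }

    // Key iteration yields IntCursor; its 'value' field holds the key itself.
    for (IntCursor c : counts.keys()) {
      System.out.println("key: " + c.value);
    }
  }
}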
@@ -338,15 +320,15 @@ public void testEmptyKey() { map.put(empty, value1); assertEquals(1, map.size()); - assertEquals(false, map.isEmpty()); + assertFalse(map.isEmpty()); assertEquals(value1, map.get(empty)); assertEquals(value1, map.getOrDefault(empty, value2)); - assertEquals(true, map.iterator().hasNext()); + assertTrue(map.iterator().hasNext()); assertEquals(empty, map.iterator().next().key); assertEquals(value1, map.iterator().next().value); map.remove(empty); - assertEquals(null, map.get(empty)); + assertNull(map.get(empty)); assertEquals(0, map.size()); map.put(empty, null); @@ -359,13 +341,13 @@ public void testEmptyKey() { assertFalse(map.containsKey(empty)); assertNull(map.get(empty)); - assertEquals(null, map.put(empty, value1)); + assertNull(map.put(empty, value1)); assertEquals(value1, map.put(empty, value2)); map.clear(); assertFalse(map.indexExists(map.indexOf(empty))); - assertEquals(null, map.put(empty, value1)); + assertNull(map.put(empty, value1)); map.clear(); - assertEquals(null, map.remove(empty)); + assertNull(map.remove(empty)); } /* */ @@ -405,8 +387,8 @@ public void testClear() { assertEquals(0, map.assigned); // Check values are cleared. - assertEquals(null, map.put(key1, value1)); - assertEquals(null, map.remove(key2)); + assertNull(map.put(key1, value1)); + assertNull(map.remove(key2)); map.clear(); // Check if the map behaves properly upon subsequent use. @@ -514,8 +496,8 @@ public void testHashCodeEquals() { assertEquals(l1.hashCode(), l2.hashCode()); assertEquals(l1, l2); - assertFalse(l1.equals(l3)); - assertFalse(l2.equals(l3)); + assertNotEquals(l1, l3); + assertNotEquals(l2, l3); } @Test @@ -524,8 +506,8 @@ public void testBug_HPPC37() { IntObjectHashMap l2 = IntObjectHashMap.from(newArray(key2), newvArray(value1)); - assertFalse(l1.equals(l2)); - assertFalse(l2.equals(l1)); + assertNotEquals(l1, l2); + assertNotEquals(l2, l1); } /** Runs random insertions/deletions/clearing and compares the results against {@link HashMap}. */ diff --git a/lucene/core/src/test/org/apache/lucene/util/hppc/TestLongArrayList.java b/lucene/core/src/test/org/apache/lucene/util/hppc/TestLongArrayList.java index 0f53a9f37491..c5a37b90eeae 100644 --- a/lucene/core/src/test/org/apache/lucene/util/hppc/TestLongArrayList.java +++ b/lucene/core/src/test/org/apache/lucene/util/hppc/TestLongArrayList.java @@ -418,13 +418,12 @@ public void testStream() { @Test public void testSort() { - list.add(key1, key3, key2); - + list.add(key3, key1, key3, key2); LongArrayList list2 = new LongArrayList(); list2.ensureCapacity(100); list2.addAll(list); assertSame(list2, list2.sort()); - assertEquals(LongArrayList.from(list.stream().sorted().toArray()), list2); + assertEquals(LongArrayList.from(key1, key2, key3, key3), list2); } @Test diff --git a/lucene/core/src/test/org/apache/lucene/util/hppc/TestLongFloatHashMap.java b/lucene/core/src/test/org/apache/lucene/util/hppc/TestLongFloatHashMap.java new file mode 100644 index 000000000000..0f747749b5a0 --- /dev/null +++ b/lucene/core/src/test/org/apache/lucene/util/hppc/TestLongFloatHashMap.java @@ -0,0 +1,654 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.lucene.util.hppc; + +import com.carrotsearch.randomizedtesting.RandomizedTest; +import java.util.Arrays; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Random; +import java.util.concurrent.atomic.AtomicInteger; +import org.apache.lucene.tests.util.LuceneTestCase; +import org.junit.Test; + +/** + * Tests for {@link LongFloatHashMap}. + * + *
<p>
Mostly forked and trimmed from com.carrotsearch.hppc.LongFloatHashMapTest + * + *
<p>
github: https://github.com/carrotsearch/hppc release: 0.9.0 + */ +public class TestLongFloatHashMap extends LuceneTestCase { + /* Ready to use key values. */ + + private final long keyE = 0; + private final long key1 = cast(1); + private final long key2 = cast(2); + private final long key3 = cast(3); + private final long key4 = cast(4); + + /** Convert to target type from an integer used to test stuff. */ + private long cast(int v) { + return v; + } + + /** Create a new array of a given type and copy the arguments to this array. */ + private long[] newArray(long... elements) { + return elements; + } + + private static int randomIntBetween(int min, int max) { + return min + random().nextInt(max + 1 - min); + } + + /** Check if the array's content is identical to a given sequence of elements. */ + private static void assertSortedListEquals(long[] array, long... elements) { + assertEquals(elements.length, array.length); + Arrays.sort(array); + Arrays.sort(elements); + assertArrayEquals(elements, array); + } + + /** Check if the array's content is identical to a given sequence of elements. */ + private static void assertSortedListEquals(float[] array, float... elements) { + assertEquals(elements.length, array.length); + Arrays.sort(array); + Arrays.sort(elements); + assertArrayEquals(elements, array); + } + + private final int value0 = vcast(0); + private final int value1 = vcast(1); + private final int value2 = vcast(2); + private final int value3 = vcast(3); + private final int value4 = vcast(4); + + /** Per-test fresh initialized instance. */ + private LongFloatHashMap map = newInstance(); + + private LongFloatHashMap newInstance() { + return new LongFloatHashMap(); + } + + /** Convert to target type from an integer used to test stuff. */ + private int vcast(int value) { + return value; + } + + /** Create a new array of a given type and copy the arguments to this array. */ + /* */ + private float[] newvArray(int... elements) { + float[] v = new float[elements.length]; + for (int i = 0; i < elements.length; i++) { + v[i] = elements[i]; + } + return v; + } + + private void assertSameMap(final LongFloatHashMap c1, final LongFloatHashMap c2) { + assertEquals(c1.size(), c2.size()); + + for (LongFloatHashMap.LongFloatCursor entry : c1) { + assertTrue(c2.containsKey(entry.key)); + assertEquals2(entry.value, c2.get(entry.key)); + } + } + + private static void assertEquals2(float v1, float v2) { + assertEquals(v1, v2, 0f); + } + + private static void assertArrayEquals(float[] v1, float[] v2) { + assertArrayEquals(v1, v2, 0f); + } + + /* */ + @Test + public void testEnsureCapacity() { + final AtomicInteger expands = new AtomicInteger(); + LongFloatHashMap map = + new LongFloatHashMap(0) { + @Override + protected void allocateBuffers(int arraySize) { + super.allocateBuffers(arraySize); + expands.incrementAndGet(); + } + }; + + // Add some elements. + final int max = rarely() ? 
0 : randomIntBetween(0, 250); + for (int i = 0; i < max; i++) { + map.put(cast(i), value0); + } + + final int additions = randomIntBetween(max, max + 5000); + map.ensureCapacity(additions + map.size()); + final int before = expands.get(); + for (int i = 0; i < additions; i++) { + map.put(cast(i), value0); + } + assertEquals(before, expands.get()); + } + + @Test + public void testCursorIndexIsValid() { + map.put(keyE, value1); + map.put(key1, value2); + map.put(key2, value3); + + for (LongFloatHashMap.LongFloatCursor c : map) { + assertTrue(map.indexExists(c.index)); + assertEquals2(c.value, map.indexGet(c.index)); + } + } + + @Test + public void testIndexMethods() { + map.put(keyE, value1); + map.put(key1, value2); + + assertTrue(map.indexOf(keyE) >= 0); + assertTrue(map.indexOf(key1) >= 0); + assertTrue(map.indexOf(key2) < 0); + + assertTrue(map.indexExists(map.indexOf(keyE))); + assertTrue(map.indexExists(map.indexOf(key1))); + assertFalse(map.indexExists(map.indexOf(key2))); + + assertEquals2(value1, map.indexGet(map.indexOf(keyE))); + assertEquals2(value2, map.indexGet(map.indexOf(key1))); + + expectThrows( + AssertionError.class, + () -> { + map.indexGet(map.indexOf(key2)); + }); + + assertEquals2(value1, map.indexReplace(map.indexOf(keyE), value3)); + assertEquals2(value2, map.indexReplace(map.indexOf(key1), value4)); + assertEquals2(value3, map.indexGet(map.indexOf(keyE))); + assertEquals2(value4, map.indexGet(map.indexOf(key1))); + + map.indexInsert(map.indexOf(key2), key2, value1); + assertEquals2(value1, map.indexGet(map.indexOf(key2))); + assertEquals(3, map.size()); + + assertEquals2(value3, map.indexRemove(map.indexOf(keyE))); + assertEquals(2, map.size()); + assertEquals2(value1, map.indexRemove(map.indexOf(key2))); + assertEquals(1, map.size()); + assertTrue(map.indexOf(keyE) < 0); + assertTrue(map.indexOf(key1) >= 0); + assertTrue(map.indexOf(key2) < 0); + } + + /* */ + @Test + public void testCloningConstructor() { + map.put(key1, value1); + map.put(key2, value2); + map.put(key3, value3); + + assertSameMap(map, new LongFloatHashMap(map)); + } + + /* */ + @Test + public void testFromArrays() { + map.put(key1, value1); + map.put(key2, value2); + map.put(key3, value3); + + LongFloatHashMap map2 = + LongFloatHashMap.from(newArray(key1, key2, key3), newvArray(value1, value2, value3)); + + assertSameMap(map, map2); + } + + @Test + public void testGetOrDefault() { + map.put(key2, value2); + assertTrue(map.containsKey(key2)); + + map.put(key1, value1); + assertEquals2(value1, map.getOrDefault(key1, value3)); + assertEquals2(value3, map.getOrDefault(key3, value3)); + map.remove(key1); + assertEquals2(value3, map.getOrDefault(key1, value3)); + } + + /* */ + @Test + public void testPut() { + map.put(key1, value1); + + assertTrue(map.containsKey(key1)); + assertEquals2(value1, map.get(key1)); + } + + /* */ + @Test + public void testPutOverExistingKey() { + map.put(key1, value1); + assertEquals2(value1, map.put(key1, value3)); + assertEquals2(value3, map.get(key1)); + } + + /* */ + @Test + public void testPutWithExpansions() { + final int COUNT = 10000; + final Random rnd = new Random(random().nextLong()); + final HashSet values = new HashSet(); + + for (int i = 0; i < COUNT; i++) { + final int v = rnd.nextInt(); + final boolean hadKey = values.contains(cast(v)); + values.add(cast(v)); + + assertEquals(hadKey, map.containsKey(cast(v))); + map.put(cast(v), vcast(v)); + assertEquals(values.size(), map.size()); + } + assertEquals(values.size(), map.size()); + } + + /* */ + @Test + public 
void testPutAll() { + map.put(key1, value1); + map.put(key2, value1); + + LongFloatHashMap map2 = newInstance(); + + map2.put(key2, value2); + map2.put(keyE, value1); + + // One new key (keyE). + assertEquals(1, map.putAll(map2)); + + // Assert the value under key2 has been replaced. + assertEquals2(value2, map.get(key2)); + + // And key3 has been added. + assertEquals2(value1, map.get(keyE)); + assertEquals(3, map.size()); + } + + /* */ + @Test + public void testPutIfAbsent() { + assertTrue(map.putIfAbsent(key1, value1)); + assertFalse(map.putIfAbsent(key1, value2)); + assertEquals2(value1, map.get(key1)); + } + + @Test + public void testPutOrAdd() { + assertEquals2(value1, map.putOrAdd(key1, value1, value2)); + assertEquals2(value3, map.putOrAdd(key1, value1, value2)); + } + + @Test + public void testAddTo() { + assertEquals2(value1, map.addTo(key1, value1)); + assertEquals2(value3, map.addTo(key1, value2)); + } + + /* */ + @Test + public void testRemove() { + map.put(key1, value1); + assertEquals2(value1, map.remove(key1)); + assertEquals2(0, map.remove(key1)); + assertEquals(0, map.size()); + + // These are internals, but perhaps worth asserting too. + assertEquals(0, map.assigned); + } + + /* */ + @Test + public void testEmptyKey() { + final int empty = 0; + + map.put(empty, value1); + assertEquals(1, map.size()); + assertFalse(map.isEmpty()); + assertEquals2(value1, map.get(empty)); + assertEquals2(value1, map.getOrDefault(empty, value2)); + assertTrue(map.iterator().hasNext()); + assertEquals(empty, map.iterator().next().key); + assertEquals2(value1, map.iterator().next().value); + + map.remove(empty); + assertEquals2(0, map.get(empty)); + assertEquals(0, map.size()); + + assertEquals2(0, map.put(empty, value1)); + assertEquals2(value1, map.put(empty, value2)); + map.clear(); + assertFalse(map.indexExists(map.indexOf(empty))); + assertEquals2(0, map.put(empty, value1)); + map.clear(); + assertEquals2(0, map.remove(empty)); + } + + /* */ + @Test + public void testMapKeySet() { + map.put(key1, value3); + map.put(key2, value2); + map.put(key3, value1); + + assertSortedListEquals(map.keys().toArray(), key1, key2, key3); + } + + /* */ + @Test + public void testMapKeySetIterator() { + map.put(key1, value3); + map.put(key2, value2); + map.put(key3, value1); + + int counted = 0; + for (LongCursor c : map.keys()) { + assertEquals(map.keys[c.index], c.value); + counted++; + } + assertEquals(counted, map.size()); + } + + /* */ + @Test + public void testClear() { + map.put(key1, value1); + map.put(key2, value1); + map.clear(); + assertEquals(0, map.size()); + + // These are internals, but perhaps worth asserting too. + assertEquals(0, map.assigned); + + // Check values are cleared. + assertEquals2(0, map.put(key1, value1)); + assertEquals2(0, map.remove(key2)); + map.clear(); + + // Check if the map behaves properly upon subsequent use. + testPutWithExpansions(); + } + + /* */ + @Test + public void testRelease() { + map.put(key1, value1); + map.put(key2, value1); + map.release(); + assertEquals(0, map.size()); + + // These are internals, but perhaps worth asserting too. + assertEquals(0, map.assigned); + + // Check if the map behaves properly upon subsequent use. 
+ testPutWithExpansions(); + } + + /* */ + @Test + public void testIterable() { + map.put(key1, value1); + map.put(key2, value2); + map.put(key3, value3); + map.remove(key2); + + int count = 0; + for (LongFloatHashMap.LongFloatCursor cursor : map) { + count++; + assertTrue(map.containsKey(cursor.key)); + assertEquals2(cursor.value, map.get(cursor.key)); + + assertEquals2(cursor.value, map.values[cursor.index]); + assertEquals(cursor.key, map.keys[cursor.index]); + } + assertEquals(count, map.size()); + + map.clear(); + assertFalse(map.iterator().hasNext()); + } + + /* */ + @Test + public void testBug_HPPC73_FullCapacityGet() { + final AtomicInteger reallocations = new AtomicInteger(); + final int elements = 0x7F; + map = + new LongFloatHashMap(elements, 1f) { + @Override + protected double verifyLoadFactor(double loadFactor) { + // Skip load factor sanity range checking. + return loadFactor; + } + + @Override + protected void allocateBuffers(int arraySize) { + super.allocateBuffers(arraySize); + reallocations.incrementAndGet(); + } + }; + + int reallocationsBefore = reallocations.get(); + assertEquals(reallocationsBefore, 1); + for (int i = 1; i <= elements; i++) { + map.put(cast(i), value1); + } + + // Non-existent key. + long outOfSet = cast(elements + 1); + map.remove(outOfSet); + assertFalse(map.containsKey(outOfSet)); + assertEquals(reallocationsBefore, reallocations.get()); + + // Should not expand because we're replacing an existing element. + map.put(key1, value2); + assertEquals(reallocationsBefore, reallocations.get()); + + // Remove from a full map. + map.remove(key1); + assertEquals(reallocationsBefore, reallocations.get()); + map.put(key1, value2); + + // Check expand on "last slot of a full map" condition. + map.put(outOfSet, value1); + assertEquals(reallocationsBefore + 1, reallocations.get()); + } + + @Test + public void testHashCodeEquals() { + LongFloatHashMap l0 = newInstance(); + assertEquals(0, l0.hashCode()); + assertEquals(l0, newInstance()); + + LongFloatHashMap l1 = + LongFloatHashMap.from(newArray(key1, key2, key3), newvArray(value1, value2, value3)); + + LongFloatHashMap l2 = + LongFloatHashMap.from(newArray(key2, key1, key3), newvArray(value2, value1, value3)); + + LongFloatHashMap l3 = LongFloatHashMap.from(newArray(key1, key2), newvArray(value2, value1)); + + assertEquals(l1.hashCode(), l2.hashCode()); + assertEquals(l1, l2); + + assertNotEquals(l1, l3); + assertNotEquals(l2, l3); + } + + @Test + public void testBug_HPPC37() { + LongFloatHashMap l1 = LongFloatHashMap.from(newArray(key1), newvArray(value1)); + + LongFloatHashMap l2 = LongFloatHashMap.from(newArray(key2), newvArray(value1)); + + assertNotEquals(l1, l2); + assertNotEquals(l2, l1); + } + + /** Runs random insertions/deletions/clearing and compares the results against {@link HashMap}. 
*/ + @Test + @SuppressWarnings({"rawtypes", "unchecked"}) + public void testAgainstHashMap() { + final Random rnd = RandomizedTest.getRandom(); + final HashMap other = new HashMap(); + + for (int size = 1000; size < 20000; size += 4000) { + other.clear(); + map.clear(); + + for (int round = 0; round < size * 20; round++) { + long key = cast(rnd.nextInt(size)); + if (rnd.nextInt(50) == 0) { + key = 0; + } + + float value = vcast(rnd.nextInt()); + + boolean hadOldValue = map.containsKey(key); + if (rnd.nextBoolean()) { + float previousValue; + if (rnd.nextBoolean()) { + int index = map.indexOf(key); + if (map.indexExists(index)) { + previousValue = map.indexReplace(index, value); + } else { + map.indexInsert(index, key, value); + previousValue = 0; + } + } else { + previousValue = map.put(key, value); + } + assertEquals( + other.put(key, value), ((previousValue) == 0) && !hadOldValue ? null : previousValue); + + assertEquals2(value, map.get(key)); + assertEquals2(value, map.indexGet(map.indexOf(key))); + assertTrue(map.containsKey(key)); + assertTrue(map.indexExists(map.indexOf(key))); + } else { + assertEquals(other.containsKey(key), map.containsKey(key)); + float previousValue = + map.containsKey(key) && rnd.nextBoolean() + ? map.indexRemove(map.indexOf(key)) + : map.remove(key); + assertEquals( + other.remove(key), ((previousValue) == 0) && !hadOldValue ? null : previousValue); + } + + assertEquals(other.size(), map.size()); + } + } + } + + /* + * + */ + @Test + public void testClone() { + this.map.put(key1, value1); + this.map.put(key2, value2); + this.map.put(key3, value3); + + LongFloatHashMap cloned = map.clone(); + cloned.remove(key1); + + assertSortedListEquals(map.keys().toArray(), key1, key2, key3); + assertSortedListEquals(cloned.keys().toArray(), key2, key3); + } + + /* */ + @Test + public void testMapValues() { + map.put(key1, value3); + map.put(key2, value2); + map.put(key3, value1); + assertSortedListEquals(map.values().toArray(), value1, value2, value3); + + map.clear(); + map.put(key1, value1); + map.put(key2, value2); + map.put(key3, value2); + assertSortedListEquals(map.values().toArray(), value1, value2, value2); + } + + /* */ + @Test + public void testMapValuesIterator() { + map.put(key1, value3); + map.put(key2, value2); + map.put(key3, value1); + + int counted = 0; + for (FloatCursor c : map.values()) { + assertEquals2(map.values[c.index], c.value); + counted++; + } + assertEquals(counted, map.size()); + } + + /* */ + @Test + public void testEqualsSameClass() { + LongFloatHashMap l1 = newInstance(); + l1.put(key1, value0); + l1.put(key2, value1); + l1.put(key3, value2); + + LongFloatHashMap l2 = new LongFloatHashMap(l1); + l2.putAll(l1); + + LongFloatHashMap l3 = new LongFloatHashMap(l2); + l3.putAll(l2); + l3.put(key4, value0); + + assertEquals(l2, l1); + assertEquals(l2.hashCode(), l1.hashCode()); + assertNotEquals(l1, l3); + } + + /* */ + @Test + public void testEqualsSubClass() { + class Sub extends LongFloatHashMap {} + + LongFloatHashMap l1 = newInstance(); + l1.put(key1, value0); + l1.put(key2, value1); + l1.put(key3, value2); + + LongFloatHashMap l2 = new Sub(); + l2.putAll(l1); + l2.put(key4, value3); + + LongFloatHashMap l3 = new Sub(); + l3.putAll(l2); + + assertNotEquals(l1, l2); + assertEquals(l3.hashCode(), l2.hashCode()); + assertEquals(l3, l2); + } +} diff --git a/lucene/core/src/test/org/apache/lucene/util/hppc/TestLongIntHashMap.java b/lucene/core/src/test/org/apache/lucene/util/hppc/TestLongIntHashMap.java index 0d3468c4b5bc..6bb511a86f58 100644 --- 
a/lucene/core/src/test/org/apache/lucene/util/hppc/TestLongIntHashMap.java +++ b/lucene/core/src/test/org/apache/lucene/util/hppc/TestLongIntHashMap.java @@ -24,7 +24,6 @@ import java.util.Random; import java.util.concurrent.atomic.AtomicInteger; import org.apache.lucene.tests.util.LuceneTestCase; -import org.junit.After; import org.junit.Test; /** @@ -86,23 +85,6 @@ private LongIntHashMap newInstance() { return new LongIntHashMap(); } - @After - public void checkEmptySlotsUninitialized() { - if (map != null) { - int occupied = 0; - for (int i = 0; i <= map.mask; i++) { - if (((map.keys[i]) == 0)) { - - } else { - occupied++; - } - } - assertEquals(occupied, map.assigned); - - if (!map.hasEmptyKey) {} - } - } - /** Convert to target type from an integer used to test stuff. */ private int vcast(int value) { return value; @@ -335,10 +317,10 @@ public void testEmptyKey() { map.put(empty, value1); assertEquals(1, map.size()); - assertEquals(false, map.isEmpty()); + assertFalse(map.isEmpty()); assertEquals(value1, map.get(empty)); assertEquals(value1, map.getOrDefault(empty, value2)); - assertEquals(true, map.iterator().hasNext()); + assertTrue(map.iterator().hasNext()); assertEquals(empty, map.iterator().next().key); assertEquals(value1, map.iterator().next().value); @@ -501,8 +483,8 @@ public void testHashCodeEquals() { assertEquals(l1.hashCode(), l2.hashCode()); assertEquals(l1, l2); - assertFalse(l1.equals(l3)); - assertFalse(l2.equals(l3)); + assertNotEquals(l1, l3); + assertNotEquals(l2, l3); } @Test @@ -511,8 +493,8 @@ public void testBug_HPPC37() { LongIntHashMap l2 = LongIntHashMap.from(newArray(key2), newvArray(value1)); - assertFalse(l1.equals(l2)); - assertFalse(l2.equals(l1)); + assertNotEquals(l1, l2); + assertNotEquals(l2, l1); } /** Runs random insertions/deletions/clearing and compares the results against {@link HashMap}. */ diff --git a/lucene/core/src/test/org/apache/lucene/util/hppc/TestLongObjectHashMap.java b/lucene/core/src/test/org/apache/lucene/util/hppc/TestLongObjectHashMap.java index 10b661c258a0..ee35df176dfd 100644 --- a/lucene/core/src/test/org/apache/lucene/util/hppc/TestLongObjectHashMap.java +++ b/lucene/core/src/test/org/apache/lucene/util/hppc/TestLongObjectHashMap.java @@ -24,7 +24,6 @@ import java.util.Random; import java.util.concurrent.atomic.AtomicInteger; import org.apache.lucene.tests.util.LuceneTestCase; -import org.junit.After; import org.junit.Test; /** @@ -86,23 +85,6 @@ private LongObjectHashMap newInstance() { return new LongObjectHashMap(); } - @After - public void checkEmptySlotsUninitialized() { - if (map != null) { - int occupied = 0; - for (int i = 0; i <= map.mask; i++) { - if (((map.keys[i]) == 0)) { - - } else { - occupied++; - } - } - assertEquals(occupied, map.assigned); - - if (!map.hasEmptyKey) {} - } - } - /** Convert to target type from an integer used to test stuff. */ private int vcast(int value) { return value; @@ -325,7 +307,7 @@ public void testPutIfAbsent() { public void testRemove() { map.put(key1, value1); assertEquals(value1, map.remove(key1)); - assertEquals(null, map.remove(key1)); + assertNull(map.remove(key1)); assertEquals(0, map.size()); // These are internals, but perhaps worth asserting too. 
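The lucene/facet changes further below follow one mechanical pattern: swap the com.carrotsearch.hppc imports for the forked org.apache.lucene.util.hppc classes, and reference cursor types as nested classes of the map rather than as standalone com.carrotsearch.hppc.cursors types. A hedged before/after sketch of a typical call site (names are illustrative, assuming the forked classes keep the upstream HPPC 0.9.0 behavior):

// Before: import com.carrotsearch.hppc.LongIntHashMap;
//         import com.carrotsearch.hppc.cursors.LongIntCursor;
//         for (LongIntCursor c : counts) { ... }

// After: the cursor is a nested type and no external dependency is needed.
import org.apache.lucene.util.hppc.LongIntHashMap;

public class HppcMigrationSketch {
  public static void main(String[] args) {
    LongIntHashMap counts = new LongIntHashMap();
    counts.put(42L, 1);
    counts.put(7L, 3);
    for (LongIntHashMap.LongIntCursor c : counts) {
      System.out.println(c.key + " -> " + c.value);
    }
  }
}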
@@ -339,15 +321,15 @@ public void testEmptyKey() { map.put(empty, value1); assertEquals(1, map.size()); - assertEquals(false, map.isEmpty()); + assertFalse(map.isEmpty()); assertEquals(value1, map.get(empty)); assertEquals(value1, map.getOrDefault(empty, value2)); - assertEquals(true, map.iterator().hasNext()); + assertTrue(map.iterator().hasNext()); assertEquals(empty, map.iterator().next().key); assertEquals(value1, map.iterator().next().value); map.remove(empty); - assertEquals(null, map.get(empty)); + assertNull(map.get(empty)); assertEquals(0, map.size()); map.put(empty, null); @@ -360,13 +342,13 @@ public void testEmptyKey() { assertFalse(map.containsKey(empty)); assertNull(map.get(empty)); - assertEquals(null, map.put(empty, value1)); + assertNull(map.put(empty, value1)); assertEquals(value1, map.put(empty, value2)); map.clear(); assertFalse(map.indexExists(map.indexOf(empty))); - assertEquals(null, map.put(empty, value1)); + assertNull(map.put(empty, value1)); map.clear(); - assertEquals(null, map.remove(empty)); + assertNull(map.remove(empty)); } /* */ @@ -406,8 +388,8 @@ public void testClear() { assertEquals(0, map.assigned); // Check values are cleared. - assertEquals(null, map.put(key1, value1)); - assertEquals(null, map.remove(key2)); + assertNull(map.put(key1, value1)); + assertNull(map.remove(key2)); map.clear(); // Check if the map behaves properly upon subsequent use. @@ -515,8 +497,8 @@ public void testHashCodeEquals() { assertEquals(l1.hashCode(), l2.hashCode()); assertEquals(l1, l2); - assertFalse(l1.equals(l3)); - assertFalse(l2.equals(l3)); + assertNotEquals(l1, l3); + assertNotEquals(l2, l3); } @Test @@ -525,8 +507,8 @@ public void testBug_HPPC37() { LongObjectHashMap l2 = LongObjectHashMap.from(newArray(key2), newvArray(value1)); - assertFalse(l1.equals(l2)); - assertFalse(l2.equals(l1)); + assertNotEquals(l1, l2); + assertNotEquals(l2, l1); } /** Runs random insertions/deletions/clearing and compares the results against {@link HashMap}. 
*/ diff --git a/lucene/facet/build.gradle b/lucene/facet/build.gradle index 87b1950fcc35..ad7dc4bf8644 100644 --- a/lucene/facet/build.gradle +++ b/lucene/facet/build.gradle @@ -22,7 +22,6 @@ description = 'Faceted indexing and search capabilities' dependencies { moduleApi project(':lucene:core') - moduleImplementation 'com.carrotsearch:hppc' moduleTestImplementation project(':lucene:test-framework') moduleTestImplementation project(':lucene:queries') diff --git a/lucene/facet/src/java/module-info.java b/lucene/facet/src/java/module-info.java index b1583142747b..2aa1e3e494e5 100644 --- a/lucene/facet/src/java/module-info.java +++ b/lucene/facet/src/java/module-info.java @@ -16,9 +16,7 @@ */ /** Faceted indexing and search capabilities */ -@SuppressWarnings({"requires-automatic"}) module org.apache.lucene.facet { - requires com.carrotsearch.hppc; requires org.apache.lucene.core; exports org.apache.lucene.facet; diff --git a/lucene/facet/src/java/org/apache/lucene/facet/LongValueFacetCounts.java b/lucene/facet/src/java/org/apache/lucene/facet/LongValueFacetCounts.java index d39b0e847a70..cbbe805d96ef 100644 --- a/lucene/facet/src/java/org/apache/lucene/facet/LongValueFacetCounts.java +++ b/lucene/facet/src/java/org/apache/lucene/facet/LongValueFacetCounts.java @@ -17,8 +17,6 @@ package org.apache.lucene.facet; -import com.carrotsearch.hppc.LongIntHashMap; -import com.carrotsearch.hppc.cursors.LongIntCursor; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; @@ -37,6 +35,7 @@ import org.apache.lucene.util.Bits; import org.apache.lucene.util.InPlaceMergeSorter; import org.apache.lucene.util.PriorityQueue; +import org.apache.lucene.util.hppc.LongIntHashMap; /** * {@link Facets} implementation that computes counts for all unique long values, more efficiently @@ -391,7 +390,7 @@ public FacetResult getAllChildren(String dim, String... 
path) throws IOException } } if (hashCounts.size() != 0) { - for (LongIntCursor c : hashCounts) { + for (LongIntHashMap.LongIntCursor c : hashCounts) { int count = c.value; if (count != 0) { labelValues.add(new LabelAndValue(Long.toString(c.key), c.value)); @@ -443,7 +442,7 @@ protected boolean lessThan(Entry a, Entry b) { if (hashCounts.size() != 0) { childCount += hashCounts.size(); - for (LongIntCursor c : hashCounts) { + for (LongIntHashMap.LongIntCursor c : hashCounts) { int count = c.value; if (count != 0) { if (e == null) { @@ -493,7 +492,7 @@ public FacetResult getAllChildrenSortByValue() { long[] hashValues = new long[this.hashCounts.size()]; int upto = 0; - for (LongIntCursor c : this.hashCounts) { + for (LongIntHashMap.LongIntCursor c : this.hashCounts) { if (c.value != 0) { hashCounts[upto] = c.value; hashValues[upto] = c.key; @@ -592,7 +591,7 @@ public String toString() { } if (hashCounts.size() != 0) { - for (LongIntCursor c : hashCounts) { + for (LongIntHashMap.LongIntCursor c : hashCounts) { if (c.value != 0) { b.append(" "); b.append(c.key); diff --git a/lucene/facet/src/java/org/apache/lucene/facet/StringValueFacetCounts.java b/lucene/facet/src/java/org/apache/lucene/facet/StringValueFacetCounts.java index 048582d6e436..c3b03e1bcd8e 100644 --- a/lucene/facet/src/java/org/apache/lucene/facet/StringValueFacetCounts.java +++ b/lucene/facet/src/java/org/apache/lucene/facet/StringValueFacetCounts.java @@ -16,8 +16,6 @@ */ package org.apache.lucene.facet; -import com.carrotsearch.hppc.IntIntHashMap; -import com.carrotsearch.hppc.cursors.IntIntCursor; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; @@ -37,6 +35,7 @@ import org.apache.lucene.util.Bits; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.LongValues; +import org.apache.lucene.util.hppc.IntIntHashMap; /** * Compute facet counts from a previously indexed {@link SortedSetDocValues} or {@link @@ -150,7 +149,7 @@ public FacetResult getAllChildren(String dim, String... path) throws IOException List labelValues = new ArrayList<>(); if (sparseCounts != null) { - for (IntIntCursor sparseCount : sparseCounts) { + for (IntIntHashMap.IntIntCursor sparseCount : sparseCounts) { int count = sparseCount.value; final BytesRef term = docValues.lookupOrd(sparseCount.key); labelValues.add(new LabelAndValue(term.utf8ToString(), count)); @@ -186,7 +185,7 @@ public FacetResult getTopChildren(int topN, String dim, String... 
path) throws I int childCount = 0; // total number of labels with non-zero count if (sparseCounts != null) { - for (IntIntCursor sparseCount : sparseCounts) { + for (IntIntHashMap.IntIntCursor sparseCount : sparseCounts) { childCount++; // every count in sparseValues should be non-zero int ord = sparseCount.key; int count = sparseCount.value; diff --git a/lucene/facet/src/java/org/apache/lucene/facet/range/OverlappingLongRangeCounter.java b/lucene/facet/src/java/org/apache/lucene/facet/range/OverlappingLongRangeCounter.java index 9c09f29841fa..045d8d8e2202 100644 --- a/lucene/facet/src/java/org/apache/lucene/facet/range/OverlappingLongRangeCounter.java +++ b/lucene/facet/src/java/org/apache/lucene/facet/range/OverlappingLongRangeCounter.java @@ -16,14 +16,14 @@ */ package org.apache.lucene.facet.range; -import com.carrotsearch.hppc.IntArrayList; -import com.carrotsearch.hppc.LongArrayList; -import com.carrotsearch.hppc.LongIntHashMap; -import com.carrotsearch.hppc.cursors.IntCursor; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import org.apache.lucene.util.FixedBitSet; +import org.apache.lucene.util.hppc.IntArrayList; +import org.apache.lucene.util.hppc.IntCursor; +import org.apache.lucene.util.hppc.LongArrayList; +import org.apache.lucene.util.hppc.LongIntHashMap; /** * This implementation supports requested ranges that overlap. Because of this, we use a @@ -260,7 +260,8 @@ private static List buildElementaryIntervals(LongRange[] ranges) } } - LongArrayList endsList = new LongArrayList(endsMap.keys()); + LongArrayList endsList = new LongArrayList(endsMap.size()); + endsList.addAll(endsMap.keys()); Arrays.sort(endsList.buffer, 0, endsList.size()); // Build elementaryIntervals (a 1D Venn diagram): diff --git a/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/FloatTaxonomyFacets.java b/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/FloatTaxonomyFacets.java index 784d3d36f939..928663481f32 100644 --- a/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/FloatTaxonomyFacets.java +++ b/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/FloatTaxonomyFacets.java @@ -16,12 +16,12 @@ */ package org.apache.lucene.facet.taxonomy; -import com.carrotsearch.hppc.IntFloatHashMap; import java.io.IOException; import org.apache.lucene.facet.FacetsCollector; import org.apache.lucene.facet.FacetsConfig; import org.apache.lucene.facet.TopOrdAndFloatQueue; import org.apache.lucene.facet.TopOrdAndNumberQueue; +import org.apache.lucene.util.hppc.IntFloatHashMap; /** Base class for all taxonomy-based facets that aggregate to float. */ abstract class FloatTaxonomyFacets extends TaxonomyFacets { diff --git a/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/IntTaxonomyFacets.java b/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/IntTaxonomyFacets.java index e32dd052d818..af08e5445c85 100644 --- a/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/IntTaxonomyFacets.java +++ b/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/IntTaxonomyFacets.java @@ -16,13 +16,13 @@ */ package org.apache.lucene.facet.taxonomy; -import com.carrotsearch.hppc.IntIntHashMap; import java.io.IOException; import java.util.Comparator; import org.apache.lucene.facet.FacetsCollector; import org.apache.lucene.facet.FacetsConfig; import org.apache.lucene.facet.TopOrdAndIntQueue; import org.apache.lucene.facet.TopOrdAndNumberQueue; +import org.apache.lucene.util.hppc.IntIntHashMap; /** Base class for all taxonomy-based facets that aggregate to int. 
*/ abstract class IntTaxonomyFacets extends TaxonomyFacets { diff --git a/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/OrdinalMappingLeafReader.java b/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/OrdinalMappingLeafReader.java index 90c73b7d7a0b..13916eb4a0a1 100644 --- a/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/OrdinalMappingLeafReader.java +++ b/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/OrdinalMappingLeafReader.java @@ -16,7 +16,6 @@ */ package org.apache.lucene.facet.taxonomy; -import com.carrotsearch.hppc.IntArrayList; import java.io.IOException; import java.util.Arrays; import java.util.HashSet; @@ -29,6 +28,7 @@ import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.util.hppc.IntArrayList; /** * A {@link org.apache.lucene.index.FilterLeafReader} for updating facets ordinal references, based diff --git a/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/TaxonomyFacets.java b/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/TaxonomyFacets.java index ce2a97f7add7..f9f873dc2593 100644 --- a/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/TaxonomyFacets.java +++ b/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/TaxonomyFacets.java @@ -17,9 +17,6 @@ package org.apache.lucene.facet.taxonomy; -import com.carrotsearch.hppc.IntArrayList; -import com.carrotsearch.hppc.IntIntHashMap; -import com.carrotsearch.hppc.cursors.IntIntCursor; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; @@ -38,6 +35,8 @@ import org.apache.lucene.facet.TopOrdAndIntQueue; import org.apache.lucene.facet.TopOrdAndNumberQueue; import org.apache.lucene.util.PriorityQueue; +import org.apache.lucene.util.hppc.IntArrayList; +import org.apache.lucene.util.hppc.IntIntHashMap; /** Base class for all taxonomy-based facets impls. */ abstract class TaxonomyFacets extends Facets { @@ -370,7 +369,7 @@ public FacetResult getAllChildren(String dim, String... path) throws IOException List ordValues = new ArrayList<>(); if (sparseCounts != null) { - for (IntIntCursor ordAndCount : sparseCounts) { + for (IntIntHashMap.IntIntCursor ordAndCount : sparseCounts) { int ord = ordAndCount.key; int count = ordAndCount.value; Number value = getAggregationValue(ord); @@ -486,7 +485,7 @@ protected TopChildrenForPath getTopChildrenForPath(DimConfig dimConfig, int path // TODO: would be faster if we had a "get the following children" API? 
then we // can make a single pass over the hashmap if (sparseCounts != null) { - for (IntIntCursor c : sparseCounts) { + for (IntIntHashMap.IntIntCursor c : sparseCounts) { int ord = c.key; int count = c.value; if (parents.get(ord) == pathOrd && count > 0) { diff --git a/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/directory/DirectoryTaxonomyReader.java b/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/directory/DirectoryTaxonomyReader.java index fd18835dd177..3ef2a6329b3c 100644 --- a/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/directory/DirectoryTaxonomyReader.java +++ b/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/directory/DirectoryTaxonomyReader.java @@ -16,8 +16,6 @@ */ package org.apache.lucene.facet.taxonomy.directory; -import com.carrotsearch.hppc.IntArrayList; -import com.carrotsearch.hppc.cursors.IntCursor; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; @@ -53,6 +51,8 @@ import org.apache.lucene.util.InPlaceMergeSorter; import org.apache.lucene.util.RamUsageEstimator; import org.apache.lucene.util.StringSorter; +import org.apache.lucene.util.hppc.IntArrayList; +import org.apache.lucene.util.hppc.IntCursor; /** * A {@link TaxonomyReader} which retrieves stored taxonomy information from a {@link Directory}. diff --git a/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/directory/TaxonomyIndexArrays.java b/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/directory/TaxonomyIndexArrays.java index 0aa6d9b973a0..2d7975b4bb43 100644 --- a/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/directory/TaxonomyIndexArrays.java +++ b/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/directory/TaxonomyIndexArrays.java @@ -16,7 +16,6 @@ */ package org.apache.lucene.facet.taxonomy.directory; -import com.carrotsearch.hppc.IntHashSet; import java.io.IOException; import java.util.ArrayList; import java.util.Collection; @@ -32,6 +31,7 @@ import org.apache.lucene.util.Accountables; import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.RamUsageEstimator; +import org.apache.lucene.util.hppc.IntHashSet; /** * A {@link ParallelTaxonomyArrays} that are initialized from the taxonomy index. 
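For context on the cursor changes above (not part of the patch): the forked maps declare their cursor types as nested classes, so call sites such as LongValueFacetCounts, StringValueFacetCounts and TaxonomyFacets now spell the cursor as LongIntHashMap.LongIntCursor or IntIntHashMap.IntIntCursor instead of importing com.carrotsearch.hppc.cursors.*. A minimal, hypothetical sketch of the new iteration pattern, assuming the forked org.apache.lucene.util.hppc.LongIntHashMap API (the example class name is made up):

import org.apache.lucene.util.hppc.LongIntHashMap;

// Hypothetical sketch, not part of the patch: iterate a forked primitive map
// through its nested cursor type, as the facet classes above now do.
public class CursorIterationExample {
  public static void main(String[] args) {
    LongIntHashMap hashCounts = new LongIntHashMap();
    hashCounts.put(42L, 3);
    hashCounts.put(7L, 1);
    for (LongIntHashMap.LongIntCursor c : hashCounts) {
      // c.key is the long key, c.value is the mapped count.
      if (c.value != 0) {
        System.out.println(c.key + " -> " + c.value);
      }
    }
  }
}

Keeping the key/value cursors nested means each fork stays a single top-level class, which may be why the call sites were rewritten this way rather than adding separate top-level LongIntCursor/IntIntCursor classes.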
diff --git a/lucene/join/build.gradle b/lucene/join/build.gradle index 5cf8f80bb79c..6ed9da28e3d1 100644 --- a/lucene/join/build.gradle +++ b/lucene/join/build.gradle @@ -21,7 +21,6 @@ description = 'Index-time and Query-time joins for normalized content' dependencies { moduleApi project(':lucene:core') - moduleImplementation 'com.carrotsearch:hppc' moduleTestImplementation project(':lucene:test-framework') } \ No newline at end of file diff --git a/lucene/join/src/java/module-info.java b/lucene/join/src/java/module-info.java index 9ab3c8152dda..80d2261b5a54 100644 --- a/lucene/join/src/java/module-info.java +++ b/lucene/join/src/java/module-info.java @@ -16,10 +16,8 @@ */ /** Index-time and Query-time joins for normalized content */ -@SuppressWarnings({"requires-automatic"}) module org.apache.lucene.join { requires org.apache.lucene.core; - requires com.carrotsearch.hppc; exports org.apache.lucene.search.join; } diff --git a/lucene/join/src/java/org/apache/lucene/search/join/DiversifyingNearestChildrenKnnCollector.java b/lucene/join/src/java/org/apache/lucene/search/join/DiversifyingNearestChildrenKnnCollector.java index f79958e812b1..b71adb4f0141 100644 --- a/lucene/join/src/java/org/apache/lucene/search/join/DiversifyingNearestChildrenKnnCollector.java +++ b/lucene/join/src/java/org/apache/lucene/search/join/DiversifyingNearestChildrenKnnCollector.java @@ -17,13 +17,13 @@ package org.apache.lucene.search.join; -import com.carrotsearch.hppc.IntIntHashMap; import org.apache.lucene.search.AbstractKnnCollector; import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TotalHits; import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.BitSet; +import org.apache.lucene.util.hppc.IntIntHashMap; /** * This collects the nearest children vectors. Diversifying the results over the provided parent diff --git a/lucene/join/src/java/org/apache/lucene/search/join/JoinUtil.java b/lucene/join/src/java/org/apache/lucene/search/join/JoinUtil.java index 93b25e424096..a22d920b83ab 100644 --- a/lucene/join/src/java/org/apache/lucene/search/join/JoinUtil.java +++ b/lucene/join/src/java/org/apache/lucene/search/join/JoinUtil.java @@ -16,12 +16,6 @@ */ package org.apache.lucene.search.join; -import com.carrotsearch.hppc.LongArrayList; -import com.carrotsearch.hppc.LongFloatHashMap; -import com.carrotsearch.hppc.LongHashSet; -import com.carrotsearch.hppc.LongIntHashMap; -import com.carrotsearch.hppc.cursors.LongCursor; -import com.carrotsearch.hppc.procedures.LongFloatProcedure; import java.io.IOException; import java.util.Arrays; import java.util.Iterator; @@ -48,6 +42,11 @@ import org.apache.lucene.search.SimpleCollector; import org.apache.lucene.search.join.DocValuesTermsCollector.Function; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.hppc.LongArrayList; +import org.apache.lucene.util.hppc.LongCursor; +import org.apache.lucene.util.hppc.LongFloatHashMap; +import org.apache.lucene.util.hppc.LongHashSet; +import org.apache.lucene.util.hppc.LongIntHashMap; /** * Utility for query time joining. 
@@ -292,7 +291,8 @@ public org.apache.lucene.search.ScoreMode scoreMode() { } fromSearcher.search(fromQuery, collector); - LongArrayList joinValuesList = new LongArrayList(joinValues); + LongArrayList joinValuesList = new LongArrayList(joinValues.size()); + joinValuesList.addAll(joinValues); Arrays.sort(joinValuesList.buffer, 0, joinValuesList.size()); Iterator iterator = joinValuesList.iterator(); @@ -588,4 +588,10 @@ private interface LongFloatFunction { float apply(long value); } + + /** Similar to {@link java.util.function.BiConsumer} for primitive arguments. */ + private interface LongFloatProcedure { + + void apply(long key, float value); + } } diff --git a/lucene/join/src/java/org/apache/lucene/search/join/PointInSetIncludingScoreQuery.java b/lucene/join/src/java/org/apache/lucene/search/join/PointInSetIncludingScoreQuery.java index 40f11aa48005..218730087a8f 100644 --- a/lucene/join/src/java/org/apache/lucene/search/join/PointInSetIncludingScoreQuery.java +++ b/lucene/join/src/java/org/apache/lucene/search/join/PointInSetIncludingScoreQuery.java @@ -17,8 +17,6 @@ package org.apache.lucene.search.join; -import com.carrotsearch.hppc.FloatArrayList; -import com.carrotsearch.hppc.cursors.FloatCursor; import java.io.IOException; import java.util.Iterator; import java.util.function.BiFunction; @@ -49,6 +47,8 @@ import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.FixedBitSet; import org.apache.lucene.util.RamUsageEstimator; +import org.apache.lucene.util.hppc.FloatArrayList; +import org.apache.lucene.util.hppc.FloatCursor; // A TermsIncludingScoreQuery variant for point values: abstract class PointInSetIncludingScoreQuery extends Query implements Accountable { diff --git a/lucene/spatial-extras/build.gradle b/lucene/spatial-extras/build.gradle index 7ba6648eede3..baa772f3053f 100644 --- a/lucene/spatial-extras/build.gradle +++ b/lucene/spatial-extras/build.gradle @@ -30,8 +30,6 @@ dependencies { moduleApi 'org.locationtech.spatial4j:spatial4j' moduleApi 'io.sgr:s2-geometry-library-java' - moduleImplementation 'com.carrotsearch:hppc' - moduleTestImplementation project(':lucene:test-framework') moduleTestImplementation project(':lucene:spatial-test-fixtures') moduleTestImplementation 'org.locationtech.jts:jts-core' diff --git a/lucene/spatial-extras/src/java/module-info.java b/lucene/spatial-extras/src/java/module-info.java index 608684764fd6..30fb97668efa 100644 --- a/lucene/spatial-extras/src/java/module-info.java +++ b/lucene/spatial-extras/src/java/module-info.java @@ -20,7 +20,6 @@ module org.apache.lucene.spatial_extras { requires spatial4j; requires s2.geometry.library.java; - requires com.carrotsearch.hppc; requires org.apache.lucene.core; requires org.apache.lucene.spatial3d; diff --git a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/util/CachingDoubleValueSource.java b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/util/CachingDoubleValueSource.java index 234efc45de5b..2f641fccd565 100644 --- a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/util/CachingDoubleValueSource.java +++ b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/util/CachingDoubleValueSource.java @@ -16,13 +16,13 @@ */ package org.apache.lucene.spatial.util; -import com.carrotsearch.hppc.IntDoubleHashMap; import java.io.IOException; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.DoubleValues; import org.apache.lucene.search.DoubleValuesSource; import org.apache.lucene.search.Explanation; import 
org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.util.hppc.IntDoubleHashMap; /** * Caches the doubleVal of another value source in a HashMap so that it is computed only once. diff --git a/lucene/spatial3d/build.gradle b/lucene/spatial3d/build.gradle index 184c39cd4bb2..f7e3618bef59 100644 --- a/lucene/spatial3d/build.gradle +++ b/lucene/spatial3d/build.gradle @@ -22,8 +22,6 @@ description = '3D spatial planar geometry APIs' dependencies { moduleApi project(':lucene:core') - moduleImplementation 'com.carrotsearch:hppc' - moduleTestImplementation project(':lucene:test-framework') // We have to exclude ourselves because spatial-test-fixtures depend diff --git a/lucene/spatial3d/src/java/module-info.java b/lucene/spatial3d/src/java/module-info.java index 7fd67a03b55d..9bffb23a4689 100644 --- a/lucene/spatial3d/src/java/module-info.java +++ b/lucene/spatial3d/src/java/module-info.java @@ -16,10 +16,8 @@ */ /** 3D spatial planar geometry APIs */ -@SuppressWarnings({"requires-automatic"}) module org.apache.lucene.spatial3d { requires org.apache.lucene.core; - requires com.carrotsearch.hppc; exports org.apache.lucene.spatial3d; exports org.apache.lucene.spatial3d.geom; diff --git a/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoStandardPath.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoStandardPath.java index 9385e4ad1db1..1b62e6472df8 100755 --- a/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoStandardPath.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoStandardPath.java @@ -16,7 +16,6 @@ */ package org.apache.lucene.spatial3d.geom; -import com.carrotsearch.hppc.IntArrayList; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; @@ -25,6 +24,7 @@ import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; +import org.apache.lucene.util.hppc.IntArrayList; /** * GeoShape representing a path across the surface of the globe, with a specified half-width. Path @@ -1988,9 +1988,9 @@ public PathComponent getRoot() { } private void mergeTop() { - depthStack.remove(depthStack.size() - 1); + depthStack.removeAt(depthStack.size() - 1); PathComponent secondComponent = componentStack.remove(componentStack.size() - 1); - int newDepth = depthStack.remove(depthStack.size() - 1) + 1; + int newDepth = depthStack.removeAt(depthStack.size() - 1) + 1; PathComponent firstComponent = componentStack.remove(componentStack.size() - 1); depthStack.add(newDepth); componentStack.add(new PathNode(firstComponent, secondComponent)); diff --git a/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/StandardObjects.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/StandardObjects.java index 22e949bbd97b..f40691d71468 100644 --- a/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/StandardObjects.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/StandardObjects.java @@ -17,9 +17,9 @@ package org.apache.lucene.spatial3d.geom; -import com.carrotsearch.hppc.IntObjectHashMap; import java.util.HashMap; import java.util.Map; +import org.apache.lucene.util.hppc.IntObjectHashMap; /** * Lookup tables for classes that can be serialized using a code. 
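For context on the GeoStandardPath change above (not part of the patch): in the forked IntArrayList, removal by position is spelled removeAt(index) and returns the removed element, which is what the renamed depth-stack pops rely on. A minimal, hypothetical sketch assuming the forked org.apache.lucene.util.hppc.IntArrayList API (the example class name is made up):

import org.apache.lucene.util.hppc.IntArrayList;

// Hypothetical sketch, not part of the patch: removeAt(index) removes by
// position and returns the removed value, so popping the last element is:
public class RemoveAtExample {
  public static void main(String[] args) {
    IntArrayList depthStack = new IntArrayList();
    depthStack.add(1);
    depthStack.add(2);
    depthStack.add(5);
    int top = depthStack.removeAt(depthStack.size() - 1); // top == 5
    System.out.println("popped " + top + ", " + depthStack.size() + " left");
  }
}

The explicit removeAt name presumably avoids ambiguity with value-based removal in the hppc-style API.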
diff --git a/versions.lock b/versions.lock index 4f9f7e2ab31f..b11bf043dad7 100644 --- a/versions.lock +++ b/versions.lock @@ -1,5 +1,4 @@ # Run ./gradlew --write-locks to regenerate this file -com.carrotsearch:hppc:0.9.1 (1 constraints: 0c050736) com.carrotsearch.randomizedtesting:randomizedtesting-runner:2.8.1 (1 constraints: 0d050e36) com.ibm.icu:icu4j:74.2 (1 constraints: e1041731) commons-codec:commons-codec:1.13 (1 constraints: d904f430) diff --git a/versions.props b/versions.props index f55132e18ffd..13243bd65f67 100644 --- a/versions.props +++ b/versions.props @@ -1,5 +1,4 @@ com.carrotsearch.randomizedtesting:*=2.8.1 -com.carrotsearch:hppc=0.9.1 com.carrotsearch:procfork=1.0.6 com.google.errorprone:*=2.18.0 com.ibm.icu:icu4j=74.2 From b056c5a249a1eaabdef1f9c11a667e4e172fea54 Mon Sep 17 00:00:00 2001 From: Bruno Roustant Date: Sun, 26 May 2024 22:19:43 +0200 Subject: [PATCH 02/10] Change fork version to 0.10.0 --- lucene/core/src/java/org/apache/lucene/util/hppc/BitMixer.java | 2 +- .../src/java/org/apache/lucene/util/hppc/FloatArrayList.java | 2 +- .../core/src/java/org/apache/lucene/util/hppc/IntArrayList.java | 2 +- .../src/java/org/apache/lucene/util/hppc/IntDoubleHashMap.java | 2 +- .../src/java/org/apache/lucene/util/hppc/IntFloatHashMap.java | 2 +- .../core/src/java/org/apache/lucene/util/hppc/IntHashSet.java | 2 +- .../src/java/org/apache/lucene/util/hppc/IntIntHashMap.java | 2 +- .../src/java/org/apache/lucene/util/hppc/IntObjectHashMap.java | 2 +- .../src/java/org/apache/lucene/util/hppc/LongArrayList.java | 2 +- .../src/java/org/apache/lucene/util/hppc/LongFloatHashMap.java | 2 +- .../core/src/java/org/apache/lucene/util/hppc/LongHashSet.java | 2 +- .../src/java/org/apache/lucene/util/hppc/LongIntHashMap.java | 2 +- .../src/java/org/apache/lucene/util/hppc/LongObjectHashMap.java | 2 +- 13 files changed, 13 insertions(+), 13 deletions(-) diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/BitMixer.java b/lucene/core/src/java/org/apache/lucene/util/hppc/BitMixer.java index 36d239a7bfb7..cc9cf3e46289 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hppc/BitMixer.java +++ b/lucene/core/src/java/org/apache/lucene/util/hppc/BitMixer.java @@ -23,7 +23,7 @@ * *

<p>Forked from com.carrotsearch.hppc.BitMixer * - * <p>github: https://github.com/carrotsearch/hppc release: 0.9.0 + * <p>
github: https://github.com/carrotsearch/hppc release: 0.10.0 */ public final class BitMixer { diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/FloatArrayList.java b/lucene/core/src/java/org/apache/lucene/util/hppc/FloatArrayList.java index 35ad322f3102..6b05a8c052b7 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hppc/FloatArrayList.java +++ b/lucene/core/src/java/org/apache/lucene/util/hppc/FloatArrayList.java @@ -30,7 +30,7 @@ * *

<p>Mostly forked and trimmed from com.carrotsearch.hppc.FloatArrayList * - * <p>github: https://github.com/carrotsearch/hppc release 0.9.0 + * <p>
github: https://github.com/carrotsearch/hppc release 0.10.0 */ public class FloatArrayList implements Iterable, Cloneable, Accountable { private static final long BASE_RAM_BYTES_USED = diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/IntArrayList.java b/lucene/core/src/java/org/apache/lucene/util/hppc/IntArrayList.java index 42e2c9544ac8..d4d2da87a71d 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hppc/IntArrayList.java +++ b/lucene/core/src/java/org/apache/lucene/util/hppc/IntArrayList.java @@ -31,7 +31,7 @@ * *

<p>Mostly forked and trimmed from com.carrotsearch.hppc.IntArrayList * - * <p>github: https://github.com/carrotsearch/hppc release 0.9.0 + * <p>
github: https://github.com/carrotsearch/hppc release 0.10.0 */ public class IntArrayList implements Iterable, Cloneable, Accountable { private static final long BASE_RAM_BYTES_USED = diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/IntDoubleHashMap.java b/lucene/core/src/java/org/apache/lucene/util/hppc/IntDoubleHashMap.java index 19aac4e7b05e..65febb8e1355 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hppc/IntDoubleHashMap.java +++ b/lucene/core/src/java/org/apache/lucene/util/hppc/IntDoubleHashMap.java @@ -40,7 +40,7 @@ * *

<p>Mostly forked and trimmed from com.carrotsearch.hppc.IntDoubleHashMap * - * <p>github: https://github.com/carrotsearch/hppc release 0.9.0 + * <p>
github: https://github.com/carrotsearch/hppc release 0.10.0 */ public class IntDoubleHashMap implements Iterable, Accountable, Cloneable { diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/IntFloatHashMap.java b/lucene/core/src/java/org/apache/lucene/util/hppc/IntFloatHashMap.java index 06224fb4a8a3..d0991718e891 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hppc/IntFloatHashMap.java +++ b/lucene/core/src/java/org/apache/lucene/util/hppc/IntFloatHashMap.java @@ -40,7 +40,7 @@ * *

<p>Mostly forked and trimmed from com.carrotsearch.hppc.IntFloatHashMap * - * <p>github: https://github.com/carrotsearch/hppc release 0.9.0 + * <p>
github: https://github.com/carrotsearch/hppc release 0.10.0 */ public class IntFloatHashMap implements Iterable, Accountable, Cloneable { diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/IntHashSet.java b/lucene/core/src/java/org/apache/lucene/util/hppc/IntHashSet.java index 2b2213a9242a..4d09641fc370 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hppc/IntHashSet.java +++ b/lucene/core/src/java/org/apache/lucene/util/hppc/IntHashSet.java @@ -31,7 +31,7 @@ * *

<p>Mostly forked and trimmed from com.carrotsearch.hppc.IntHashSet * - * <p>github: https://github.com/carrotsearch/hppc release 0.9.0 + * <p>
github: https://github.com/carrotsearch/hppc release 0.10.0 */ public class IntHashSet implements Iterable, Accountable, Cloneable { diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/IntIntHashMap.java b/lucene/core/src/java/org/apache/lucene/util/hppc/IntIntHashMap.java index b33024faf965..f75be4168470 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hppc/IntIntHashMap.java +++ b/lucene/core/src/java/org/apache/lucene/util/hppc/IntIntHashMap.java @@ -30,7 +30,7 @@ * *

<p>Mostly forked and trimmed from com.carrotsearch.hppc.IntIntHashMap * - * <p>github: https://github.com/carrotsearch/hppc release 0.9.0 + * <p>
github: https://github.com/carrotsearch/hppc release 0.10.0 */ public class IntIntHashMap implements Iterable, Accountable, Cloneable { diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/IntObjectHashMap.java b/lucene/core/src/java/org/apache/lucene/util/hppc/IntObjectHashMap.java index ec4a99291dd1..d5d233fc06dd 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hppc/IntObjectHashMap.java +++ b/lucene/core/src/java/org/apache/lucene/util/hppc/IntObjectHashMap.java @@ -30,7 +30,7 @@ * *

<p>Mostly forked and trimmed from com.carrotsearch.hppc.IntObjectHashMap * - * <p>github: https://github.com/carrotsearch/hppc release 0.9.0 + * <p>
github: https://github.com/carrotsearch/hppc release 0.10.0 */ @SuppressWarnings("unchecked") public class IntObjectHashMap diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/LongArrayList.java b/lucene/core/src/java/org/apache/lucene/util/hppc/LongArrayList.java index a8aa680443e0..ff5f93918c1b 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hppc/LongArrayList.java +++ b/lucene/core/src/java/org/apache/lucene/util/hppc/LongArrayList.java @@ -31,7 +31,7 @@ * *

<p>Mostly forked and trimmed from com.carrotsearch.hppc.LongArrayList * - * <p>github: https://github.com/carrotsearch/hppc release 0.9.0 + * <p>
github: https://github.com/carrotsearch/hppc release 0.10.0 */ public class LongArrayList implements Iterable, Cloneable, Accountable { private static final long BASE_RAM_BYTES_USED = diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/LongFloatHashMap.java b/lucene/core/src/java/org/apache/lucene/util/hppc/LongFloatHashMap.java index e86fd89f4844..cc9e2923b9d1 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hppc/LongFloatHashMap.java +++ b/lucene/core/src/java/org/apache/lucene/util/hppc/LongFloatHashMap.java @@ -40,7 +40,7 @@ * *

<p>Mostly forked and trimmed from com.carrotsearch.hppc.LongFloatHashMap * - * <p>github: https://github.com/carrotsearch/hppc release 0.9.0 + * <p>
github: https://github.com/carrotsearch/hppc release 0.10.0 */ public class LongFloatHashMap implements Iterable, Accountable, Cloneable { diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/LongHashSet.java b/lucene/core/src/java/org/apache/lucene/util/hppc/LongHashSet.java index 696c96083f5e..8c1864d8b63a 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hppc/LongHashSet.java +++ b/lucene/core/src/java/org/apache/lucene/util/hppc/LongHashSet.java @@ -30,7 +30,7 @@ * *

<p>Mostly forked and trimmed from com.carrotsearch.hppc.LongHashSet * - * <p>github: https://github.com/carrotsearch/hppc release 0.9.0 + * <p>
github: https://github.com/carrotsearch/hppc release 0.10.0 */ public class LongHashSet implements Iterable, Accountable, Cloneable { diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/LongIntHashMap.java b/lucene/core/src/java/org/apache/lucene/util/hppc/LongIntHashMap.java index 14c3cf045a60..5993a57be889 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hppc/LongIntHashMap.java +++ b/lucene/core/src/java/org/apache/lucene/util/hppc/LongIntHashMap.java @@ -30,7 +30,7 @@ * *

<p>Mostly forked and trimmed from com.carrotsearch.hppc.LongIntHashMap * - * <p>github: https://github.com/carrotsearch/hppc release 0.9.0 + * <p>
github: https://github.com/carrotsearch/hppc release 0.10.0 */ public class LongIntHashMap implements Iterable, Accountable, Cloneable { diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/LongObjectHashMap.java b/lucene/core/src/java/org/apache/lucene/util/hppc/LongObjectHashMap.java index 4cfbb63c0b2b..cf7804e2aef3 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hppc/LongObjectHashMap.java +++ b/lucene/core/src/java/org/apache/lucene/util/hppc/LongObjectHashMap.java @@ -30,7 +30,7 @@ * *

<p>Mostly forked and trimmed from com.carrotsearch.hppc.LongObjectHashMap * - * <p>github: https://github.com/carrotsearch/hppc release 0.9.0 + * <p>
github: https://github.com/carrotsearch/hppc release 0.10.0 */ @SuppressWarnings("unchecked") public class LongObjectHashMap From a890864fd82b5593571db18e1ef408908693ccb2 Mon Sep 17 00:00:00 2001 From: Bruno Roustant Date: Mon, 27 May 2024 09:05:06 +0200 Subject: [PATCH 03/10] Fix FloatArrayList.resize --- .../src/java/org/apache/lucene/util/hppc/FloatArrayList.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/FloatArrayList.java b/lucene/core/src/java/org/apache/lucene/util/hppc/FloatArrayList.java index 6b05a8c052b7..70c3651acbb1 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hppc/FloatArrayList.java +++ b/lucene/core/src/java/org/apache/lucene/util/hppc/FloatArrayList.java @@ -283,9 +283,9 @@ protected void ensureBufferSpace(int expectedAdditions) { public void resize(int newSize) { if (newSize <= buffer.length) { if (newSize < elementsCount) { - Arrays.fill(buffer, newSize, elementsCount, 0L); + Arrays.fill(buffer, newSize, elementsCount, 0f); } else { - Arrays.fill(buffer, elementsCount, newSize, 0L); + Arrays.fill(buffer, elementsCount, newSize, 0f); } } else { ensureCapacity(newSize); From 63bb1c003103c2fc293728f2abb4082a6f74cf78 Mon Sep 17 00:00:00 2001 From: Bruno Roustant Date: Mon, 27 May 2024 09:24:06 +0200 Subject: [PATCH 04/10] Adjust the empty key constant as in hppc upstream. --- .../src/java/org/apache/lucene/util/hppc/FloatArrayList.java | 2 +- .../src/java/org/apache/lucene/util/hppc/LongArrayList.java | 2 +- .../src/java/org/apache/lucene/util/hppc/LongFloatHashMap.java | 2 +- .../src/java/org/apache/lucene/util/hppc/LongIntHashMap.java | 2 +- .../src/java/org/apache/lucene/util/hppc/LongObjectHashMap.java | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/FloatArrayList.java b/lucene/core/src/java/org/apache/lucene/util/hppc/FloatArrayList.java index 70c3651acbb1..762ef608dc06 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hppc/FloatArrayList.java +++ b/lucene/core/src/java/org/apache/lucene/util/hppc/FloatArrayList.java @@ -310,7 +310,7 @@ public void trimToSize() { * #elementsCount} field to zero. */ public void clear() { - Arrays.fill(buffer, 0, elementsCount, 0); + Arrays.fill(buffer, 0, elementsCount, 0f); this.elementsCount = 0; } diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/LongArrayList.java b/lucene/core/src/java/org/apache/lucene/util/hppc/LongArrayList.java index ff5f93918c1b..834236b41e7f 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hppc/LongArrayList.java +++ b/lucene/core/src/java/org/apache/lucene/util/hppc/LongArrayList.java @@ -311,7 +311,7 @@ public void trimToSize() { * #elementsCount} field to zero. 
*/ public void clear() { - Arrays.fill(buffer, 0, elementsCount, 0); + Arrays.fill(buffer, 0, elementsCount, 0L); this.elementsCount = 0; } diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/LongFloatHashMap.java b/lucene/core/src/java/org/apache/lucene/util/hppc/LongFloatHashMap.java index cc9e2923b9d1..dbe3c0e5d1dd 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hppc/LongFloatHashMap.java +++ b/lucene/core/src/java/org/apache/lucene/util/hppc/LongFloatHashMap.java @@ -383,7 +383,7 @@ public void clear() { assigned = 0; hasEmptyKey = false; - Arrays.fill(keys, 0); + Arrays.fill(keys, 0L); /* */ } diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/LongIntHashMap.java b/lucene/core/src/java/org/apache/lucene/util/hppc/LongIntHashMap.java index 5993a57be889..a853d8c969c6 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hppc/LongIntHashMap.java +++ b/lucene/core/src/java/org/apache/lucene/util/hppc/LongIntHashMap.java @@ -373,7 +373,7 @@ public void clear() { assigned = 0; hasEmptyKey = false; - Arrays.fill(keys, 0); + Arrays.fill(keys, 0L); /* */ } diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/LongObjectHashMap.java b/lucene/core/src/java/org/apache/lucene/util/hppc/LongObjectHashMap.java index cf7804e2aef3..64e0db507c11 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hppc/LongObjectHashMap.java +++ b/lucene/core/src/java/org/apache/lucene/util/hppc/LongObjectHashMap.java @@ -340,7 +340,7 @@ public void clear() { assigned = 0; hasEmptyKey = false; - Arrays.fill(keys, 0); + Arrays.fill(keys, 0L); /* */ } From 7e424879806799fba4c5458e8b46a5788641e853 Mon Sep 17 00:00:00 2001 From: Bruno Roustant Date: Mon, 27 May 2024 09:32:41 +0200 Subject: [PATCH 05/10] Add @lucene.internal --- .../java/org/apache/lucene/util/hppc/AbstractIterator.java | 2 ++ .../core/src/java/org/apache/lucene/util/hppc/BitMixer.java | 2 ++ .../apache/lucene/util/hppc/BufferAllocationException.java | 6 +++++- .../src/java/org/apache/lucene/util/hppc/CharCursor.java | 6 +++++- .../src/java/org/apache/lucene/util/hppc/CharHashSet.java | 4 +++- .../java/org/apache/lucene/util/hppc/CharObjectHashMap.java | 4 +++- .../src/java/org/apache/lucene/util/hppc/DoubleCursor.java | 6 +++++- .../java/org/apache/lucene/util/hppc/FloatArrayList.java | 2 ++ .../src/java/org/apache/lucene/util/hppc/FloatCursor.java | 6 +++++- .../java/org/apache/lucene/util/hppc/HashContainers.java | 6 +++++- .../src/java/org/apache/lucene/util/hppc/IntArrayList.java | 2 ++ .../src/java/org/apache/lucene/util/hppc/IntCursor.java | 6 +++++- .../java/org/apache/lucene/util/hppc/IntDoubleHashMap.java | 2 ++ .../java/org/apache/lucene/util/hppc/IntFloatHashMap.java | 2 ++ .../src/java/org/apache/lucene/util/hppc/IntHashSet.java | 2 ++ .../src/java/org/apache/lucene/util/hppc/IntIntHashMap.java | 2 ++ .../java/org/apache/lucene/util/hppc/IntObjectHashMap.java | 2 ++ .../src/java/org/apache/lucene/util/hppc/LongArrayList.java | 2 ++ .../src/java/org/apache/lucene/util/hppc/LongCursor.java | 6 +++++- .../java/org/apache/lucene/util/hppc/LongFloatHashMap.java | 2 ++ .../src/java/org/apache/lucene/util/hppc/LongHashSet.java | 2 ++ .../java/org/apache/lucene/util/hppc/LongIntHashMap.java | 2 ++ .../java/org/apache/lucene/util/hppc/LongObjectHashMap.java | 2 ++ .../src/java/org/apache/lucene/util/hppc/ObjectCursor.java | 6 +++++- 24 files changed, 74 insertions(+), 10 deletions(-) diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/AbstractIterator.java 
b/lucene/core/src/java/org/apache/lucene/util/hppc/AbstractIterator.java index 147274ccfe68..e49a2108ac95 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hppc/AbstractIterator.java +++ b/lucene/core/src/java/org/apache/lucene/util/hppc/AbstractIterator.java @@ -24,6 +24,8 @@ * Simplifies the implementation of iterators a bit. Modeled loosely after Google Guava's API. * *
<p>
Forked from com.carrotsearch.hppc.AbstractIterator + * + * @lucene.internal */ public abstract class AbstractIterator implements Iterator { private static final int NOT_CACHED = 0; diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/BitMixer.java b/lucene/core/src/java/org/apache/lucene/util/hppc/BitMixer.java index cc9cf3e46289..51b29396a0f5 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hppc/BitMixer.java +++ b/lucene/core/src/java/org/apache/lucene/util/hppc/BitMixer.java @@ -24,6 +24,8 @@ *

<p>Forked from com.carrotsearch.hppc.BitMixer * * <p>
github: https://github.com/carrotsearch/hppc release: 0.10.0 + * + * @lucene.internal */ public final class BitMixer { diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/BufferAllocationException.java b/lucene/core/src/java/org/apache/lucene/util/hppc/BufferAllocationException.java index 2c20772b147e..bc00b32cd41b 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hppc/BufferAllocationException.java +++ b/lucene/core/src/java/org/apache/lucene/util/hppc/BufferAllocationException.java @@ -20,7 +20,11 @@ import java.util.IllegalFormatException; import java.util.Locale; -/** BufferAllocationException forked from HPPC */ +/** + * BufferAllocationException forked from HPPC. + * + * @lucene.internal + */ public class BufferAllocationException extends RuntimeException { public BufferAllocationException(String message) { super(message); diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/CharCursor.java b/lucene/core/src/java/org/apache/lucene/util/hppc/CharCursor.java index 6f18a87b0a1e..ce57dbfa8eea 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hppc/CharCursor.java +++ b/lucene/core/src/java/org/apache/lucene/util/hppc/CharCursor.java @@ -17,7 +17,11 @@ package org.apache.lucene.util.hppc; -/** Forked from HPPC, holding int index and char value */ +/** + * Forked from HPPC, holding int index and char value. + * + * @lucene.internal + */ public final class CharCursor { /** * The current value's index in the container this cursor belongs to. The meaning of this index is diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/CharHashSet.java b/lucene/core/src/java/org/apache/lucene/util/hppc/CharHashSet.java index c35f87afd8a5..5ac04527123d 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hppc/CharHashSet.java +++ b/lucene/core/src/java/org/apache/lucene/util/hppc/CharHashSet.java @@ -39,7 +39,9 @@ * *

<p>Mostly forked and trimmed from com.carrotsearch.hppc.CharHashSet * - * <p>github: https://github.com/carrotsearch/hppc release 0.9.0 + * <p>
github: https://github.com/carrotsearch/hppc release 0.10.0 + * + * @lucene.internal */ public class CharHashSet implements Iterable, Accountable, Cloneable { diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/CharObjectHashMap.java b/lucene/core/src/java/org/apache/lucene/util/hppc/CharObjectHashMap.java index 6db9e4affb15..973f799f2cdb 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hppc/CharObjectHashMap.java +++ b/lucene/core/src/java/org/apache/lucene/util/hppc/CharObjectHashMap.java @@ -40,7 +40,9 @@ * *

<p>Mostly forked and trimmed from com.carrotsearch.hppc.CharObjectHashMap * - * <p>github: https://github.com/carrotsearch/hppc release 0.9.0 + * <p>
github: https://github.com/carrotsearch/hppc release 0.10.0 + * + * @lucene.internal */ @SuppressWarnings("unchecked") public class CharObjectHashMap diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/DoubleCursor.java b/lucene/core/src/java/org/apache/lucene/util/hppc/DoubleCursor.java index 20771b9e4df7..cadadac1e5e0 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hppc/DoubleCursor.java +++ b/lucene/core/src/java/org/apache/lucene/util/hppc/DoubleCursor.java @@ -17,7 +17,11 @@ package org.apache.lucene.util.hppc; -/** Forked from HPPC, holding int index and double value */ +/** + * Forked from HPPC, holding int index and double value. + * + * @lucene.internal + */ public final class DoubleCursor { /** * The current value's index in the container this cursor belongs to. The meaning of this index is diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/FloatArrayList.java b/lucene/core/src/java/org/apache/lucene/util/hppc/FloatArrayList.java index 762ef608dc06..e0e17b7b8238 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hppc/FloatArrayList.java +++ b/lucene/core/src/java/org/apache/lucene/util/hppc/FloatArrayList.java @@ -31,6 +31,8 @@ *

<p>Mostly forked and trimmed from com.carrotsearch.hppc.FloatArrayList * * <p>
github: https://github.com/carrotsearch/hppc release 0.10.0 + * + * @lucene.internal */ public class FloatArrayList implements Iterable, Cloneable, Accountable { private static final long BASE_RAM_BYTES_USED = diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/FloatCursor.java b/lucene/core/src/java/org/apache/lucene/util/hppc/FloatCursor.java index 0fdd7889b789..b270f3ba3bf1 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hppc/FloatCursor.java +++ b/lucene/core/src/java/org/apache/lucene/util/hppc/FloatCursor.java @@ -17,7 +17,11 @@ package org.apache.lucene.util.hppc; -/** Forked from HPPC, holding int index and float value */ +/** + * Forked from HPPC, holding int index and float value. + * + * @lucene.internal + */ public final class FloatCursor { /** * The current value's index in the container this cursor belongs to. The meaning of this index is diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/HashContainers.java b/lucene/core/src/java/org/apache/lucene/util/hppc/HashContainers.java index 7859c457eb9e..ec8d44b40ffa 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hppc/HashContainers.java +++ b/lucene/core/src/java/org/apache/lucene/util/hppc/HashContainers.java @@ -21,7 +21,11 @@ import java.util.concurrent.atomic.AtomicInteger; -/** Constants for primitive maps. */ +/** + * Constants for primitive maps. + * + * @lucene.internal + */ class HashContainers { static final int DEFAULT_EXPECTED_ELEMENTS = 4; diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/IntArrayList.java b/lucene/core/src/java/org/apache/lucene/util/hppc/IntArrayList.java index d4d2da87a71d..286ebc0d4472 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hppc/IntArrayList.java +++ b/lucene/core/src/java/org/apache/lucene/util/hppc/IntArrayList.java @@ -32,6 +32,8 @@ *

<p>Mostly forked and trimmed from com.carrotsearch.hppc.IntArrayList * * <p>
github: https://github.com/carrotsearch/hppc release 0.10.0 + * + * @lucene.internal */ public class IntArrayList implements Iterable, Cloneable, Accountable { private static final long BASE_RAM_BYTES_USED = diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/IntCursor.java b/lucene/core/src/java/org/apache/lucene/util/hppc/IntCursor.java index 72a83e0d416f..cc3b0b63399b 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hppc/IntCursor.java +++ b/lucene/core/src/java/org/apache/lucene/util/hppc/IntCursor.java @@ -17,7 +17,11 @@ package org.apache.lucene.util.hppc; -/** Forked from HPPC, holding int index and int value */ +/** + * Forked from HPPC, holding int index and int value. + * + * @lucene.internal + */ public final class IntCursor { /** * The current value's index in the container this cursor belongs to. The meaning of this index is diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/IntDoubleHashMap.java b/lucene/core/src/java/org/apache/lucene/util/hppc/IntDoubleHashMap.java index 65febb8e1355..242336495611 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hppc/IntDoubleHashMap.java +++ b/lucene/core/src/java/org/apache/lucene/util/hppc/IntDoubleHashMap.java @@ -41,6 +41,8 @@ *

<p>Mostly forked and trimmed from com.carrotsearch.hppc.IntDoubleHashMap * * <p>
github: https://github.com/carrotsearch/hppc release 0.10.0 + * + * @lucene.internal */ public class IntDoubleHashMap implements Iterable, Accountable, Cloneable { diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/IntFloatHashMap.java b/lucene/core/src/java/org/apache/lucene/util/hppc/IntFloatHashMap.java index d0991718e891..536fd0b1d5ce 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hppc/IntFloatHashMap.java +++ b/lucene/core/src/java/org/apache/lucene/util/hppc/IntFloatHashMap.java @@ -41,6 +41,8 @@ *

<p>Mostly forked and trimmed from com.carrotsearch.hppc.IntFloatHashMap * * <p>
github: https://github.com/carrotsearch/hppc release 0.10.0 + * + * @lucene.internal */ public class IntFloatHashMap implements Iterable, Accountable, Cloneable { diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/IntHashSet.java b/lucene/core/src/java/org/apache/lucene/util/hppc/IntHashSet.java index 4d09641fc370..e67c299dfaf3 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hppc/IntHashSet.java +++ b/lucene/core/src/java/org/apache/lucene/util/hppc/IntHashSet.java @@ -32,6 +32,8 @@ *

<p>Mostly forked and trimmed from com.carrotsearch.hppc.IntHashSet * * <p>
github: https://github.com/carrotsearch/hppc release 0.10.0 + * + * @lucene.internal */ public class IntHashSet implements Iterable, Accountable, Cloneable { diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/IntIntHashMap.java b/lucene/core/src/java/org/apache/lucene/util/hppc/IntIntHashMap.java index f75be4168470..4e69ce912627 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hppc/IntIntHashMap.java +++ b/lucene/core/src/java/org/apache/lucene/util/hppc/IntIntHashMap.java @@ -31,6 +31,8 @@ *

<p>Mostly forked and trimmed from com.carrotsearch.hppc.IntIntHashMap * * <p>
github: https://github.com/carrotsearch/hppc release 0.10.0 + * + * @lucene.internal */ public class IntIntHashMap implements Iterable, Accountable, Cloneable { diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/IntObjectHashMap.java b/lucene/core/src/java/org/apache/lucene/util/hppc/IntObjectHashMap.java index d5d233fc06dd..79bae37532a1 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hppc/IntObjectHashMap.java +++ b/lucene/core/src/java/org/apache/lucene/util/hppc/IntObjectHashMap.java @@ -31,6 +31,8 @@ *

<p>Mostly forked and trimmed from com.carrotsearch.hppc.IntObjectHashMap * * <p>
github: https://github.com/carrotsearch/hppc release 0.10.0 + * + * @lucene.internal */ @SuppressWarnings("unchecked") public class IntObjectHashMap diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/LongArrayList.java b/lucene/core/src/java/org/apache/lucene/util/hppc/LongArrayList.java index 834236b41e7f..b971cb36f1c5 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hppc/LongArrayList.java +++ b/lucene/core/src/java/org/apache/lucene/util/hppc/LongArrayList.java @@ -32,6 +32,8 @@ *

<p>Mostly forked and trimmed from com.carrotsearch.hppc.LongArrayList * * <p>
github: https://github.com/carrotsearch/hppc release 0.10.0 + * + * @lucene.internal */ public class LongArrayList implements Iterable, Cloneable, Accountable { private static final long BASE_RAM_BYTES_USED = diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/LongCursor.java b/lucene/core/src/java/org/apache/lucene/util/hppc/LongCursor.java index 2a424254306d..b7b37f7160c1 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hppc/LongCursor.java +++ b/lucene/core/src/java/org/apache/lucene/util/hppc/LongCursor.java @@ -17,7 +17,11 @@ package org.apache.lucene.util.hppc; -/** Forked from HPPC, holding int index and long value */ +/** + * Forked from HPPC, holding int index and long value. + * + * @lucene.internal + */ public final class LongCursor { /** * The current value's index in the container this cursor belongs to. The meaning of this index is diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/LongFloatHashMap.java b/lucene/core/src/java/org/apache/lucene/util/hppc/LongFloatHashMap.java index dbe3c0e5d1dd..9c826272c5fb 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hppc/LongFloatHashMap.java +++ b/lucene/core/src/java/org/apache/lucene/util/hppc/LongFloatHashMap.java @@ -41,6 +41,8 @@ *

<p>Mostly forked and trimmed from com.carrotsearch.hppc.LongFloatHashMap * * <p>
github: https://github.com/carrotsearch/hppc release 0.10.0 + * + * @lucene.internal */ public class LongFloatHashMap implements Iterable, Accountable, Cloneable { diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/LongHashSet.java b/lucene/core/src/java/org/apache/lucene/util/hppc/LongHashSet.java index 8c1864d8b63a..0bbe9e9d98f6 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hppc/LongHashSet.java +++ b/lucene/core/src/java/org/apache/lucene/util/hppc/LongHashSet.java @@ -31,6 +31,8 @@ *

<p>Mostly forked and trimmed from com.carrotsearch.hppc.LongHashSet * * <p>
github: https://github.com/carrotsearch/hppc release 0.10.0 + * + * @lucene.internal */ public class LongHashSet implements Iterable, Accountable, Cloneable { diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/LongIntHashMap.java b/lucene/core/src/java/org/apache/lucene/util/hppc/LongIntHashMap.java index a853d8c969c6..a5f4120d6fc8 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hppc/LongIntHashMap.java +++ b/lucene/core/src/java/org/apache/lucene/util/hppc/LongIntHashMap.java @@ -31,6 +31,8 @@ *

<p>Mostly forked and trimmed from com.carrotsearch.hppc.LongIntHashMap * * <p>
github: https://github.com/carrotsearch/hppc release 0.10.0 + * + * @lucene.internal */ public class LongIntHashMap implements Iterable, Accountable, Cloneable { diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/LongObjectHashMap.java b/lucene/core/src/java/org/apache/lucene/util/hppc/LongObjectHashMap.java index 64e0db507c11..3a29e363abf7 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hppc/LongObjectHashMap.java +++ b/lucene/core/src/java/org/apache/lucene/util/hppc/LongObjectHashMap.java @@ -31,6 +31,8 @@ *

<p>Mostly forked and trimmed from com.carrotsearch.hppc.LongObjectHashMap * * <p>
github: https://github.com/carrotsearch/hppc release 0.10.0 + * + * @lucene.internal */ @SuppressWarnings("unchecked") public class LongObjectHashMap diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/ObjectCursor.java b/lucene/core/src/java/org/apache/lucene/util/hppc/ObjectCursor.java index 45d0cb56b629..fa033b276e56 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hppc/ObjectCursor.java +++ b/lucene/core/src/java/org/apache/lucene/util/hppc/ObjectCursor.java @@ -17,7 +17,11 @@ package org.apache.lucene.util.hppc; -/** Forked from HPPC, holding int index and Object value */ +/** + * Forked from HPPC, holding int index and Object value. + * + * @lucene.internal + */ public final class ObjectCursor { /** * The current value's index in the container this cursor belongs to. The meaning of this index is From 317c28656cafb0d5545eefe6b4ef4554c9e3d230 Mon Sep 17 00:00:00 2001 From: Dawid Weiss Date: Mon, 27 May 2024 10:16:22 +0200 Subject: [PATCH 06/10] Adding module exports to other Lucene modules. --- lucene/core/src/java/module-info.java | 20 +++++++++++++++++++- 1 file changed, 19 insertions(+), 1 deletion(-) diff --git a/lucene/core/src/java/module-info.java b/lucene/core/src/java/module-info.java index 94ff818c499d..34a2b457fbb7 100644 --- a/lucene/core/src/java/module-info.java +++ b/lucene/core/src/java/module-info.java @@ -50,7 +50,6 @@ exports org.apache.lucene.util.fst; exports org.apache.lucene.util.graph; exports org.apache.lucene.util.hnsw; - exports org.apache.lucene.util.hppc; exports org.apache.lucene.util.mutable; exports org.apache.lucene.util.packed; @@ -58,6 +57,25 @@ exports org.apache.lucene.internal.tests to org.apache.lucene.test_framework; + // Export internally repackaged HPPC classes to other Lucene components. + exports org.apache.lucene.util.hppc to + org.apache.lucene.analysis.common, + org.apache.lucene.analysis.icu, + org.apache.lucene.analysis.kuromoji, + org.apache.lucene.analysis.smartcn, + org.apache.lucene.analysis.stempel, + org.apache.lucene.benchmark, + org.apache.lucene.facet, + org.apache.lucene.grouping, + org.apache.lucene.highlighter, + org.apache.lucene.join, + org.apache.lucene.misc, + org.apache.lucene.queryparser, + org.apache.lucene.sandbox, + org.apache.lucene.spatial3d, + org.apache.lucene.spatial_extras, + org.apache.lucene.suggest; + // Open certain packages for the test framework (ram usage tester). opens org.apache.lucene.document to org.apache.lucene.test_framework; From c94b64310c6e712262007f00f8f1354f428d2e4a Mon Sep 17 00:00:00 2001 From: Dawid Weiss Date: Mon, 27 May 2024 10:16:26 +0200 Subject: [PATCH 07/10] Revert "Adding module exports to other Lucene modules." (tests won't run with the current infrastructure). This reverts commit 317c28656cafb0d5545eefe6b4ef4554c9e3d230. --- lucene/core/src/java/module-info.java | 20 +------------------- 1 file changed, 1 insertion(+), 19 deletions(-) diff --git a/lucene/core/src/java/module-info.java b/lucene/core/src/java/module-info.java index 34a2b457fbb7..94ff818c499d 100644 --- a/lucene/core/src/java/module-info.java +++ b/lucene/core/src/java/module-info.java @@ -50,6 +50,7 @@ exports org.apache.lucene.util.fst; exports org.apache.lucene.util.graph; exports org.apache.lucene.util.hnsw; + exports org.apache.lucene.util.hppc; exports org.apache.lucene.util.mutable; exports org.apache.lucene.util.packed; @@ -57,25 +58,6 @@ exports org.apache.lucene.internal.tests to org.apache.lucene.test_framework; - // Export internally repackaged HPPC classes to other Lucene components. 
- exports org.apache.lucene.util.hppc to - org.apache.lucene.analysis.common, - org.apache.lucene.analysis.icu, - org.apache.lucene.analysis.kuromoji, - org.apache.lucene.analysis.smartcn, - org.apache.lucene.analysis.stempel, - org.apache.lucene.benchmark, - org.apache.lucene.facet, - org.apache.lucene.grouping, - org.apache.lucene.highlighter, - org.apache.lucene.join, - org.apache.lucene.misc, - org.apache.lucene.queryparser, - org.apache.lucene.sandbox, - org.apache.lucene.spatial3d, - org.apache.lucene.spatial_extras, - org.apache.lucene.suggest; - // Open certain packages for the test framework (ram usage tester). opens org.apache.lucene.document to org.apache.lucene.test_framework; From ad4699851116629b79c635c47b4b3686dde7dee5 Mon Sep 17 00:00:00 2001 From: Dawid Weiss Date: Mon, 27 May 2024 10:43:47 +0200 Subject: [PATCH 08/10] Move hppc classes to oal.internal.hppc but export it. --- .../charfilter/MappingCharFilter.java | 2 +- .../analysis/charfilter/NormalizeCharMap.java | 2 +- .../analysis/core/FlattenGraphFilter.java | 4 +- .../lucene/analysis/hunspell/Dictionary.java | 6 +-- .../hunspell/SuggestibleEntryCache.java | 2 +- .../analysis/hunspell/WordFormGenerator.java | 4 +- .../lucene/analysis/hunspell/WordStorage.java | 2 +- .../lucene/analysis/morph/ViterbiNBest.java | 6 +-- .../path/ReversePathHierarchyTokenizer.java | 2 +- .../lucene/analysis/synonym/SynonymMap.java | 4 +- .../icu/segmentation/ICUTokenizerFactory.java | 2 +- .../analysis/ja/JapaneseFilterUtil.java | 2 +- .../ja/JapaneseHiraganaUppercaseFilter.java | 2 +- .../ja/JapaneseKatakanaUppercaseFilter.java | 2 +- .../analysis/cn/smart/hhmm/BiSegGraph.java | 6 +-- .../analysis/cn/smart/hhmm/SegGraph.java | 2 +- .../src/java/org/egothor/stemmer/Gener.java | 2 +- .../src/java/org/egothor/stemmer/Lift.java | 2 +- .../java/org/egothor/stemmer/Optimizer.java | 2 +- .../src/java/org/egothor/stemmer/Reduce.java | 4 +- .../src/java/org/egothor/stemmer/Row.java | 6 +-- .../byTask/feeds/SpatialDocMaker.java | 2 +- .../lucene/benchmark/byTask/utils/Config.java | 2 +- lucene/core/src/java/module-info.java | 19 +++++---- .../analysis/AutomatonToTokenStream.java | 6 +-- .../lucene90/Lucene90DocValuesConsumer.java | 4 +- .../lucene90/Lucene90NormsProducer.java | 2 +- .../codecs/lucene90/Lucene90PointsReader.java | 2 +- .../Lucene90BlockTreeTermsReader.java | 4 +- .../Lucene90CompressingTermVectorsWriter.java | 2 +- .../Lucene99ScalarQuantizedVectorsWriter.java | 2 +- .../apache/lucene/document/LatLonPoint.java | 2 +- .../lucene/document/NearestNeighbor.java | 2 +- .../lucene/index/BufferedUpdatesStream.java | 2 +- .../org/apache/lucene/index/FieldInfos.java | 2 +- .../org/apache/lucene/index/IndexWriter.java | 4 +- .../apache/lucene/index/SegmentDocValues.java | 6 +-- .../index/SegmentDocValuesProducer.java | 2 +- .../apache/lucene/index/SegmentReader.java | 2 +- .../hppc/AbstractIterator.java | 2 +- .../{util => internal}/hppc/BitMixer.java | 2 +- .../hppc/BufferAllocationException.java | 2 +- .../{util => internal}/hppc/CharCursor.java | 2 +- .../{util => internal}/hppc/CharHashSet.java | 36 ++++++---------- .../hppc/CharObjectHashMap.java | 26 ++++++------ .../{util => internal}/hppc/DoubleCursor.java | 2 +- .../hppc/FloatArrayList.java | 4 +- .../{util => internal}/hppc/FloatCursor.java | 2 +- .../hppc/HashContainers.java | 2 +- .../{util => internal}/hppc/IntArrayList.java | 4 +- .../{util => internal}/hppc/IntCursor.java | 2 +- .../hppc/IntDoubleHashMap.java | 26 ++++++------ .../hppc/IntFloatHashMap.java | 42 
+++++++------------ .../{util => internal}/hppc/IntHashSet.java | 4 +- .../hppc/IntIntHashMap.java | 4 +- .../hppc/IntObjectHashMap.java | 4 +- .../hppc/LongArrayList.java | 4 +- .../{util => internal}/hppc/LongCursor.java | 2 +- .../hppc/LongFloatHashMap.java | 26 ++++++------ .../{util => internal}/hppc/LongHashSet.java | 4 +- .../hppc/LongIntHashMap.java | 32 +++++++------- .../hppc/LongObjectHashMap.java | 4 +- .../{util => internal}/hppc/ObjectCursor.java | 2 +- .../{util => internal}/hppc/package-info.java | 7 +++- .../lucene/search/MultiPhraseQuery.java | 2 +- .../org/apache/lucene/search/PhraseQuery.java | 2 +- .../lucene/search/SloppyPhraseMatcher.java | 2 +- .../lucene/util/automaton/Automaton.java | 2 +- .../util/automaton/CompiledAutomaton.java | 2 +- .../util/automaton/LevenshteinAutomata.java | 2 +- .../util/automaton/NFARunAutomaton.java | 2 +- .../lucene/util/automaton/Operations.java | 8 ++-- .../lucene/util/automaton/StateSet.java | 6 +-- .../lucene/util/automaton/UTF32ToUTF8.java | 2 +- .../org/apache/lucene/util/bkd/BKDWriter.java | 2 +- .../java/org/apache/lucene/util/fst/Util.java | 4 +- .../graph/GraphTokenStreamFiniteStrings.java | 2 +- .../apache/lucene/util/hnsw/HnswGraph.java | 4 +- .../util/hnsw/IncrementalHnswGraphMerger.java | 2 +- .../lucene/util/hnsw/OnHeapHnswGraph.java | 2 +- .../hppc/TestCharHashSet.java | 2 +- .../hppc/TestCharObjectHashMap.java | 2 +- .../hppc/TestFloatArrayList.java | 2 +- .../hppc/TestIntArrayList.java | 2 +- .../hppc/TestIntDoubleHashMap.java | 2 +- .../hppc/TestIntFloatHashMap.java | 2 +- .../hppc/TestIntHashSet.java | 2 +- .../hppc/TestIntIntHashMap.java | 2 +- .../hppc/TestIntObjectHashMap.java | 2 +- .../hppc/TestLongArrayList.java | 2 +- .../hppc/TestLongFloatHashMap.java | 2 +- .../hppc/TestLongHashSet.java | 2 +- .../hppc/TestLongIntHashMap.java | 2 +- .../hppc/TestLongObjectHashMap.java | 2 +- .../lucene/distribution/TestModularLayer.java | 5 ++- .../apache/lucene/facet/DrillDownQuery.java | 4 +- .../lucene/facet/LongValueFacetCounts.java | 2 +- .../lucene/facet/StringValueFacetCounts.java | 2 +- .../range/OverlappingLongRangeCounter.java | 8 ++-- .../facet/taxonomy/FloatTaxonomyFacets.java | 2 +- .../facet/taxonomy/IntTaxonomyFacets.java | 2 +- .../taxonomy/OrdinalMappingLeafReader.java | 2 +- .../lucene/facet/taxonomy/TaxonomyFacets.java | 4 +- .../directory/DirectoryTaxonomyReader.java | 4 +- .../directory/TaxonomyIndexArrays.java | 2 +- .../search/grouping/TermGroupSelector.java | 2 +- .../SimpleBoundaryScanner.java | 2 +- ...versifyingNearestChildrenKnnCollector.java | 2 +- .../apache/lucene/search/join/JoinUtil.java | 10 ++--- .../join/PointInSetIncludingScoreQuery.java | 4 +- .../lucene/misc/document/LazyDocument.java | 2 +- .../search/DiversifiedTopDocsCollector.java | 2 +- .../checksums/javaccParserClassic.json | 4 +- .../queryparser/classic/QueryParser.java | 2 +- .../lucene/queryparser/classic/QueryParser.jj | 2 +- .../document/FloatPointNearestNeighbor.java | 2 +- .../sandbox/search/QueryProfilerTree.java | 4 +- .../sandbox/search/TermAutomatonQuery.java | 2 +- .../util/CachingDoubleValueSource.java | 2 +- .../spatial3d/geom/GeoStandardPath.java | 2 +- .../spatial3d/geom/StandardObjects.java | 2 +- .../search/suggest/document/ContextQuery.java | 2 +- 122 files changed, 264 insertions(+), 275 deletions(-) rename lucene/core/src/java/org/apache/lucene/{util => internal}/hppc/AbstractIterator.java (98%) rename lucene/core/src/java/org/apache/lucene/{util => internal}/hppc/BitMixer.java (98%) rename 
lucene/core/src/java/org/apache/lucene/{util => internal}/hppc/BufferAllocationException.java (97%) rename lucene/core/src/java/org/apache/lucene/{util => internal}/hppc/CharCursor.java (96%) rename lucene/core/src/java/org/apache/lucene/{util => internal}/hppc/CharHashSet.java (93%) rename lucene/core/src/java/org/apache/lucene/{util => internal}/hppc/CharObjectHashMap.java (96%) rename lucene/core/src/java/org/apache/lucene/{util => internal}/hppc/DoubleCursor.java (96%) rename lucene/core/src/java/org/apache/lucene/{util => internal}/hppc/FloatArrayList.java (99%) rename lucene/core/src/java/org/apache/lucene/{util => internal}/hppc/FloatCursor.java (96%) rename lucene/core/src/java/org/apache/lucene/{util => internal}/hppc/HashContainers.java (98%) rename lucene/core/src/java/org/apache/lucene/{util => internal}/hppc/IntArrayList.java (99%) rename lucene/core/src/java/org/apache/lucene/{util => internal}/hppc/IntCursor.java (96%) rename lucene/core/src/java/org/apache/lucene/{util => internal}/hppc/IntDoubleHashMap.java (96%) rename lucene/core/src/java/org/apache/lucene/{util => internal}/hppc/IntFloatHashMap.java (93%) rename lucene/core/src/java/org/apache/lucene/{util => internal}/hppc/IntHashSet.java (99%) rename lucene/core/src/java/org/apache/lucene/{util => internal}/hppc/IntIntHashMap.java (99%) rename lucene/core/src/java/org/apache/lucene/{util => internal}/hppc/IntObjectHashMap.java (99%) rename lucene/core/src/java/org/apache/lucene/{util => internal}/hppc/LongArrayList.java (99%) rename lucene/core/src/java/org/apache/lucene/{util => internal}/hppc/LongCursor.java (96%) rename lucene/core/src/java/org/apache/lucene/{util => internal}/hppc/LongFloatHashMap.java (96%) rename lucene/core/src/java/org/apache/lucene/{util => internal}/hppc/LongHashSet.java (99%) rename lucene/core/src/java/org/apache/lucene/{util => internal}/hppc/LongIntHashMap.java (95%) rename lucene/core/src/java/org/apache/lucene/{util => internal}/hppc/LongObjectHashMap.java (99%) rename lucene/core/src/java/org/apache/lucene/{util => internal}/hppc/ObjectCursor.java (97%) rename lucene/core/src/java/org/apache/lucene/{util => internal}/hppc/package-info.java (82%) rename lucene/core/src/test/org/apache/lucene/{util => internal}/hppc/TestCharHashSet.java (99%) rename lucene/core/src/test/org/apache/lucene/{util => internal}/hppc/TestCharObjectHashMap.java (99%) rename lucene/core/src/test/org/apache/lucene/{util => internal}/hppc/TestFloatArrayList.java (99%) rename lucene/core/src/test/org/apache/lucene/{util => internal}/hppc/TestIntArrayList.java (99%) rename lucene/core/src/test/org/apache/lucene/{util => internal}/hppc/TestIntDoubleHashMap.java (99%) rename lucene/core/src/test/org/apache/lucene/{util => internal}/hppc/TestIntFloatHashMap.java (99%) rename lucene/core/src/test/org/apache/lucene/{util => internal}/hppc/TestIntHashSet.java (99%) rename lucene/core/src/test/org/apache/lucene/{util => internal}/hppc/TestIntIntHashMap.java (99%) rename lucene/core/src/test/org/apache/lucene/{util => internal}/hppc/TestIntObjectHashMap.java (99%) rename lucene/core/src/test/org/apache/lucene/{util => internal}/hppc/TestLongArrayList.java (99%) rename lucene/core/src/test/org/apache/lucene/{util => internal}/hppc/TestLongFloatHashMap.java (99%) rename lucene/core/src/test/org/apache/lucene/{util => internal}/hppc/TestLongHashSet.java (99%) rename lucene/core/src/test/org/apache/lucene/{util => internal}/hppc/TestLongIntHashMap.java (99%) rename lucene/core/src/test/org/apache/lucene/{util => 
internal}/hppc/TestLongObjectHashMap.java (99%) diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/charfilter/MappingCharFilter.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/charfilter/MappingCharFilter.java index 0d56ce960fd9..5c87464245c7 100644 --- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/charfilter/MappingCharFilter.java +++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/charfilter/MappingCharFilter.java @@ -20,11 +20,11 @@ import java.io.Reader; import org.apache.lucene.analysis.CharFilter; // javadocs import org.apache.lucene.analysis.util.RollingCharBuffer; +import org.apache.lucene.internal.hppc.CharObjectHashMap; import org.apache.lucene.util.CharsRef; import org.apache.lucene.util.fst.CharSequenceOutputs; import org.apache.lucene.util.fst.FST; import org.apache.lucene.util.fst.Outputs; -import org.apache.lucene.util.hppc.CharObjectHashMap; /** * Simplistic {@link CharFilter} that applies the mappings contained in a {@link NormalizeCharMap} diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/charfilter/NormalizeCharMap.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/charfilter/NormalizeCharMap.java index ef0b0141ac88..a6095a09157b 100644 --- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/charfilter/NormalizeCharMap.java +++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/charfilter/NormalizeCharMap.java @@ -19,6 +19,7 @@ import java.io.IOException; import java.util.Map; import java.util.TreeMap; +import org.apache.lucene.internal.hppc.CharObjectHashMap; import org.apache.lucene.util.CharsRef; import org.apache.lucene.util.IntsRefBuilder; import org.apache.lucene.util.fst.CharSequenceOutputs; @@ -26,7 +27,6 @@ import org.apache.lucene.util.fst.FSTCompiler; import org.apache.lucene.util.fst.Outputs; import org.apache.lucene.util.fst.Util; -import org.apache.lucene.util.hppc.CharObjectHashMap; // TODO: save/load? 
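[Illustrative sketch, not part of this patch] For callers like MappingCharFilter and NormalizeCharMap above, only the import line moves; the forked container API is untouched. Assuming the fork keeps HPPC's put/get/containsKey signatures, post-patch usage looks roughly like:

    import org.apache.lucene.internal.hppc.CharObjectHashMap;

    class CharObjectHashMapSketch {
      public static void main(String[] args) {
        // A char-keyed map with no boxing; only the package prefix changed
        // (org.apache.lucene.util.hppc -> org.apache.lucene.internal.hppc).
        CharObjectHashMap<String> replacements = new CharObjectHashMap<>();
        replacements.put('a', "alpha");
        replacements.put('b', "beta");
        System.out.println(replacements.get('a'));          // alpha
        System.out.println(replacements.containsKey('z'));  // false
      }
    }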
diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/core/FlattenGraphFilter.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/core/FlattenGraphFilter.java index e4c83db24e32..5c0f5e755fe5 100644 --- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/core/FlattenGraphFilter.java +++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/core/FlattenGraphFilter.java @@ -26,10 +26,10 @@ import org.apache.lucene.analysis.tokenattributes.OffsetAttribute; import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute; import org.apache.lucene.analysis.tokenattributes.PositionLengthAttribute; +import org.apache.lucene.internal.hppc.IntArrayList; +import org.apache.lucene.internal.hppc.IntCursor; import org.apache.lucene.util.AttributeSource; import org.apache.lucene.util.RollingBuffer; -import org.apache.lucene.util.hppc.IntArrayList; -import org.apache.lucene.util.hppc.IntCursor; /** * Converts an incoming graph token stream, such as one from {@link SynonymGraphFilter}, into a flat diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/hunspell/Dictionary.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/hunspell/Dictionary.java index d8aac0c50665..525e39dc389c 100644 --- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/hunspell/Dictionary.java +++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/hunspell/Dictionary.java @@ -50,6 +50,9 @@ import java.util.stream.Collectors; import org.apache.lucene.analysis.hunspell.SortingStrategy.EntryAccumulator; import org.apache.lucene.analysis.hunspell.SortingStrategy.EntrySupplier; +import org.apache.lucene.internal.hppc.CharHashSet; +import org.apache.lucene.internal.hppc.IntArrayList; +import org.apache.lucene.internal.hppc.IntCursor; import org.apache.lucene.store.Directory; import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.IntsRef; @@ -58,9 +61,6 @@ import org.apache.lucene.util.fst.FSTCompiler; import org.apache.lucene.util.fst.IntSequenceOutputs; import org.apache.lucene.util.fst.Util; -import org.apache.lucene.util.hppc.CharHashSet; -import org.apache.lucene.util.hppc.IntArrayList; -import org.apache.lucene.util.hppc.IntCursor; /** In-memory structure for the dictionary (.dic) and affix (.aff) data of a hunspell dictionary. 
*/ public class Dictionary { diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/hunspell/SuggestibleEntryCache.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/hunspell/SuggestibleEntryCache.java index d11adb7c5d99..23a594d91f09 100644 --- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/hunspell/SuggestibleEntryCache.java +++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/hunspell/SuggestibleEntryCache.java @@ -17,10 +17,10 @@ package org.apache.lucene.analysis.hunspell; import java.util.function.Consumer; +import org.apache.lucene.internal.hppc.IntObjectHashMap; import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.CharsRef; import org.apache.lucene.util.IntsRef; -import org.apache.lucene.util.hppc.IntObjectHashMap; /** * A cache allowing for CPU-cache-friendlier iteration over {@link WordStorage} entries that can be diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/hunspell/WordFormGenerator.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/hunspell/WordFormGenerator.java index c2e9d05ec0df..bfcccf0640af 100644 --- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/hunspell/WordFormGenerator.java +++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/hunspell/WordFormGenerator.java @@ -39,11 +39,11 @@ import java.util.stream.Collectors; import java.util.stream.Stream; import org.apache.lucene.analysis.hunspell.AffixedWord.Affix; +import org.apache.lucene.internal.hppc.CharHashSet; +import org.apache.lucene.internal.hppc.CharObjectHashMap; import org.apache.lucene.util.IntsRef; import org.apache.lucene.util.fst.FST; import org.apache.lucene.util.fst.IntsRefFSTEnum; -import org.apache.lucene.util.hppc.CharHashSet; -import org.apache.lucene.util.hppc.CharObjectHashMap; /** * A utility class used for generating possible word forms by adding affixes to stems ({@link diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/hunspell/WordStorage.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/hunspell/WordStorage.java index 21d4690b1fe5..b107b5d7c254 100644 --- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/hunspell/WordStorage.java +++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/hunspell/WordStorage.java @@ -20,6 +20,7 @@ import java.util.ArrayList; import java.util.List; import java.util.function.Consumer; +import org.apache.lucene.internal.hppc.IntArrayList; import org.apache.lucene.store.ByteArrayDataInput; import org.apache.lucene.store.ByteArrayDataOutput; import org.apache.lucene.store.DataOutput; @@ -28,7 +29,6 @@ import org.apache.lucene.util.IntsRef; import org.apache.lucene.util.IntsRefBuilder; import org.apache.lucene.util.fst.IntSequenceOutputs; -import org.apache.lucene.util.hppc.IntArrayList; /** * A data structure for memory-efficient word storage and fast lookup/enumeration. 
Each dictionary diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/morph/ViterbiNBest.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/morph/ViterbiNBest.java index a9d95b86fe28..171d128932ab 100644 --- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/morph/ViterbiNBest.java +++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/morph/ViterbiNBest.java @@ -20,11 +20,11 @@ import java.util.Arrays; import java.util.Collections; import java.util.EnumMap; +import org.apache.lucene.internal.hppc.IntArrayList; +import org.apache.lucene.internal.hppc.IntCursor; +import org.apache.lucene.internal.hppc.IntIntHashMap; import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.fst.FST; -import org.apache.lucene.util.hppc.IntArrayList; -import org.apache.lucene.util.hppc.IntCursor; -import org.apache.lucene.util.hppc.IntIntHashMap; /** {@link Viterbi} subclass for n-best path calculation. */ public abstract class ViterbiNBest diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/path/ReversePathHierarchyTokenizer.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/path/ReversePathHierarchyTokenizer.java index e8eb01f86f28..52388ba35ef5 100644 --- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/path/ReversePathHierarchyTokenizer.java +++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/path/ReversePathHierarchyTokenizer.java @@ -21,9 +21,9 @@ import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; import org.apache.lucene.analysis.tokenattributes.OffsetAttribute; import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute; +import org.apache.lucene.internal.hppc.IntArrayList; import org.apache.lucene.util.AttributeFactory; import org.apache.lucene.util.IgnoreRandomChains; -import org.apache.lucene.util.hppc.IntArrayList; /** * Tokenizer for domain-like hierarchies. diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/synonym/SynonymMap.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/synonym/SynonymMap.java index 7c23cfd2378c..29b40cde07ca 100644 --- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/synonym/SynonymMap.java +++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/synonym/SynonymMap.java @@ -26,6 +26,8 @@ import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute; +import org.apache.lucene.internal.hppc.IntArrayList; +import org.apache.lucene.internal.hppc.IntHashSet; import org.apache.lucene.store.ByteArrayDataOutput; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; @@ -37,8 +39,6 @@ import org.apache.lucene.util.fst.FST; import org.apache.lucene.util.fst.FSTCompiler; import org.apache.lucene.util.fst.Util; -import org.apache.lucene.util.hppc.IntArrayList; -import org.apache.lucene.util.hppc.IntHashSet; /** * A map of synonyms, keys and values are phrases. 
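[Illustrative sketch, not part of this patch] The SynonymMap hunk above swaps in the forked IntArrayList and IntHashSet. Assuming the fork keeps HPPC's primitive add/contains and cursor-based iteration, the pattern it relies on (dedup plus ordered collection of int ordinals, no boxing) is roughly:

    import org.apache.lucene.internal.hppc.IntArrayList;
    import org.apache.lucene.internal.hppc.IntCursor;
    import org.apache.lucene.internal.hppc.IntHashSet;

    class OrdinalCollectorSketch {
      public static void main(String[] args) {
        IntHashSet seen = new IntHashSet();        // deduplicates int keys without boxing
        IntArrayList ordered = new IntArrayList(); // keeps first-seen order
        for (int ord : new int[] {7, 3, 7, 11}) {
          if (seen.add(ord)) {                     // add() is false when the key already exists
            ordered.add(ord);
          }
        }
        for (IntCursor c : ordered) {              // cursors expose public index/value fields
          System.out.println(c.index + " -> " + c.value);
        }
      }
    }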
diff --git a/lucene/analysis/icu/src/java/org/apache/lucene/analysis/icu/segmentation/ICUTokenizerFactory.java b/lucene/analysis/icu/src/java/org/apache/lucene/analysis/icu/segmentation/ICUTokenizerFactory.java index 0383a53a2fd3..0545db0ba19d 100644 --- a/lucene/analysis/icu/src/java/org/apache/lucene/analysis/icu/segmentation/ICUTokenizerFactory.java +++ b/lucene/analysis/icu/src/java/org/apache/lucene/analysis/icu/segmentation/ICUTokenizerFactory.java @@ -27,11 +27,11 @@ import java.util.List; import java.util.Map; import org.apache.lucene.analysis.TokenizerFactory; +import org.apache.lucene.internal.hppc.IntObjectHashMap; import org.apache.lucene.util.AttributeFactory; import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.ResourceLoader; import org.apache.lucene.util.ResourceLoaderAware; -import org.apache.lucene.util.hppc.IntObjectHashMap; /** * Factory for {@link ICUTokenizer}. Words are broken across script boundaries, then segmented diff --git a/lucene/analysis/kuromoji/src/java/org/apache/lucene/analysis/ja/JapaneseFilterUtil.java b/lucene/analysis/kuromoji/src/java/org/apache/lucene/analysis/ja/JapaneseFilterUtil.java index 36410b3ea319..73666d3e04ee 100644 --- a/lucene/analysis/kuromoji/src/java/org/apache/lucene/analysis/ja/JapaneseFilterUtil.java +++ b/lucene/analysis/kuromoji/src/java/org/apache/lucene/analysis/ja/JapaneseFilterUtil.java @@ -17,7 +17,7 @@ package org.apache.lucene.analysis.ja; import java.util.Map; -import org.apache.lucene.util.hppc.CharObjectHashMap; +import org.apache.lucene.internal.hppc.CharObjectHashMap; /** Utility methods for Japanese filters. */ class JapaneseFilterUtil { diff --git a/lucene/analysis/kuromoji/src/java/org/apache/lucene/analysis/ja/JapaneseHiraganaUppercaseFilter.java b/lucene/analysis/kuromoji/src/java/org/apache/lucene/analysis/ja/JapaneseHiraganaUppercaseFilter.java index e7c0969aed5e..88a01aef56d4 100644 --- a/lucene/analysis/kuromoji/src/java/org/apache/lucene/analysis/ja/JapaneseHiraganaUppercaseFilter.java +++ b/lucene/analysis/kuromoji/src/java/org/apache/lucene/analysis/ja/JapaneseHiraganaUppercaseFilter.java @@ -23,7 +23,7 @@ import org.apache.lucene.analysis.TokenFilter; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; -import org.apache.lucene.util.hppc.CharObjectHashMap; +import org.apache.lucene.internal.hppc.CharObjectHashMap; /** * A {@link TokenFilter} that normalizes small letters (捨て仮名) in hiragana into normal letters. For diff --git a/lucene/analysis/kuromoji/src/java/org/apache/lucene/analysis/ja/JapaneseKatakanaUppercaseFilter.java b/lucene/analysis/kuromoji/src/java/org/apache/lucene/analysis/ja/JapaneseKatakanaUppercaseFilter.java index 7a96acd4c0ca..9fd0a5f74fcf 100644 --- a/lucene/analysis/kuromoji/src/java/org/apache/lucene/analysis/ja/JapaneseKatakanaUppercaseFilter.java +++ b/lucene/analysis/kuromoji/src/java/org/apache/lucene/analysis/ja/JapaneseKatakanaUppercaseFilter.java @@ -23,7 +23,7 @@ import org.apache.lucene.analysis.TokenFilter; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; -import org.apache.lucene.util.hppc.CharObjectHashMap; +import org.apache.lucene.internal.hppc.CharObjectHashMap; /** * A {@link TokenFilter} that normalizes small letters (捨て仮名) in katakana into normal letters. 
For diff --git a/lucene/analysis/smartcn/src/java/org/apache/lucene/analysis/cn/smart/hhmm/BiSegGraph.java b/lucene/analysis/smartcn/src/java/org/apache/lucene/analysis/cn/smart/hhmm/BiSegGraph.java index 92cff58002a7..ffd1b04a6e51 100644 --- a/lucene/analysis/smartcn/src/java/org/apache/lucene/analysis/cn/smart/hhmm/BiSegGraph.java +++ b/lucene/analysis/smartcn/src/java/org/apache/lucene/analysis/cn/smart/hhmm/BiSegGraph.java @@ -19,9 +19,9 @@ import java.util.ArrayList; import java.util.List; import org.apache.lucene.analysis.cn.smart.Utility; -import org.apache.lucene.util.hppc.IntArrayList; -import org.apache.lucene.util.hppc.IntObjectHashMap; -import org.apache.lucene.util.hppc.ObjectCursor; +import org.apache.lucene.internal.hppc.IntArrayList; +import org.apache.lucene.internal.hppc.IntObjectHashMap; +import org.apache.lucene.internal.hppc.ObjectCursor; /** * Graph representing possible token pairs (bigrams) at each start offset in the sentence. diff --git a/lucene/analysis/smartcn/src/java/org/apache/lucene/analysis/cn/smart/hhmm/SegGraph.java b/lucene/analysis/smartcn/src/java/org/apache/lucene/analysis/cn/smart/hhmm/SegGraph.java index bf89a63654f1..1a07fd998a20 100644 --- a/lucene/analysis/smartcn/src/java/org/apache/lucene/analysis/cn/smart/hhmm/SegGraph.java +++ b/lucene/analysis/smartcn/src/java/org/apache/lucene/analysis/cn/smart/hhmm/SegGraph.java @@ -18,7 +18,7 @@ import java.util.ArrayList; import java.util.List; -import org.apache.lucene.util.hppc.IntObjectHashMap; +import org.apache.lucene.internal.hppc.IntObjectHashMap; /** * Graph representing possible tokens at each start offset in the sentence. diff --git a/lucene/analysis/stempel/src/java/org/egothor/stemmer/Gener.java b/lucene/analysis/stempel/src/java/org/egothor/stemmer/Gener.java index b66f6f796565..ecd1f4e197e8 100644 --- a/lucene/analysis/stempel/src/java/org/egothor/stemmer/Gener.java +++ b/lucene/analysis/stempel/src/java/org/egothor/stemmer/Gener.java @@ -58,7 +58,7 @@ created by Leo Galambos (Leo.G@seznam.cz). import java.util.Arrays; import java.util.Iterator; import java.util.List; -import org.apache.lucene.util.hppc.ObjectCursor; +import org.apache.lucene.internal.hppc.ObjectCursor; /** * The Gener object helps in the discarding of nodes which break the reduction effort and defend the diff --git a/lucene/analysis/stempel/src/java/org/egothor/stemmer/Lift.java b/lucene/analysis/stempel/src/java/org/egothor/stemmer/Lift.java index 62b2cea821ec..0536c3e16100 100644 --- a/lucene/analysis/stempel/src/java/org/egothor/stemmer/Lift.java +++ b/lucene/analysis/stempel/src/java/org/egothor/stemmer/Lift.java @@ -58,7 +58,7 @@ created by Leo Galambos (Leo.G@seznam.cz). import java.util.Arrays; import java.util.Iterator; import java.util.List; -import org.apache.lucene.util.hppc.ObjectCursor; +import org.apache.lucene.internal.hppc.ObjectCursor; /** * The Lift class is a data structure that is a variation of a Patricia trie. diff --git a/lucene/analysis/stempel/src/java/org/egothor/stemmer/Optimizer.java b/lucene/analysis/stempel/src/java/org/egothor/stemmer/Optimizer.java index 778e863b01cb..bb2321759838 100644 --- a/lucene/analysis/stempel/src/java/org/egothor/stemmer/Optimizer.java +++ b/lucene/analysis/stempel/src/java/org/egothor/stemmer/Optimizer.java @@ -58,7 +58,7 @@ created by Leo Galambos (Leo.G@seznam.cz). 
import java.util.Arrays; import java.util.Iterator; import java.util.List; -import org.apache.lucene.util.hppc.CharCursor; +import org.apache.lucene.internal.hppc.CharCursor; /** * The Optimizer class is a Trie that will be reduced (have empty rows removed). diff --git a/lucene/analysis/stempel/src/java/org/egothor/stemmer/Reduce.java b/lucene/analysis/stempel/src/java/org/egothor/stemmer/Reduce.java index ae89e1b81ab5..d8a3692e2c8e 100644 --- a/lucene/analysis/stempel/src/java/org/egothor/stemmer/Reduce.java +++ b/lucene/analysis/stempel/src/java/org/egothor/stemmer/Reduce.java @@ -58,8 +58,8 @@ created by Leo Galambos (Leo.G@seznam.cz). import java.util.Arrays; import java.util.Iterator; import java.util.List; -import org.apache.lucene.util.hppc.CharCursor; -import org.apache.lucene.util.hppc.ObjectCursor; +import org.apache.lucene.internal.hppc.CharCursor; +import org.apache.lucene.internal.hppc.ObjectCursor; /** The Reduce object is used to remove gaps in a Trie which stores a dictionary. */ public class Reduce { diff --git a/lucene/analysis/stempel/src/java/org/egothor/stemmer/Row.java b/lucene/analysis/stempel/src/java/org/egothor/stemmer/Row.java index 3c8db8fdbb89..275991a50948 100644 --- a/lucene/analysis/stempel/src/java/org/egothor/stemmer/Row.java +++ b/lucene/analysis/stempel/src/java/org/egothor/stemmer/Row.java @@ -59,9 +59,9 @@ created by Leo Galambos (Leo.G@seznam.cz). import java.io.IOException; import java.io.PrintStream; import java.util.Iterator; -import org.apache.lucene.util.hppc.CharCursor; -import org.apache.lucene.util.hppc.CharObjectHashMap; -import org.apache.lucene.util.hppc.ObjectCursor; +import org.apache.lucene.internal.hppc.CharCursor; +import org.apache.lucene.internal.hppc.CharObjectHashMap; +import org.apache.lucene.internal.hppc.ObjectCursor; /** The Row class represents a row in a matrix representation of a trie. 
*/ public class Row { diff --git a/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/feeds/SpatialDocMaker.java b/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/feeds/SpatialDocMaker.java index bfac6603ab9e..968f83b56e82 100644 --- a/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/feeds/SpatialDocMaker.java +++ b/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/feeds/SpatialDocMaker.java @@ -23,6 +23,7 @@ import org.apache.lucene.benchmark.byTask.utils.Config; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; +import org.apache.lucene.internal.hppc.IntObjectHashMap; import org.apache.lucene.spatial.SpatialStrategy; import org.apache.lucene.spatial.composite.CompositeSpatialStrategy; import org.apache.lucene.spatial.prefix.RecursivePrefixTreeStrategy; @@ -30,7 +31,6 @@ import org.apache.lucene.spatial.prefix.tree.SpatialPrefixTree; import org.apache.lucene.spatial.prefix.tree.SpatialPrefixTreeFactory; import org.apache.lucene.spatial.serialized.SerializedDVStrategy; -import org.apache.lucene.util.hppc.IntObjectHashMap; import org.locationtech.spatial4j.context.SpatialContext; import org.locationtech.spatial4j.context.SpatialContextFactory; import org.locationtech.spatial4j.shape.Point; diff --git a/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/utils/Config.java b/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/utils/Config.java index 36de0c5ffcac..25a688dbeee0 100644 --- a/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/utils/Config.java +++ b/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/utils/Config.java @@ -27,7 +27,7 @@ import java.util.Map; import java.util.Properties; import java.util.StringTokenizer; -import org.apache.lucene.util.hppc.IntArrayList; +import org.apache.lucene.internal.hppc.IntArrayList; /** * Perf run configuration properties. 
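[Illustrative sketch, not part of this patch] The module-info.java hunk that follows drops the long qualified-export list and temporarily exports org.apache.lucene.internal.hppc to all modules, so a downstream module no longer needs to be named in core's descriptor. A hypothetical consumer's descriptor would just be:

    // module-info.java of a hypothetical module that consumes the forked containers
    module com.example.hppc.consumer {
      requires org.apache.lucene.core; // org.apache.lucene.internal.hppc is (temporarily) exported to everyone
    }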
diff --git a/lucene/core/src/java/module-info.java b/lucene/core/src/java/module-info.java index 94ff818c499d..f093415d5791 100644 --- a/lucene/core/src/java/module-info.java +++ b/lucene/core/src/java/module-info.java @@ -23,36 +23,39 @@ requires java.logging; requires static jdk.management; // this is optional but explicit declaration is recommended - exports org.apache.lucene.analysis; exports org.apache.lucene.analysis.standard; exports org.apache.lucene.analysis.tokenattributes; - exports org.apache.lucene.codecs; + exports org.apache.lucene.analysis; exports org.apache.lucene.codecs.compressing; + exports org.apache.lucene.codecs.lucene90.blocktree; + exports org.apache.lucene.codecs.lucene90.compressing; exports org.apache.lucene.codecs.lucene90; exports org.apache.lucene.codecs.lucene94; exports org.apache.lucene.codecs.lucene95; exports org.apache.lucene.codecs.lucene99; - exports org.apache.lucene.codecs.lucene90.blocktree; - exports org.apache.lucene.codecs.lucene90.compressing; exports org.apache.lucene.codecs.perfield; + exports org.apache.lucene.codecs; exports org.apache.lucene.document; exports org.apache.lucene.geo; exports org.apache.lucene.index; - exports org.apache.lucene.search; exports org.apache.lucene.search.comparators; - exports org.apache.lucene.search.similarities; exports org.apache.lucene.search.knn; + exports org.apache.lucene.search.similarities; + exports org.apache.lucene.search; exports org.apache.lucene.store; - exports org.apache.lucene.util; exports org.apache.lucene.util.automaton; exports org.apache.lucene.util.bkd; exports org.apache.lucene.util.compress; exports org.apache.lucene.util.fst; exports org.apache.lucene.util.graph; exports org.apache.lucene.util.hnsw; - exports org.apache.lucene.util.hppc; exports org.apache.lucene.util.mutable; exports org.apache.lucene.util.packed; + exports org.apache.lucene.util; + + // Temporarily export HPPC to all modules (eventually, this + // should be restricted to only Lucene modules) + exports org.apache.lucene.internal.hppc; // Only export internal packages to the test framework. exports org.apache.lucene.internal.tests to diff --git a/lucene/core/src/java/org/apache/lucene/analysis/AutomatonToTokenStream.java b/lucene/core/src/java/org/apache/lucene/analysis/AutomatonToTokenStream.java index 61bbcae48e7e..d60d386ec5b8 100644 --- a/lucene/core/src/java/org/apache/lucene/analysis/AutomatonToTokenStream.java +++ b/lucene/core/src/java/org/apache/lucene/analysis/AutomatonToTokenStream.java @@ -25,11 +25,11 @@ import org.apache.lucene.analysis.tokenattributes.OffsetAttribute; import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute; import org.apache.lucene.analysis.tokenattributes.PositionLengthAttribute; +import org.apache.lucene.internal.hppc.IntArrayList; +import org.apache.lucene.internal.hppc.IntCursor; +import org.apache.lucene.internal.hppc.IntIntHashMap; import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.Transition; -import org.apache.lucene.util.hppc.IntArrayList; -import org.apache.lucene.util.hppc.IntCursor; -import org.apache.lucene.util.hppc.IntIntHashMap; /** Converts an Automaton into a TokenStream. 
*/ public class AutomatonToTokenStream { diff --git a/lucene/core/src/java/org/apache/lucene/codecs/lucene90/Lucene90DocValuesConsumer.java b/lucene/core/src/java/org/apache/lucene/codecs/lucene90/Lucene90DocValuesConsumer.java index 57caf34d1a64..d64dce985c01 100644 --- a/lucene/core/src/java/org/apache/lucene/codecs/lucene90/Lucene90DocValuesConsumer.java +++ b/lucene/core/src/java/org/apache/lucene/codecs/lucene90/Lucene90DocValuesConsumer.java @@ -36,6 +36,8 @@ import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.index.TermsEnum; +import org.apache.lucene.internal.hppc.LongHashSet; +import org.apache.lucene.internal.hppc.LongIntHashMap; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.SortedSetSelector; import org.apache.lucene.store.ByteArrayDataOutput; @@ -50,8 +52,6 @@ import org.apache.lucene.util.MathUtil; import org.apache.lucene.util.StringHelper; import org.apache.lucene.util.compress.LZ4; -import org.apache.lucene.util.hppc.LongHashSet; -import org.apache.lucene.util.hppc.LongIntHashMap; import org.apache.lucene.util.packed.DirectMonotonicWriter; import org.apache.lucene.util.packed.DirectWriter; diff --git a/lucene/core/src/java/org/apache/lucene/codecs/lucene90/Lucene90NormsProducer.java b/lucene/core/src/java/org/apache/lucene/codecs/lucene90/Lucene90NormsProducer.java index cf17d52bb12a..0b996c5d9525 100644 --- a/lucene/core/src/java/org/apache/lucene/codecs/lucene90/Lucene90NormsProducer.java +++ b/lucene/core/src/java/org/apache/lucene/codecs/lucene90/Lucene90NormsProducer.java @@ -29,11 +29,11 @@ import org.apache.lucene.index.IndexFileNames; import org.apache.lucene.index.NumericDocValues; import org.apache.lucene.index.SegmentReadState; +import org.apache.lucene.internal.hppc.IntObjectHashMap; import org.apache.lucene.store.ChecksumIndexInput; import org.apache.lucene.store.IndexInput; import org.apache.lucene.store.RandomAccessInput; import org.apache.lucene.util.IOUtils; -import org.apache.lucene.util.hppc.IntObjectHashMap; /** Reader for {@link Lucene90NormsFormat} */ final class Lucene90NormsProducer extends NormsProducer implements Cloneable { diff --git a/lucene/core/src/java/org/apache/lucene/codecs/lucene90/Lucene90PointsReader.java b/lucene/core/src/java/org/apache/lucene/codecs/lucene90/Lucene90PointsReader.java index 9e0d597e8d5b..75805d798bba 100644 --- a/lucene/core/src/java/org/apache/lucene/codecs/lucene90/Lucene90PointsReader.java +++ b/lucene/core/src/java/org/apache/lucene/codecs/lucene90/Lucene90PointsReader.java @@ -24,12 +24,12 @@ import org.apache.lucene.index.IndexFileNames; import org.apache.lucene.index.PointValues; import org.apache.lucene.index.SegmentReadState; +import org.apache.lucene.internal.hppc.IntObjectHashMap; import org.apache.lucene.store.ChecksumIndexInput; import org.apache.lucene.store.IndexInput; import org.apache.lucene.store.ReadAdvice; import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.bkd.BKDReader; -import org.apache.lucene.util.hppc.IntObjectHashMap; /** Reads point values previously written with {@link Lucene90PointsWriter} */ public class Lucene90PointsReader extends PointsReader { diff --git a/lucene/core/src/java/org/apache/lucene/codecs/lucene90/blocktree/Lucene90BlockTreeTermsReader.java b/lucene/core/src/java/org/apache/lucene/codecs/lucene90/blocktree/Lucene90BlockTreeTermsReader.java index 1fc1fb3a7fae..3871945e0a49 100644 --- 
a/lucene/core/src/java/org/apache/lucene/codecs/lucene90/blocktree/Lucene90BlockTreeTermsReader.java +++ b/lucene/core/src/java/org/apache/lucene/codecs/lucene90/blocktree/Lucene90BlockTreeTermsReader.java @@ -31,6 +31,8 @@ import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.SegmentReadState; import org.apache.lucene.index.Terms; +import org.apache.lucene.internal.hppc.IntCursor; +import org.apache.lucene.internal.hppc.IntObjectHashMap; import org.apache.lucene.store.ChecksumIndexInput; import org.apache.lucene.store.IndexInput; import org.apache.lucene.store.ReadAdvice; @@ -38,8 +40,6 @@ import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.fst.ByteSequenceOutputs; import org.apache.lucene.util.fst.Outputs; -import org.apache.lucene.util.hppc.IntCursor; -import org.apache.lucene.util.hppc.IntObjectHashMap; /** * A block-based terms index and dictionary that assigns terms to variable length blocks according diff --git a/lucene/core/src/java/org/apache/lucene/codecs/lucene90/compressing/Lucene90CompressingTermVectorsWriter.java b/lucene/core/src/java/org/apache/lucene/codecs/lucene90/compressing/Lucene90CompressingTermVectorsWriter.java index 7a37e8c9300c..a070c6be47b5 100644 --- a/lucene/core/src/java/org/apache/lucene/codecs/lucene90/compressing/Lucene90CompressingTermVectorsWriter.java +++ b/lucene/core/src/java/org/apache/lucene/codecs/lucene90/compressing/Lucene90CompressingTermVectorsWriter.java @@ -39,6 +39,7 @@ import org.apache.lucene.index.IndexFileNames; import org.apache.lucene.index.MergeState; import org.apache.lucene.index.SegmentInfo; +import org.apache.lucene.internal.hppc.IntHashSet; import org.apache.lucene.store.ByteBuffersDataInput; import org.apache.lucene.store.ByteBuffersDataOutput; import org.apache.lucene.store.DataInput; @@ -51,7 +52,6 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.StringHelper; -import org.apache.lucene.util.hppc.IntHashSet; import org.apache.lucene.util.packed.BlockPackedWriter; import org.apache.lucene.util.packed.DirectWriter; import org.apache.lucene.util.packed.PackedInts; diff --git a/lucene/core/src/java/org/apache/lucene/codecs/lucene99/Lucene99ScalarQuantizedVectorsWriter.java b/lucene/core/src/java/org/apache/lucene/codecs/lucene99/Lucene99ScalarQuantizedVectorsWriter.java index 8e919b90faaa..7fbe503748fa 100644 --- a/lucene/core/src/java/org/apache/lucene/codecs/lucene99/Lucene99ScalarQuantizedVectorsWriter.java +++ b/lucene/core/src/java/org/apache/lucene/codecs/lucene99/Lucene99ScalarQuantizedVectorsWriter.java @@ -48,6 +48,7 @@ import org.apache.lucene.index.Sorter; import org.apache.lucene.index.VectorEncoding; import org.apache.lucene.index.VectorSimilarityFunction; +import org.apache.lucene.internal.hppc.IntArrayList; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.VectorScorer; import org.apache.lucene.store.IndexInput; @@ -59,7 +60,6 @@ import org.apache.lucene.util.hnsw.CloseableRandomVectorScorerSupplier; import org.apache.lucene.util.hnsw.RandomVectorScorer; import org.apache.lucene.util.hnsw.RandomVectorScorerSupplier; -import org.apache.lucene.util.hppc.IntArrayList; import org.apache.lucene.util.quantization.QuantizedByteVectorValues; import org.apache.lucene.util.quantization.QuantizedVectorsReader; import org.apache.lucene.util.quantization.ScalarQuantizer; diff --git a/lucene/core/src/java/org/apache/lucene/document/LatLonPoint.java 
b/lucene/core/src/java/org/apache/lucene/document/LatLonPoint.java index 13953d2d79d0..42bf37b265a0 100644 --- a/lucene/core/src/java/org/apache/lucene/document/LatLonPoint.java +++ b/lucene/core/src/java/org/apache/lucene/document/LatLonPoint.java @@ -35,6 +35,7 @@ import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.PointValues; +import org.apache.lucene.internal.hppc.IntArrayList; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanQuery; @@ -53,7 +54,6 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.NumericUtils; import org.apache.lucene.util.SloppyMath; -import org.apache.lucene.util.hppc.IntArrayList; /** * An indexed location field. diff --git a/lucene/core/src/java/org/apache/lucene/document/NearestNeighbor.java b/lucene/core/src/java/org/apache/lucene/document/NearestNeighbor.java index c5a62dd12cc8..12f4afffd0d5 100644 --- a/lucene/core/src/java/org/apache/lucene/document/NearestNeighbor.java +++ b/lucene/core/src/java/org/apache/lucene/document/NearestNeighbor.java @@ -28,9 +28,9 @@ import org.apache.lucene.index.PointValues.IntersectVisitor; import org.apache.lucene.index.PointValues.PointTree; import org.apache.lucene.index.PointValues.Relation; +import org.apache.lucene.internal.hppc.IntArrayList; import org.apache.lucene.util.Bits; import org.apache.lucene.util.SloppyMath; -import org.apache.lucene.util.hppc.IntArrayList; /** KNN search on top of 2D lat/lon indexed points. */ class NearestNeighbor { diff --git a/lucene/core/src/java/org/apache/lucene/index/BufferedUpdatesStream.java b/lucene/core/src/java/org/apache/lucene/index/BufferedUpdatesStream.java index 78f862514c6b..d0ed8df6c099 100644 --- a/lucene/core/src/java/org/apache/lucene/index/BufferedUpdatesStream.java +++ b/lucene/core/src/java/org/apache/lucene/index/BufferedUpdatesStream.java @@ -26,13 +26,13 @@ import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; +import org.apache.lucene.internal.hppc.LongHashSet; import org.apache.lucene.store.IOContext; import org.apache.lucene.util.Accountable; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.IOConsumer; import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.InfoStream; -import org.apache.lucene.util.hppc.LongHashSet; /** * Tracks the stream of {@link FrozenBufferedUpdates}. When DocumentsWriterPerThread flushes, its diff --git a/lucene/core/src/java/org/apache/lucene/index/FieldInfos.java b/lucene/core/src/java/org/apache/lucene/index/FieldInfos.java index e2703c60b12a..b28ed59f7e94 100644 --- a/lucene/core/src/java/org/apache/lucene/index/FieldInfos.java +++ b/lucene/core/src/java/org/apache/lucene/index/FieldInfos.java @@ -33,8 +33,8 @@ import java.util.Set; import java.util.stream.Collectors; import java.util.stream.StreamSupport; +import org.apache.lucene.internal.hppc.IntObjectHashMap; import org.apache.lucene.util.CollectionUtil; -import org.apache.lucene.util.hppc.IntObjectHashMap; /** * Collection of {@link FieldInfo}s (accessible by number or by name). 
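[Illustrative sketch, not part of this patch] FieldInfos keeps its per-number lookup in the forked IntObjectHashMap. Assuming the fork keeps HPPC's put/get/size, the field-number lookup pattern is roughly the following (FieldEntry is a stand-in, not a real Lucene type):

    import org.apache.lucene.internal.hppc.IntObjectHashMap;

    class FieldNumberMapSketch {
      record FieldEntry(int number, String name) {}

      public static void main(String[] args) {
        // Field numbers are small ints, so an int-keyed map avoids Integer boxing entirely.
        IntObjectHashMap<FieldEntry> byNumber = new IntObjectHashMap<>();
        byNumber.put(0, new FieldEntry(0, "id"));
        byNumber.put(1, new FieldEntry(1, "body"));
        FieldEntry e = byNumber.get(1);            // returns null when the key is absent
        System.out.println(e.name() + ", size=" + byNumber.size());
      }
    }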
diff --git a/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java b/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java index 65e318a57c9d..44d8bee84607 100644 --- a/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java +++ b/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java @@ -58,6 +58,8 @@ import org.apache.lucene.index.IndexWriterConfig.OpenMode; import org.apache.lucene.index.MergePolicy.MergeReader; import org.apache.lucene.index.Sorter.DocMap; +import org.apache.lucene.internal.hppc.LongObjectHashMap; +import org.apache.lucene.internal.hppc.ObjectCursor; import org.apache.lucene.internal.tests.IndexPackageAccess; import org.apache.lucene.internal.tests.IndexWriterAccess; import org.apache.lucene.internal.tests.TestSecrets; @@ -90,8 +92,6 @@ import org.apache.lucene.util.ThreadInterruptedException; import org.apache.lucene.util.UnicodeUtil; import org.apache.lucene.util.Version; -import org.apache.lucene.util.hppc.LongObjectHashMap; -import org.apache.lucene.util.hppc.ObjectCursor; /** * An IndexWriter creates and maintains an index. diff --git a/lucene/core/src/java/org/apache/lucene/index/SegmentDocValues.java b/lucene/core/src/java/org/apache/lucene/index/SegmentDocValues.java index a24a1a1c5ef5..82caf6ab0ef8 100644 --- a/lucene/core/src/java/org/apache/lucene/index/SegmentDocValues.java +++ b/lucene/core/src/java/org/apache/lucene/index/SegmentDocValues.java @@ -19,12 +19,12 @@ import java.io.IOException; import org.apache.lucene.codecs.DocValuesFormat; import org.apache.lucene.codecs.DocValuesProducer; +import org.apache.lucene.internal.hppc.LongArrayList; +import org.apache.lucene.internal.hppc.LongCursor; +import org.apache.lucene.internal.hppc.LongObjectHashMap; import org.apache.lucene.store.Directory; import org.apache.lucene.store.IOContext; import org.apache.lucene.util.RefCount; -import org.apache.lucene.util.hppc.LongArrayList; -import org.apache.lucene.util.hppc.LongCursor; -import org.apache.lucene.util.hppc.LongObjectHashMap; /** * Manages the {@link DocValuesProducer} held by {@link SegmentReader} and keeps track of their diff --git a/lucene/core/src/java/org/apache/lucene/index/SegmentDocValuesProducer.java b/lucene/core/src/java/org/apache/lucene/index/SegmentDocValuesProducer.java index f0979ddd3e73..63baddb4174d 100644 --- a/lucene/core/src/java/org/apache/lucene/index/SegmentDocValuesProducer.java +++ b/lucene/core/src/java/org/apache/lucene/index/SegmentDocValuesProducer.java @@ -23,8 +23,8 @@ import java.util.Map; import java.util.Set; import org.apache.lucene.codecs.DocValuesProducer; +import org.apache.lucene.internal.hppc.LongArrayList; import org.apache.lucene.store.Directory; -import org.apache.lucene.util.hppc.LongArrayList; /** Encapsulates multiple producers when there are docvalues updates as one producer */ // TODO: try to clean up close? no-op? 
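[Illustrative sketch, not part of this patch] SegmentDocValues and SegmentDocValuesProducer move to the forked LongObjectHashMap/LongArrayList/LongCursor for their per-generation bookkeeping. Assuming the fork keeps HPPC's put/remove and cursor iteration, the pattern is roughly the following (the String value stands in for the real ref-counted producer object):

    import org.apache.lucene.internal.hppc.LongArrayList;
    import org.apache.lucene.internal.hppc.LongCursor;
    import org.apache.lucene.internal.hppc.LongObjectHashMap;

    class GenerationMapSketch {
      public static void main(String[] args) {
        // Track per-generation state keyed by a long gen, in the spirit of SegmentDocValues,
        // and record the generations that will have to be released later.
        LongObjectHashMap<String> byGen = new LongObjectHashMap<>();
        LongArrayList toRelease = new LongArrayList();
        for (long gen : new long[] {-1L, 3L, 7L}) {
          byGen.put(gen, "docvalues producer for gen " + gen);
          toRelease.add(gen);
        }
        for (LongCursor c : toRelease) {
          System.out.println(c.value + " -> " + byGen.remove(c.value)); // remove() returns the old value
        }
      }
    }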
diff --git a/lucene/core/src/java/org/apache/lucene/index/SegmentReader.java b/lucene/core/src/java/org/apache/lucene/index/SegmentReader.java index 98cb01ac6d9b..979d4a7712f5 100644 --- a/lucene/core/src/java/org/apache/lucene/index/SegmentReader.java +++ b/lucene/core/src/java/org/apache/lucene/index/SegmentReader.java @@ -28,13 +28,13 @@ import org.apache.lucene.codecs.PointsReader; import org.apache.lucene.codecs.StoredFieldsReader; import org.apache.lucene.codecs.TermVectorsReader; +import org.apache.lucene.internal.hppc.LongArrayList; import org.apache.lucene.internal.tests.SegmentReaderAccess; import org.apache.lucene.internal.tests.TestSecrets; import org.apache.lucene.store.Directory; import org.apache.lucene.store.IOContext; import org.apache.lucene.util.Bits; import org.apache.lucene.util.IOUtils; -import org.apache.lucene.util.hppc.LongArrayList; /** * IndexReader implementation over a single segment. diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/AbstractIterator.java b/lucene/core/src/java/org/apache/lucene/internal/hppc/AbstractIterator.java similarity index 98% rename from lucene/core/src/java/org/apache/lucene/util/hppc/AbstractIterator.java rename to lucene/core/src/java/org/apache/lucene/internal/hppc/AbstractIterator.java index e49a2108ac95..77601ff6e2db 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hppc/AbstractIterator.java +++ b/lucene/core/src/java/org/apache/lucene/internal/hppc/AbstractIterator.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.lucene.util.hppc; +package org.apache.lucene.internal.hppc; import java.util.Iterator; import java.util.NoSuchElementException; diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/BitMixer.java b/lucene/core/src/java/org/apache/lucene/internal/hppc/BitMixer.java similarity index 98% rename from lucene/core/src/java/org/apache/lucene/util/hppc/BitMixer.java rename to lucene/core/src/java/org/apache/lucene/internal/hppc/BitMixer.java index 51b29396a0f5..fbdf2a2597ca 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hppc/BitMixer.java +++ b/lucene/core/src/java/org/apache/lucene/internal/hppc/BitMixer.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.lucene.util.hppc; +package org.apache.lucene.internal.hppc; /** * Bit mixing utilities. The purpose of these methods is to evenly distribute key space over int32 diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/BufferAllocationException.java b/lucene/core/src/java/org/apache/lucene/internal/hppc/BufferAllocationException.java similarity index 97% rename from lucene/core/src/java/org/apache/lucene/util/hppc/BufferAllocationException.java rename to lucene/core/src/java/org/apache/lucene/internal/hppc/BufferAllocationException.java index bc00b32cd41b..02ba39bc7d28 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hppc/BufferAllocationException.java +++ b/lucene/core/src/java/org/apache/lucene/internal/hppc/BufferAllocationException.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.lucene.util.hppc; +package org.apache.lucene.internal.hppc; import java.util.IllegalFormatException; import java.util.Locale; diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/CharCursor.java b/lucene/core/src/java/org/apache/lucene/internal/hppc/CharCursor.java similarity index 96% rename from lucene/core/src/java/org/apache/lucene/util/hppc/CharCursor.java rename to lucene/core/src/java/org/apache/lucene/internal/hppc/CharCursor.java index ce57dbfa8eea..c11fe77e0923 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hppc/CharCursor.java +++ b/lucene/core/src/java/org/apache/lucene/internal/hppc/CharCursor.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.lucene.util.hppc; +package org.apache.lucene.internal.hppc; /** * Forked from HPPC, holding int index and char value. diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/CharHashSet.java b/lucene/core/src/java/org/apache/lucene/internal/hppc/CharHashSet.java similarity index 93% rename from lucene/core/src/java/org/apache/lucene/util/hppc/CharHashSet.java rename to lucene/core/src/java/org/apache/lucene/internal/hppc/CharHashSet.java index 5ac04527123d..4c13a34da39c 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hppc/CharHashSet.java +++ b/lucene/core/src/java/org/apache/lucene/internal/hppc/CharHashSet.java @@ -15,18 +15,7 @@ * limitations under the License. */ -package org.apache.lucene.util.hppc; - -import static org.apache.lucene.util.hppc.HashContainers.DEFAULT_EXPECTED_ELEMENTS; -import static org.apache.lucene.util.hppc.HashContainers.DEFAULT_LOAD_FACTOR; -import static org.apache.lucene.util.hppc.HashContainers.ITERATION_SEED; -import static org.apache.lucene.util.hppc.HashContainers.MAX_LOAD_FACTOR; -import static org.apache.lucene.util.hppc.HashContainers.MIN_LOAD_FACTOR; -import static org.apache.lucene.util.hppc.HashContainers.checkLoadFactor; -import static org.apache.lucene.util.hppc.HashContainers.expandAtCount; -import static org.apache.lucene.util.hppc.HashContainers.iterationIncrement; -import static org.apache.lucene.util.hppc.HashContainers.minBufferSize; -import static org.apache.lucene.util.hppc.HashContainers.nextBufferSize; +package org.apache.lucene.internal.hppc; import java.util.Arrays; import java.util.Iterator; @@ -78,7 +67,7 @@ public class CharHashSet implements Iterable, Accountable, Cloneable /** New instance with sane defaults. */ public CharHashSet() { - this(DEFAULT_EXPECTED_ELEMENTS); + this(HashContainers.DEFAULT_EXPECTED_ELEMENTS); } /** @@ -88,7 +77,7 @@ public CharHashSet() { * (inclusive). 
*/ public CharHashSet(int expectedElements) { - this(expectedElements, DEFAULT_LOAD_FACTOR); + this(expectedElements, HashContainers.DEFAULT_LOAD_FACTOR); } /** @@ -101,7 +90,7 @@ public CharHashSet(int expectedElements) { */ public CharHashSet(int expectedElements, double loadFactor) { this.loadFactor = verifyLoadFactor(loadFactor); - iterationSeed = ITERATION_SEED.incrementAndGet(); + iterationSeed = HashContainers.ITERATION_SEED.incrementAndGet(); ensureCapacity(expectedElements); } @@ -195,7 +184,7 @@ public char[] toArray() { final char[] keys = this.keys; int seed = nextIterationSeed(); - int inc = iterationIncrement(seed); + int inc = HashContainers.iterationIncrement(seed); for (int i = 0, mask = this.mask, slot = seed & mask; i <= mask; i++, slot = (slot + inc) & mask) { @@ -294,7 +283,7 @@ public void release() { assigned = 0; hasEmptyKey = false; keys = null; - ensureCapacity(DEFAULT_EXPECTED_ELEMENTS); + ensureCapacity(HashContainers.DEFAULT_EXPECTED_ELEMENTS); } public boolean isEmpty() { @@ -310,7 +299,7 @@ public boolean isEmpty() { public void ensureCapacity(int expectedElements) { if (expectedElements > resizeAt || keys == null) { final char[] prevKeys = this.keys; - allocateBuffers(minBufferSize(expectedElements, loadFactor)); + allocateBuffers(HashContainers.minBufferSize(expectedElements, loadFactor)); if (prevKeys != null && !isEmpty()) { rehash(prevKeys); } @@ -362,7 +351,7 @@ public CharHashSet clone() { CharHashSet cloned = (CharHashSet) super.clone(); cloned.keys = keys.clone(); cloned.hasEmptyKey = hasEmptyKey; - cloned.iterationSeed = ITERATION_SEED.incrementAndGet(); + cloned.iterationSeed = HashContainers.ITERATION_SEED.incrementAndGet(); return cloned; } catch (CloneNotSupportedException e) { throw new RuntimeException(e); @@ -398,7 +387,7 @@ protected final class EntryIterator extends AbstractIterator { public EntryIterator() { cursor = new CharCursor(); int seed = nextIterationSeed(); - increment = iterationIncrement(seed); + increment = HashContainers.iterationIncrement(seed); slot = seed & mask; } @@ -591,7 +580,8 @@ public void indexRemove(int index) { * factors. */ protected double verifyLoadFactor(double loadFactor) { - checkLoadFactor(loadFactor, MIN_LOAD_FACTOR, MAX_LOAD_FACTOR); + HashContainers.checkLoadFactor( + loadFactor, HashContainers.MIN_LOAD_FACTOR, HashContainers.MAX_LOAD_FACTOR); return loadFactor; } @@ -633,7 +623,7 @@ protected void allocateBuffers(int arraySize) { e, this.keys == null ? 0 : size(), arraySize); } - this.resizeAt = expandAtCount(arraySize, loadFactor); + this.resizeAt = HashContainers.expandAtCount(arraySize, loadFactor); this.mask = arraySize - 1; } @@ -650,7 +640,7 @@ protected void allocateThenInsertThenRehash(int slot, char pendingKey) { // Try to allocate new buffers first. If we OOM, we leave in a consistent state. 
final char[] prevKeys = this.keys; - allocateBuffers(nextBufferSize(mask + 1, size(), loadFactor)); + allocateBuffers(HashContainers.nextBufferSize(mask + 1, size(), loadFactor)); assert this.keys.length > prevKeys.length; // We have succeeded at allocating new data so insert the pending key/value at diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/CharObjectHashMap.java b/lucene/core/src/java/org/apache/lucene/internal/hppc/CharObjectHashMap.java similarity index 96% rename from lucene/core/src/java/org/apache/lucene/util/hppc/CharObjectHashMap.java rename to lucene/core/src/java/org/apache/lucene/internal/hppc/CharObjectHashMap.java index 973f799f2cdb..b1fad7017b5d 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hppc/CharObjectHashMap.java +++ b/lucene/core/src/java/org/apache/lucene/internal/hppc/CharObjectHashMap.java @@ -15,19 +15,19 @@ * limitations under the License. */ -package org.apache.lucene.util.hppc; - -import static org.apache.lucene.util.hppc.HashContainers.DEFAULT_EXPECTED_ELEMENTS; -import static org.apache.lucene.util.hppc.HashContainers.DEFAULT_LOAD_FACTOR; -import static org.apache.lucene.util.hppc.HashContainers.ITERATION_SEED; -import static org.apache.lucene.util.hppc.HashContainers.MAX_LOAD_FACTOR; -import static org.apache.lucene.util.hppc.HashContainers.MIN_LOAD_FACTOR; -import static org.apache.lucene.util.hppc.HashContainers.checkLoadFactor; -import static org.apache.lucene.util.hppc.HashContainers.checkPowerOfTwo; -import static org.apache.lucene.util.hppc.HashContainers.expandAtCount; -import static org.apache.lucene.util.hppc.HashContainers.iterationIncrement; -import static org.apache.lucene.util.hppc.HashContainers.minBufferSize; -import static org.apache.lucene.util.hppc.HashContainers.nextBufferSize; +package org.apache.lucene.internal.hppc; + +import static org.apache.lucene.internal.hppc.HashContainers.DEFAULT_EXPECTED_ELEMENTS; +import static org.apache.lucene.internal.hppc.HashContainers.DEFAULT_LOAD_FACTOR; +import static org.apache.lucene.internal.hppc.HashContainers.ITERATION_SEED; +import static org.apache.lucene.internal.hppc.HashContainers.MAX_LOAD_FACTOR; +import static org.apache.lucene.internal.hppc.HashContainers.MIN_LOAD_FACTOR; +import static org.apache.lucene.internal.hppc.HashContainers.checkLoadFactor; +import static org.apache.lucene.internal.hppc.HashContainers.checkPowerOfTwo; +import static org.apache.lucene.internal.hppc.HashContainers.expandAtCount; +import static org.apache.lucene.internal.hppc.HashContainers.iterationIncrement; +import static org.apache.lucene.internal.hppc.HashContainers.minBufferSize; +import static org.apache.lucene.internal.hppc.HashContainers.nextBufferSize; import java.util.Arrays; import java.util.Iterator; diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/DoubleCursor.java b/lucene/core/src/java/org/apache/lucene/internal/hppc/DoubleCursor.java similarity index 96% rename from lucene/core/src/java/org/apache/lucene/util/hppc/DoubleCursor.java rename to lucene/core/src/java/org/apache/lucene/internal/hppc/DoubleCursor.java index cadadac1e5e0..7c2bbf53097b 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hppc/DoubleCursor.java +++ b/lucene/core/src/java/org/apache/lucene/internal/hppc/DoubleCursor.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.lucene.util.hppc; +package org.apache.lucene.internal.hppc; /** * Forked from HPPC, holding int index and double value. 
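[Illustrative sketch, not part of this patch] The CharHashSet hunk above shows the sizing knobs the fork inherits from HPPC: the (expectedElements, loadFactor) constructor, ensureCapacity(), and the resizeAt threshold. Assuming the fork also keeps HPPC's add/contains, pre-sizing to avoid rehash churn looks like:

    import org.apache.lucene.internal.hppc.CharHashSet;

    class PresizedSetSketch {
      public static void main(String[] args) {
        // Buffers are sized from (expectedElements, loadFactor) and only grow once resizeAt
        // is crossed, so pre-sizing avoids rehashing while the set is being filled.
        CharHashSet vowels = new CharHashSet(/* expectedElements */ 5, /* loadFactor */ 0.75d);
        for (char c : "aeiou".toCharArray()) {
          vowels.add(c);
        }
        vowels.ensureCapacity(26);                 // grow up front before adding more keys
        System.out.println(vowels.contains('e'));  // true
        System.out.println(vowels.size());         // 5
      }
    }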
diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/FloatArrayList.java b/lucene/core/src/java/org/apache/lucene/internal/hppc/FloatArrayList.java similarity index 99% rename from lucene/core/src/java/org/apache/lucene/util/hppc/FloatArrayList.java rename to lucene/core/src/java/org/apache/lucene/internal/hppc/FloatArrayList.java index e0e17b7b8238..728c0aa96d1b 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hppc/FloatArrayList.java +++ b/lucene/core/src/java/org/apache/lucene/internal/hppc/FloatArrayList.java @@ -15,9 +15,9 @@ * limitations under the License. */ -package org.apache.lucene.util.hppc; +package org.apache.lucene.internal.hppc; -import static org.apache.lucene.util.hppc.HashContainers.DEFAULT_EXPECTED_ELEMENTS; +import static org.apache.lucene.internal.hppc.HashContainers.DEFAULT_EXPECTED_ELEMENTS; import java.util.Arrays; import java.util.Iterator; diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/FloatCursor.java b/lucene/core/src/java/org/apache/lucene/internal/hppc/FloatCursor.java similarity index 96% rename from lucene/core/src/java/org/apache/lucene/util/hppc/FloatCursor.java rename to lucene/core/src/java/org/apache/lucene/internal/hppc/FloatCursor.java index b270f3ba3bf1..1851aa7572b3 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hppc/FloatCursor.java +++ b/lucene/core/src/java/org/apache/lucene/internal/hppc/FloatCursor.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.lucene.util.hppc; +package org.apache.lucene.internal.hppc; /** * Forked from HPPC, holding int index and float value. diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/HashContainers.java b/lucene/core/src/java/org/apache/lucene/internal/hppc/HashContainers.java similarity index 98% rename from lucene/core/src/java/org/apache/lucene/util/hppc/HashContainers.java rename to lucene/core/src/java/org/apache/lucene/internal/hppc/HashContainers.java index ec8d44b40ffa..f65f93d2db59 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hppc/HashContainers.java +++ b/lucene/core/src/java/org/apache/lucene/internal/hppc/HashContainers.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.lucene.util.hppc; +package org.apache.lucene.internal.hppc; import static org.apache.lucene.util.BitUtil.nextHighestPowerOfTwo; diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/IntArrayList.java b/lucene/core/src/java/org/apache/lucene/internal/hppc/IntArrayList.java similarity index 99% rename from lucene/core/src/java/org/apache/lucene/util/hppc/IntArrayList.java rename to lucene/core/src/java/org/apache/lucene/internal/hppc/IntArrayList.java index 286ebc0d4472..992c924630d3 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hppc/IntArrayList.java +++ b/lucene/core/src/java/org/apache/lucene/internal/hppc/IntArrayList.java @@ -15,9 +15,9 @@ * limitations under the License. 
*/ -package org.apache.lucene.util.hppc; +package org.apache.lucene.internal.hppc; -import static org.apache.lucene.util.hppc.HashContainers.DEFAULT_EXPECTED_ELEMENTS; +import static org.apache.lucene.internal.hppc.HashContainers.DEFAULT_EXPECTED_ELEMENTS; import java.util.Arrays; import java.util.Iterator; diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/IntCursor.java b/lucene/core/src/java/org/apache/lucene/internal/hppc/IntCursor.java similarity index 96% rename from lucene/core/src/java/org/apache/lucene/util/hppc/IntCursor.java rename to lucene/core/src/java/org/apache/lucene/internal/hppc/IntCursor.java index cc3b0b63399b..bb79c9167e31 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hppc/IntCursor.java +++ b/lucene/core/src/java/org/apache/lucene/internal/hppc/IntCursor.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.lucene.util.hppc; +package org.apache.lucene.internal.hppc; /** * Forked from HPPC, holding int index and int value. diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/IntDoubleHashMap.java b/lucene/core/src/java/org/apache/lucene/internal/hppc/IntDoubleHashMap.java similarity index 96% rename from lucene/core/src/java/org/apache/lucene/util/hppc/IntDoubleHashMap.java rename to lucene/core/src/java/org/apache/lucene/internal/hppc/IntDoubleHashMap.java index 242336495611..60c4876d9ae5 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hppc/IntDoubleHashMap.java +++ b/lucene/core/src/java/org/apache/lucene/internal/hppc/IntDoubleHashMap.java @@ -15,19 +15,19 @@ * limitations under the License. */ -package org.apache.lucene.util.hppc; - -import static org.apache.lucene.util.hppc.HashContainers.DEFAULT_EXPECTED_ELEMENTS; -import static org.apache.lucene.util.hppc.HashContainers.DEFAULT_LOAD_FACTOR; -import static org.apache.lucene.util.hppc.HashContainers.ITERATION_SEED; -import static org.apache.lucene.util.hppc.HashContainers.MAX_LOAD_FACTOR; -import static org.apache.lucene.util.hppc.HashContainers.MIN_LOAD_FACTOR; -import static org.apache.lucene.util.hppc.HashContainers.checkLoadFactor; -import static org.apache.lucene.util.hppc.HashContainers.checkPowerOfTwo; -import static org.apache.lucene.util.hppc.HashContainers.expandAtCount; -import static org.apache.lucene.util.hppc.HashContainers.iterationIncrement; -import static org.apache.lucene.util.hppc.HashContainers.minBufferSize; -import static org.apache.lucene.util.hppc.HashContainers.nextBufferSize; +package org.apache.lucene.internal.hppc; + +import static org.apache.lucene.internal.hppc.HashContainers.DEFAULT_EXPECTED_ELEMENTS; +import static org.apache.lucene.internal.hppc.HashContainers.DEFAULT_LOAD_FACTOR; +import static org.apache.lucene.internal.hppc.HashContainers.ITERATION_SEED; +import static org.apache.lucene.internal.hppc.HashContainers.MAX_LOAD_FACTOR; +import static org.apache.lucene.internal.hppc.HashContainers.MIN_LOAD_FACTOR; +import static org.apache.lucene.internal.hppc.HashContainers.checkLoadFactor; +import static org.apache.lucene.internal.hppc.HashContainers.checkPowerOfTwo; +import static org.apache.lucene.internal.hppc.HashContainers.expandAtCount; +import static org.apache.lucene.internal.hppc.HashContainers.iterationIncrement; +import static org.apache.lucene.internal.hppc.HashContainers.minBufferSize; +import static org.apache.lucene.internal.hppc.HashContainers.nextBufferSize; import java.util.Arrays; import java.util.Iterator; diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/IntFloatHashMap.java 
b/lucene/core/src/java/org/apache/lucene/internal/hppc/IntFloatHashMap.java similarity index 93% rename from lucene/core/src/java/org/apache/lucene/util/hppc/IntFloatHashMap.java rename to lucene/core/src/java/org/apache/lucene/internal/hppc/IntFloatHashMap.java index 536fd0b1d5ce..045585e7087e 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hppc/IntFloatHashMap.java +++ b/lucene/core/src/java/org/apache/lucene/internal/hppc/IntFloatHashMap.java @@ -15,19 +15,7 @@ * limitations under the License. */ -package org.apache.lucene.util.hppc; - -import static org.apache.lucene.util.hppc.HashContainers.DEFAULT_EXPECTED_ELEMENTS; -import static org.apache.lucene.util.hppc.HashContainers.DEFAULT_LOAD_FACTOR; -import static org.apache.lucene.util.hppc.HashContainers.ITERATION_SEED; -import static org.apache.lucene.util.hppc.HashContainers.MAX_LOAD_FACTOR; -import static org.apache.lucene.util.hppc.HashContainers.MIN_LOAD_FACTOR; -import static org.apache.lucene.util.hppc.HashContainers.checkLoadFactor; -import static org.apache.lucene.util.hppc.HashContainers.checkPowerOfTwo; -import static org.apache.lucene.util.hppc.HashContainers.expandAtCount; -import static org.apache.lucene.util.hppc.HashContainers.iterationIncrement; -import static org.apache.lucene.util.hppc.HashContainers.minBufferSize; -import static org.apache.lucene.util.hppc.HashContainers.nextBufferSize; +package org.apache.lucene.internal.hppc; import java.util.Arrays; import java.util.Iterator; @@ -81,7 +69,7 @@ public class IntFloatHashMap /** New instance with sane defaults. */ public IntFloatHashMap() { - this(DEFAULT_EXPECTED_ELEMENTS); + this(HashContainers.DEFAULT_EXPECTED_ELEMENTS); } /** @@ -91,7 +79,7 @@ public IntFloatHashMap() { * expansion (inclusive). */ public IntFloatHashMap(int expectedElements) { - this(expectedElements, DEFAULT_LOAD_FACTOR); + this(expectedElements, HashContainers.DEFAULT_LOAD_FACTOR); } /** @@ -104,7 +92,7 @@ public IntFloatHashMap(int expectedElements) { */ public IntFloatHashMap(int expectedElements, double loadFactor) { this.loadFactor = verifyLoadFactor(loadFactor); - iterationSeed = ITERATION_SEED.incrementAndGet(); + iterationSeed = HashContainers.ITERATION_SEED.incrementAndGet(); ensureCapacity(expectedElements); } @@ -396,7 +384,7 @@ public void release() { keys = null; values = null; - ensureCapacity(DEFAULT_EXPECTED_ELEMENTS); + ensureCapacity(HashContainers.DEFAULT_EXPECTED_ELEMENTS); } public int size() { @@ -448,7 +436,7 @@ public void ensureCapacity(int expectedElements) { if (expectedElements > resizeAt || keys == null) { final int[] prevKeys = this.keys; final float[] prevValues = this.values; - allocateBuffers(minBufferSize(expectedElements, loadFactor)); + allocateBuffers(HashContainers.minBufferSize(expectedElements, loadFactor)); if (prevKeys != null && !isEmpty()) { rehash(prevKeys, prevValues); } @@ -479,7 +467,7 @@ private final class EntryIterator extends AbstractIterator { public EntryIterator() { cursor = new IntFloatCursor(); int seed = nextIterationSeed(); - increment = iterationIncrement(seed); + increment = HashContainers.iterationIncrement(seed); slot = seed & mask; } @@ -551,7 +539,7 @@ private final class KeysIterator extends AbstractIterator { public KeysIterator() { cursor = new IntCursor(); int seed = nextIterationSeed(); - increment = iterationIncrement(seed); + increment = HashContainers.iterationIncrement(seed); slot = seed & mask; } @@ -618,7 +606,7 @@ private final class ValuesIterator extends AbstractIterator { public ValuesIterator() { cursor = 
new FloatCursor(); int seed = nextIterationSeed(); - increment = iterationIncrement(seed); + increment = HashContainers.iterationIncrement(seed); slot = seed & mask; } @@ -653,7 +641,7 @@ public IntFloatHashMap clone() { cloned.keys = keys.clone(); cloned.values = values.clone(); cloned.hasEmptyKey = hasEmptyKey; - cloned.iterationSeed = ITERATION_SEED.incrementAndGet(); + cloned.iterationSeed = HashContainers.ITERATION_SEED.incrementAndGet(); return cloned; } catch (CloneNotSupportedException e) { throw new RuntimeException(e); @@ -710,13 +698,15 @@ protected int hashKey(int key) { * factors. */ protected double verifyLoadFactor(double loadFactor) { - checkLoadFactor(loadFactor, MIN_LOAD_FACTOR, MAX_LOAD_FACTOR); + HashContainers.checkLoadFactor( + loadFactor, HashContainers.MIN_LOAD_FACTOR, HashContainers.MAX_LOAD_FACTOR); return loadFactor; } /** Rehash from old buffers to new buffers. */ protected void rehash(int[] fromKeys, float[] fromValues) { - assert fromKeys.length == fromValues.length && checkPowerOfTwo(fromKeys.length - 1); + assert fromKeys.length == fromValues.length + && HashContainers.checkPowerOfTwo(fromKeys.length - 1); // Rehash all stored key/value pairs into the new buffers. final int[] keys = this.keys; @@ -762,7 +752,7 @@ protected void allocateBuffers(int arraySize) { e, this.mask + 1, arraySize); } - this.resizeAt = expandAtCount(arraySize, loadFactor); + this.resizeAt = HashContainers.expandAtCount(arraySize, loadFactor); this.mask = arraySize - 1; } @@ -780,7 +770,7 @@ protected void allocateThenInsertThenRehash(int slot, int pendingKey, float pend // Try to allocate new buffers first. If we OOM, we leave in a consistent state. final int[] prevKeys = this.keys; final float[] prevValues = this.values; - allocateBuffers(nextBufferSize(mask + 1, size(), loadFactor)); + allocateBuffers(HashContainers.nextBufferSize(mask + 1, size(), loadFactor)); assert this.keys.length > prevKeys.length; // We have succeeded at allocating new data so insert the pending key/value at diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/IntHashSet.java b/lucene/core/src/java/org/apache/lucene/internal/hppc/IntHashSet.java similarity index 99% rename from lucene/core/src/java/org/apache/lucene/util/hppc/IntHashSet.java rename to lucene/core/src/java/org/apache/lucene/internal/hppc/IntHashSet.java index e67c299dfaf3..82f3299f31eb 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hppc/IntHashSet.java +++ b/lucene/core/src/java/org/apache/lucene/internal/hppc/IntHashSet.java @@ -15,9 +15,9 @@ * limitations under the License. */ -package org.apache.lucene.util.hppc; +package org.apache.lucene.internal.hppc; -import static org.apache.lucene.util.hppc.HashContainers.*; +import static org.apache.lucene.internal.hppc.HashContainers.*; import java.util.Arrays; import java.util.Collection; diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/IntIntHashMap.java b/lucene/core/src/java/org/apache/lucene/internal/hppc/IntIntHashMap.java similarity index 99% rename from lucene/core/src/java/org/apache/lucene/util/hppc/IntIntHashMap.java rename to lucene/core/src/java/org/apache/lucene/internal/hppc/IntIntHashMap.java index 4e69ce912627..70a19bb2d84b 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hppc/IntIntHashMap.java +++ b/lucene/core/src/java/org/apache/lucene/internal/hppc/IntIntHashMap.java @@ -15,9 +15,9 @@ * limitations under the License. 
*/ -package org.apache.lucene.util.hppc; +package org.apache.lucene.internal.hppc; -import static org.apache.lucene.util.hppc.HashContainers.*; +import static org.apache.lucene.internal.hppc.HashContainers.*; import java.util.Arrays; import java.util.Iterator; diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/IntObjectHashMap.java b/lucene/core/src/java/org/apache/lucene/internal/hppc/IntObjectHashMap.java similarity index 99% rename from lucene/core/src/java/org/apache/lucene/util/hppc/IntObjectHashMap.java rename to lucene/core/src/java/org/apache/lucene/internal/hppc/IntObjectHashMap.java index 79bae37532a1..180bb3249f35 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hppc/IntObjectHashMap.java +++ b/lucene/core/src/java/org/apache/lucene/internal/hppc/IntObjectHashMap.java @@ -15,9 +15,9 @@ * limitations under the License. */ -package org.apache.lucene.util.hppc; +package org.apache.lucene.internal.hppc; -import static org.apache.lucene.util.hppc.HashContainers.*; +import static org.apache.lucene.internal.hppc.HashContainers.*; import java.util.Arrays; import java.util.Iterator; diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/LongArrayList.java b/lucene/core/src/java/org/apache/lucene/internal/hppc/LongArrayList.java similarity index 99% rename from lucene/core/src/java/org/apache/lucene/util/hppc/LongArrayList.java rename to lucene/core/src/java/org/apache/lucene/internal/hppc/LongArrayList.java index b971cb36f1c5..dcc37c1a5277 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hppc/LongArrayList.java +++ b/lucene/core/src/java/org/apache/lucene/internal/hppc/LongArrayList.java @@ -15,9 +15,9 @@ * limitations under the License. */ -package org.apache.lucene.util.hppc; +package org.apache.lucene.internal.hppc; -import static org.apache.lucene.util.hppc.HashContainers.DEFAULT_EXPECTED_ELEMENTS; +import static org.apache.lucene.internal.hppc.HashContainers.DEFAULT_EXPECTED_ELEMENTS; import java.util.Arrays; import java.util.Iterator; diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/LongCursor.java b/lucene/core/src/java/org/apache/lucene/internal/hppc/LongCursor.java similarity index 96% rename from lucene/core/src/java/org/apache/lucene/util/hppc/LongCursor.java rename to lucene/core/src/java/org/apache/lucene/internal/hppc/LongCursor.java index b7b37f7160c1..73ebb60f0ff1 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hppc/LongCursor.java +++ b/lucene/core/src/java/org/apache/lucene/internal/hppc/LongCursor.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.lucene.util.hppc; +package org.apache.lucene.internal.hppc; /** * Forked from HPPC, holding int index and long value. diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/LongFloatHashMap.java b/lucene/core/src/java/org/apache/lucene/internal/hppc/LongFloatHashMap.java similarity index 96% rename from lucene/core/src/java/org/apache/lucene/util/hppc/LongFloatHashMap.java rename to lucene/core/src/java/org/apache/lucene/internal/hppc/LongFloatHashMap.java index 9c826272c5fb..bdc77c8a8ddc 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hppc/LongFloatHashMap.java +++ b/lucene/core/src/java/org/apache/lucene/internal/hppc/LongFloatHashMap.java @@ -15,19 +15,19 @@ * limitations under the License. 
*/ -package org.apache.lucene.util.hppc; - -import static org.apache.lucene.util.hppc.HashContainers.DEFAULT_EXPECTED_ELEMENTS; -import static org.apache.lucene.util.hppc.HashContainers.DEFAULT_LOAD_FACTOR; -import static org.apache.lucene.util.hppc.HashContainers.ITERATION_SEED; -import static org.apache.lucene.util.hppc.HashContainers.MAX_LOAD_FACTOR; -import static org.apache.lucene.util.hppc.HashContainers.MIN_LOAD_FACTOR; -import static org.apache.lucene.util.hppc.HashContainers.checkLoadFactor; -import static org.apache.lucene.util.hppc.HashContainers.checkPowerOfTwo; -import static org.apache.lucene.util.hppc.HashContainers.expandAtCount; -import static org.apache.lucene.util.hppc.HashContainers.iterationIncrement; -import static org.apache.lucene.util.hppc.HashContainers.minBufferSize; -import static org.apache.lucene.util.hppc.HashContainers.nextBufferSize; +package org.apache.lucene.internal.hppc; + +import static org.apache.lucene.internal.hppc.HashContainers.DEFAULT_EXPECTED_ELEMENTS; +import static org.apache.lucene.internal.hppc.HashContainers.DEFAULT_LOAD_FACTOR; +import static org.apache.lucene.internal.hppc.HashContainers.ITERATION_SEED; +import static org.apache.lucene.internal.hppc.HashContainers.MAX_LOAD_FACTOR; +import static org.apache.lucene.internal.hppc.HashContainers.MIN_LOAD_FACTOR; +import static org.apache.lucene.internal.hppc.HashContainers.checkLoadFactor; +import static org.apache.lucene.internal.hppc.HashContainers.checkPowerOfTwo; +import static org.apache.lucene.internal.hppc.HashContainers.expandAtCount; +import static org.apache.lucene.internal.hppc.HashContainers.iterationIncrement; +import static org.apache.lucene.internal.hppc.HashContainers.minBufferSize; +import static org.apache.lucene.internal.hppc.HashContainers.nextBufferSize; import java.util.Arrays; import java.util.Iterator; diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/LongHashSet.java b/lucene/core/src/java/org/apache/lucene/internal/hppc/LongHashSet.java similarity index 99% rename from lucene/core/src/java/org/apache/lucene/util/hppc/LongHashSet.java rename to lucene/core/src/java/org/apache/lucene/internal/hppc/LongHashSet.java index 0bbe9e9d98f6..744b334e34ee 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hppc/LongHashSet.java +++ b/lucene/core/src/java/org/apache/lucene/internal/hppc/LongHashSet.java @@ -15,9 +15,9 @@ * limitations under the License. */ -package org.apache.lucene.util.hppc; +package org.apache.lucene.internal.hppc; -import static org.apache.lucene.util.hppc.HashContainers.*; +import static org.apache.lucene.internal.hppc.HashContainers.*; import java.util.Arrays; import java.util.Iterator; diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/LongIntHashMap.java b/lucene/core/src/java/org/apache/lucene/internal/hppc/LongIntHashMap.java similarity index 95% rename from lucene/core/src/java/org/apache/lucene/util/hppc/LongIntHashMap.java rename to lucene/core/src/java/org/apache/lucene/internal/hppc/LongIntHashMap.java index a5f4120d6fc8..d012e4101ad1 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hppc/LongIntHashMap.java +++ b/lucene/core/src/java/org/apache/lucene/internal/hppc/LongIntHashMap.java @@ -15,9 +15,7 @@ * limitations under the License. */ -package org.apache.lucene.util.hppc; - -import static org.apache.lucene.util.hppc.HashContainers.*; +package org.apache.lucene.internal.hppc; import java.util.Arrays; import java.util.Iterator; @@ -71,7 +69,7 @@ public class LongIntHashMap /** New instance with sane defaults. 
*/ public LongIntHashMap() { - this(DEFAULT_EXPECTED_ELEMENTS); + this(HashContainers.DEFAULT_EXPECTED_ELEMENTS); } /** @@ -81,7 +79,7 @@ public LongIntHashMap() { * expansion (inclusive). */ public LongIntHashMap(int expectedElements) { - this(expectedElements, DEFAULT_LOAD_FACTOR); + this(expectedElements, HashContainers.DEFAULT_LOAD_FACTOR); } /** @@ -94,7 +92,7 @@ public LongIntHashMap(int expectedElements) { */ public LongIntHashMap(int expectedElements, double loadFactor) { this.loadFactor = verifyLoadFactor(loadFactor); - iterationSeed = ITERATION_SEED.incrementAndGet(); + iterationSeed = HashContainers.ITERATION_SEED.incrementAndGet(); ensureCapacity(expectedElements); } @@ -386,7 +384,7 @@ public void release() { keys = null; values = null; - ensureCapacity(DEFAULT_EXPECTED_ELEMENTS); + ensureCapacity(HashContainers.DEFAULT_EXPECTED_ELEMENTS); } public int size() { @@ -438,7 +436,7 @@ public void ensureCapacity(int expectedElements) { if (expectedElements > resizeAt || keys == null) { final long[] prevKeys = this.keys; final int[] prevValues = this.values; - allocateBuffers(minBufferSize(expectedElements, loadFactor)); + allocateBuffers(HashContainers.minBufferSize(expectedElements, loadFactor)); if (prevKeys != null && !isEmpty()) { rehash(prevKeys, prevValues); } @@ -469,7 +467,7 @@ private final class EntryIterator extends AbstractIterator { public EntryIterator() { cursor = new LongIntCursor(); int seed = nextIterationSeed(); - increment = iterationIncrement(seed); + increment = HashContainers.iterationIncrement(seed); slot = seed & mask; } @@ -541,7 +539,7 @@ private final class KeysIterator extends AbstractIterator { public KeysIterator() { cursor = new LongCursor(); int seed = nextIterationSeed(); - increment = iterationIncrement(seed); + increment = HashContainers.iterationIncrement(seed); slot = seed & mask; } @@ -608,7 +606,7 @@ private final class ValuesIterator extends AbstractIterator { public ValuesIterator() { cursor = new IntCursor(); int seed = nextIterationSeed(); - increment = iterationIncrement(seed); + increment = HashContainers.iterationIncrement(seed); slot = seed & mask; } @@ -643,7 +641,7 @@ public LongIntHashMap clone() { cloned.keys = keys.clone(); cloned.values = values.clone(); cloned.hasEmptyKey = hasEmptyKey; - cloned.iterationSeed = ITERATION_SEED.incrementAndGet(); + cloned.iterationSeed = HashContainers.ITERATION_SEED.incrementAndGet(); return cloned; } catch (CloneNotSupportedException e) { throw new RuntimeException(e); @@ -700,13 +698,15 @@ protected int hashKey(long key) { * factors. */ protected double verifyLoadFactor(double loadFactor) { - checkLoadFactor(loadFactor, MIN_LOAD_FACTOR, MAX_LOAD_FACTOR); + HashContainers.checkLoadFactor( + loadFactor, HashContainers.MIN_LOAD_FACTOR, HashContainers.MAX_LOAD_FACTOR); return loadFactor; } /** Rehash from old buffers to new buffers. */ protected void rehash(long[] fromKeys, int[] fromValues) { - assert fromKeys.length == fromValues.length && checkPowerOfTwo(fromKeys.length - 1); + assert fromKeys.length == fromValues.length + && HashContainers.checkPowerOfTwo(fromKeys.length - 1); // Rehash all stored key/value pairs into the new buffers. 
final long[] keys = this.keys; @@ -752,7 +752,7 @@ protected void allocateBuffers(int arraySize) { e, this.mask + 1, arraySize); } - this.resizeAt = expandAtCount(arraySize, loadFactor); + this.resizeAt = HashContainers.expandAtCount(arraySize, loadFactor); this.mask = arraySize - 1; } @@ -770,7 +770,7 @@ protected void allocateThenInsertThenRehash(int slot, long pendingKey, int pendi // Try to allocate new buffers first. If we OOM, we leave in a consistent state. final long[] prevKeys = this.keys; final int[] prevValues = this.values; - allocateBuffers(nextBufferSize(mask + 1, size(), loadFactor)); + allocateBuffers(HashContainers.nextBufferSize(mask + 1, size(), loadFactor)); assert this.keys.length > prevKeys.length; // We have succeeded at allocating new data so insert the pending key/value at diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/LongObjectHashMap.java b/lucene/core/src/java/org/apache/lucene/internal/hppc/LongObjectHashMap.java similarity index 99% rename from lucene/core/src/java/org/apache/lucene/util/hppc/LongObjectHashMap.java rename to lucene/core/src/java/org/apache/lucene/internal/hppc/LongObjectHashMap.java index 3a29e363abf7..4bc890b80b1a 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hppc/LongObjectHashMap.java +++ b/lucene/core/src/java/org/apache/lucene/internal/hppc/LongObjectHashMap.java @@ -15,9 +15,9 @@ * limitations under the License. */ -package org.apache.lucene.util.hppc; +package org.apache.lucene.internal.hppc; -import static org.apache.lucene.util.hppc.HashContainers.*; +import static org.apache.lucene.internal.hppc.HashContainers.*; import java.util.Arrays; import java.util.Iterator; diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/ObjectCursor.java b/lucene/core/src/java/org/apache/lucene/internal/hppc/ObjectCursor.java similarity index 97% rename from lucene/core/src/java/org/apache/lucene/util/hppc/ObjectCursor.java rename to lucene/core/src/java/org/apache/lucene/internal/hppc/ObjectCursor.java index fa033b276e56..873bca13fc72 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hppc/ObjectCursor.java +++ b/lucene/core/src/java/org/apache/lucene/internal/hppc/ObjectCursor.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.lucene.util.hppc; +package org.apache.lucene.internal.hppc; /** * Forked from HPPC, holding int index and Object value. diff --git a/lucene/core/src/java/org/apache/lucene/util/hppc/package-info.java b/lucene/core/src/java/org/apache/lucene/internal/hppc/package-info.java similarity index 82% rename from lucene/core/src/java/org/apache/lucene/util/hppc/package-info.java rename to lucene/core/src/java/org/apache/lucene/internal/hppc/package-info.java index 6f395a68a48e..c739206da9ad 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hppc/package-info.java +++ b/lucene/core/src/java/org/apache/lucene/internal/hppc/package-info.java @@ -15,5 +15,8 @@ * limitations under the License. */ -/** package holding hppc related classes. */ -package org.apache.lucene.util.hppc; +/** + * Internal copy of a subset of classes from the HPPC library. 
+ */ +package org.apache.lucene.internal.hppc; diff --git a/lucene/core/src/java/org/apache/lucene/search/MultiPhraseQuery.java b/lucene/core/src/java/org/apache/lucene/search/MultiPhraseQuery.java index 8ad2f5afb180..315b94a2acda 100644 --- a/lucene/core/src/java/org/apache/lucene/search/MultiPhraseQuery.java +++ b/lucene/core/src/java/org/apache/lucene/search/MultiPhraseQuery.java @@ -33,12 +33,12 @@ import org.apache.lucene.index.TermStates; import org.apache.lucene.index.Terms; import org.apache.lucene.index.TermsEnum; +import org.apache.lucene.internal.hppc.IntArrayList; import org.apache.lucene.search.similarities.Similarity; import org.apache.lucene.search.similarities.Similarity.SimScorer; import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.PriorityQueue; -import org.apache.lucene.util.hppc.IntArrayList; /** * A generalized version of {@link PhraseQuery}, with the possibility of adding more than one term diff --git a/lucene/core/src/java/org/apache/lucene/search/PhraseQuery.java b/lucene/core/src/java/org/apache/lucene/search/PhraseQuery.java index 5c771aba2cff..c5a5ee36fd44 100644 --- a/lucene/core/src/java/org/apache/lucene/search/PhraseQuery.java +++ b/lucene/core/src/java/org/apache/lucene/search/PhraseQuery.java @@ -33,11 +33,11 @@ import org.apache.lucene.index.TermStates; import org.apache.lucene.index.Terms; import org.apache.lucene.index.TermsEnum; +import org.apache.lucene.internal.hppc.IntArrayList; import org.apache.lucene.search.similarities.Similarity; import org.apache.lucene.search.similarities.Similarity.SimScorer; import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.hppc.IntArrayList; /** * A Query that matches documents containing a particular sequence of terms. 
A PhraseQuery is built diff --git a/lucene/core/src/java/org/apache/lucene/search/SloppyPhraseMatcher.java b/lucene/core/src/java/org/apache/lucene/search/SloppyPhraseMatcher.java index 863035da17ba..5f043fb7d9d4 100644 --- a/lucene/core/src/java/org/apache/lucene/search/SloppyPhraseMatcher.java +++ b/lucene/core/src/java/org/apache/lucene/search/SloppyPhraseMatcher.java @@ -29,9 +29,9 @@ import org.apache.lucene.index.Impacts; import org.apache.lucene.index.ImpactsSource; import org.apache.lucene.index.Term; +import org.apache.lucene.internal.hppc.IntHashSet; import org.apache.lucene.search.similarities.Similarity.SimScorer; import org.apache.lucene.util.FixedBitSet; -import org.apache.lucene.util.hppc.IntHashSet; /** * Find all slop-valid position-combinations (matches) encountered while traversing/hopping the diff --git a/lucene/core/src/java/org/apache/lucene/util/automaton/Automaton.java b/lucene/core/src/java/org/apache/lucene/util/automaton/Automaton.java index 0b3c444ab157..7ad9eedda6f3 100644 --- a/lucene/core/src/java/org/apache/lucene/util/automaton/Automaton.java +++ b/lucene/core/src/java/org/apache/lucene/util/automaton/Automaton.java @@ -19,12 +19,12 @@ import java.util.Arrays; import java.util.BitSet; import java.util.Objects; +import org.apache.lucene.internal.hppc.IntHashSet; import org.apache.lucene.util.Accountable; import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.InPlaceMergeSorter; import org.apache.lucene.util.RamUsageEstimator; import org.apache.lucene.util.Sorter; -import org.apache.lucene.util.hppc.IntHashSet; // TODO // - could use packed int arrays instead diff --git a/lucene/core/src/java/org/apache/lucene/util/automaton/CompiledAutomaton.java b/lucene/core/src/java/org/apache/lucene/util/automaton/CompiledAutomaton.java index 24d147ae5d8d..c10ff4f28de8 100644 --- a/lucene/core/src/java/org/apache/lucene/util/automaton/CompiledAutomaton.java +++ b/lucene/core/src/java/org/apache/lucene/util/automaton/CompiledAutomaton.java @@ -22,6 +22,7 @@ import org.apache.lucene.index.Term; import org.apache.lucene.index.Terms; import org.apache.lucene.index.TermsEnum; +import org.apache.lucene.internal.hppc.IntArrayList; import org.apache.lucene.search.Query; import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.util.Accountable; @@ -31,7 +32,6 @@ import org.apache.lucene.util.RamUsageEstimator; import org.apache.lucene.util.StringHelper; import org.apache.lucene.util.UnicodeUtil; -import org.apache.lucene.util.hppc.IntArrayList; /** * Immutable class holding compiled details for a given Automaton. The Automaton could either be diff --git a/lucene/core/src/java/org/apache/lucene/util/automaton/LevenshteinAutomata.java b/lucene/core/src/java/org/apache/lucene/util/automaton/LevenshteinAutomata.java index b59bb80e8c59..2a5ba4879458 100644 --- a/lucene/core/src/java/org/apache/lucene/util/automaton/LevenshteinAutomata.java +++ b/lucene/core/src/java/org/apache/lucene/util/automaton/LevenshteinAutomata.java @@ -17,8 +17,8 @@ package org.apache.lucene.util.automaton; import java.util.Arrays; +import org.apache.lucene.internal.hppc.IntHashSet; import org.apache.lucene.util.UnicodeUtil; -import org.apache.lucene.util.hppc.IntHashSet; /** * Class to construct DFAs that match a word within some edit distance. 
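As an illustrative sketch (not taken from this change set): the hunks above only swap import statements, and call sites keep using the same classes. The snippet below shows what dependent code looks like after the move, assuming the fork keeps HPPC's basic IntHashSet API (no-arg constructor, add, contains, size); the class name HppcMoveExample is invented for illustration.

import org.apache.lucene.internal.hppc.IntHashSet;

public class HppcMoveExample {
  public static void main(String[] args) {
    // Same usage as before the rename; only the package in the import changed.
    IntHashSet states = new IntHashSet();
    states.add(7);
    states.add(7); // duplicate int, the set still holds a single entry
    System.out.println(states.contains(7) + " " + states.size()); // prints: true 1
  }
}
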
diff --git a/lucene/core/src/java/org/apache/lucene/util/automaton/NFARunAutomaton.java b/lucene/core/src/java/org/apache/lucene/util/automaton/NFARunAutomaton.java index 02c14ed457de..9737098b37c2 100644 --- a/lucene/core/src/java/org/apache/lucene/util/automaton/NFARunAutomaton.java +++ b/lucene/core/src/java/org/apache/lucene/util/automaton/NFARunAutomaton.java @@ -20,8 +20,8 @@ import java.util.Arrays; import java.util.HashMap; import java.util.Map; +import org.apache.lucene.internal.hppc.BitMixer; import org.apache.lucene.util.ArrayUtil; -import org.apache.lucene.util.hppc.BitMixer; /** * A RunAutomaton that does not require DFA. It will lazily determinize on-demand, memorizing the diff --git a/lucene/core/src/java/org/apache/lucene/util/automaton/Operations.java b/lucene/core/src/java/org/apache/lucene/util/automaton/Operations.java index e8a9ad21a15b..2052b1c50bf5 100644 --- a/lucene/core/src/java/org/apache/lucene/util/automaton/Operations.java +++ b/lucene/core/src/java/org/apache/lucene/util/automaton/Operations.java @@ -39,6 +39,10 @@ import java.util.List; import java.util.Map; import java.util.Set; +import org.apache.lucene.internal.hppc.BitMixer; +import org.apache.lucene.internal.hppc.IntCursor; +import org.apache.lucene.internal.hppc.IntHashSet; +import org.apache.lucene.internal.hppc.IntObjectHashMap; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.BytesRef; @@ -47,10 +51,6 @@ import org.apache.lucene.util.IntsRef; import org.apache.lucene.util.IntsRefBuilder; import org.apache.lucene.util.RamUsageEstimator; -import org.apache.lucene.util.hppc.BitMixer; -import org.apache.lucene.util.hppc.IntCursor; -import org.apache.lucene.util.hppc.IntHashSet; -import org.apache.lucene.util.hppc.IntObjectHashMap; /** * Automata operations. diff --git a/lucene/core/src/java/org/apache/lucene/util/automaton/StateSet.java b/lucene/core/src/java/org/apache/lucene/util/automaton/StateSet.java index f21ed0a1d1ef..b07055ad1ec1 100644 --- a/lucene/core/src/java/org/apache/lucene/util/automaton/StateSet.java +++ b/lucene/core/src/java/org/apache/lucene/util/automaton/StateSet.java @@ -18,9 +18,9 @@ package org.apache.lucene.util.automaton; import java.util.Arrays; -import org.apache.lucene.util.hppc.BitMixer; -import org.apache.lucene.util.hppc.IntCursor; -import org.apache.lucene.util.hppc.IntIntHashMap; +import org.apache.lucene.internal.hppc.BitMixer; +import org.apache.lucene.internal.hppc.IntCursor; +import org.apache.lucene.internal.hppc.IntIntHashMap; /** * A thin wrapper of {@link IntIntHashMap} Maps from state in integer representation to its diff --git a/lucene/core/src/java/org/apache/lucene/util/automaton/UTF32ToUTF8.java b/lucene/core/src/java/org/apache/lucene/util/automaton/UTF32ToUTF8.java index a1734cbad3c4..99648a618a37 100644 --- a/lucene/core/src/java/org/apache/lucene/util/automaton/UTF32ToUTF8.java +++ b/lucene/core/src/java/org/apache/lucene/util/automaton/UTF32ToUTF8.java @@ -17,7 +17,7 @@ package org.apache.lucene.util.automaton; import java.util.Arrays; -import org.apache.lucene.util.hppc.IntArrayList; +import org.apache.lucene.internal.hppc.IntArrayList; // TODO // - do we really need the .bits...? 
if not we can make util in UnicodeUtil to convert 1 char diff --git a/lucene/core/src/java/org/apache/lucene/util/bkd/BKDWriter.java b/lucene/core/src/java/org/apache/lucene/util/bkd/BKDWriter.java index db0158635b50..1f4a7b294ac9 100644 --- a/lucene/core/src/java/org/apache/lucene/util/bkd/BKDWriter.java +++ b/lucene/core/src/java/org/apache/lucene/util/bkd/BKDWriter.java @@ -28,6 +28,7 @@ import org.apache.lucene.index.PointValues; import org.apache.lucene.index.PointValues.IntersectVisitor; import org.apache.lucene.index.PointValues.Relation; +import org.apache.lucene.internal.hppc.LongArrayList; import org.apache.lucene.store.ByteBuffersDataOutput; import org.apache.lucene.store.ChecksumIndexInput; import org.apache.lucene.store.DataOutput; @@ -44,7 +45,6 @@ import org.apache.lucene.util.NumericUtils; import org.apache.lucene.util.PriorityQueue; import org.apache.lucene.util.bkd.BKDUtil.ByteArrayPredicate; -import org.apache.lucene.util.hppc.LongArrayList; // TODO // - allow variable length byte[] (across docs and dims), but this is quite a bit more hairy diff --git a/lucene/core/src/java/org/apache/lucene/util/fst/Util.java b/lucene/core/src/java/org/apache/lucene/util/fst/Util.java index 4ee07a2feda3..15f763f62b29 100644 --- a/lucene/core/src/java/org/apache/lucene/util/fst/Util.java +++ b/lucene/core/src/java/org/apache/lucene/util/fst/Util.java @@ -26,14 +26,14 @@ import java.util.Iterator; import java.util.List; import java.util.TreeSet; +import org.apache.lucene.internal.hppc.IntArrayList; +import org.apache.lucene.internal.hppc.IntCursor; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.IntsRef; import org.apache.lucene.util.IntsRefBuilder; import org.apache.lucene.util.fst.FST.Arc; import org.apache.lucene.util.fst.FST.BytesReader; -import org.apache.lucene.util.hppc.IntArrayList; -import org.apache.lucene.util.hppc.IntCursor; /** * Static helper methods. 
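For the fst and BKD call sites above the pattern is the same: IntArrayList, IntCursor and LongArrayList move packages while their API stays untouched. A small usage sketch follows, assuming the fork keeps HPPC's cursor-based iteration (IntArrayList implementing Iterable of IntCursor, as the paired imports above suggest); CursorIterationExample is an invented name.

import org.apache.lucene.internal.hppc.IntArrayList;
import org.apache.lucene.internal.hppc.IntCursor;

public class CursorIterationExample {
  public static void main(String[] args) {
    IntArrayList path = new IntArrayList();
    path.add(10);
    path.add(20);
    for (IntCursor c : path) {
      // c.index is the position in the list, c.value the stored int
      System.out.println(c.index + " -> " + c.value);
    }
  }
}
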
diff --git a/lucene/core/src/java/org/apache/lucene/util/graph/GraphTokenStreamFiniteStrings.java b/lucene/core/src/java/org/apache/lucene/util/graph/GraphTokenStreamFiniteStrings.java index f8ad4806da1a..fa5895e30085 100644 --- a/lucene/core/src/java/org/apache/lucene/util/graph/GraphTokenStreamFiniteStrings.java +++ b/lucene/core/src/java/org/apache/lucene/util/graph/GraphTokenStreamFiniteStrings.java @@ -30,6 +30,7 @@ import org.apache.lucene.analysis.tokenattributes.PositionLengthAttribute; import org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute; import org.apache.lucene.index.Term; +import org.apache.lucene.internal.hppc.IntArrayList; import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.AttributeSource; import org.apache.lucene.util.IntsRef; @@ -37,7 +38,6 @@ import org.apache.lucene.util.automaton.FiniteStringsIterator; import org.apache.lucene.util.automaton.Operations; import org.apache.lucene.util.automaton.Transition; -import org.apache.lucene.util.hppc.IntArrayList; /** * Consumes a TokenStream and creates an {@link Automaton} where the transition labels are terms diff --git a/lucene/core/src/java/org/apache/lucene/util/hnsw/HnswGraph.java b/lucene/core/src/java/org/apache/lucene/util/hnsw/HnswGraph.java index d0d15b36ae51..ba0b714fdd7c 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hnsw/HnswGraph.java +++ b/lucene/core/src/java/org/apache/lucene/util/hnsw/HnswGraph.java @@ -25,8 +25,8 @@ import java.util.NoSuchElementException; import java.util.PrimitiveIterator; import org.apache.lucene.index.FloatVectorValues; -import org.apache.lucene.util.hppc.IntArrayList; -import org.apache.lucene.util.hppc.IntCursor; +import org.apache.lucene.internal.hppc.IntArrayList; +import org.apache.lucene.internal.hppc.IntCursor; /** * Hierarchical Navigable Small World graph. 
Provides efficient approximate nearest neighbor search diff --git a/lucene/core/src/java/org/apache/lucene/util/hnsw/IncrementalHnswGraphMerger.java b/lucene/core/src/java/org/apache/lucene/util/hnsw/IncrementalHnswGraphMerger.java index 1909af420156..7331111d45a9 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hnsw/IncrementalHnswGraphMerger.java +++ b/lucene/core/src/java/org/apache/lucene/util/hnsw/IncrementalHnswGraphMerger.java @@ -26,12 +26,12 @@ import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.FloatVectorValues; import org.apache.lucene.index.MergeState; +import org.apache.lucene.internal.hppc.IntIntHashMap; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.util.BitSet; import org.apache.lucene.util.Bits; import org.apache.lucene.util.FixedBitSet; import org.apache.lucene.util.InfoStream; -import org.apache.lucene.util.hppc.IntIntHashMap; /** * This selects the biggest Hnsw graph from the provided merge state and initializes a new diff --git a/lucene/core/src/java/org/apache/lucene/util/hnsw/OnHeapHnswGraph.java b/lucene/core/src/java/org/apache/lucene/util/hnsw/OnHeapHnswGraph.java index fbd20b8b9133..d50c96c48720 100644 --- a/lucene/core/src/java/org/apache/lucene/util/hnsw/OnHeapHnswGraph.java +++ b/lucene/core/src/java/org/apache/lucene/util/hnsw/OnHeapHnswGraph.java @@ -21,10 +21,10 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; +import org.apache.lucene.internal.hppc.IntArrayList; import org.apache.lucene.util.Accountable; import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.RamUsageEstimator; -import org.apache.lucene.util.hppc.IntArrayList; /** * An {@link HnswGraph} where all nodes and connections are held in memory. This class is used to diff --git a/lucene/core/src/test/org/apache/lucene/util/hppc/TestCharHashSet.java b/lucene/core/src/test/org/apache/lucene/internal/hppc/TestCharHashSet.java similarity index 99% rename from lucene/core/src/test/org/apache/lucene/util/hppc/TestCharHashSet.java rename to lucene/core/src/test/org/apache/lucene/internal/hppc/TestCharHashSet.java index b98c7c854dbe..1ac24ab34f3a 100644 --- a/lucene/core/src/test/org/apache/lucene/util/hppc/TestCharHashSet.java +++ b/lucene/core/src/test/org/apache/lucene/internal/hppc/TestCharHashSet.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.lucene.util.hppc; +package org.apache.lucene.internal.hppc; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; diff --git a/lucene/core/src/test/org/apache/lucene/util/hppc/TestCharObjectHashMap.java b/lucene/core/src/test/org/apache/lucene/internal/hppc/TestCharObjectHashMap.java similarity index 99% rename from lucene/core/src/test/org/apache/lucene/util/hppc/TestCharObjectHashMap.java rename to lucene/core/src/test/org/apache/lucene/internal/hppc/TestCharObjectHashMap.java index 1a3e58f51a4d..4cc036dcfe65 100644 --- a/lucene/core/src/test/org/apache/lucene/util/hppc/TestCharObjectHashMap.java +++ b/lucene/core/src/test/org/apache/lucene/internal/hppc/TestCharObjectHashMap.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.lucene.util.hppc; +package org.apache.lucene.internal.hppc; import com.carrotsearch.randomizedtesting.RandomizedTest; import java.util.Arrays; diff --git a/lucene/core/src/test/org/apache/lucene/util/hppc/TestFloatArrayList.java b/lucene/core/src/test/org/apache/lucene/internal/hppc/TestFloatArrayList.java similarity index 99% rename from lucene/core/src/test/org/apache/lucene/util/hppc/TestFloatArrayList.java rename to lucene/core/src/test/org/apache/lucene/internal/hppc/TestFloatArrayList.java index f6e156116934..a45f82b258c4 100644 --- a/lucene/core/src/test/org/apache/lucene/util/hppc/TestFloatArrayList.java +++ b/lucene/core/src/test/org/apache/lucene/internal/hppc/TestFloatArrayList.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.lucene.util.hppc; +package org.apache.lucene.internal.hppc; import java.util.Arrays; import java.util.Iterator; diff --git a/lucene/core/src/test/org/apache/lucene/util/hppc/TestIntArrayList.java b/lucene/core/src/test/org/apache/lucene/internal/hppc/TestIntArrayList.java similarity index 99% rename from lucene/core/src/test/org/apache/lucene/util/hppc/TestIntArrayList.java rename to lucene/core/src/test/org/apache/lucene/internal/hppc/TestIntArrayList.java index cf1bed5e772d..a89da9a1d25b 100644 --- a/lucene/core/src/test/org/apache/lucene/util/hppc/TestIntArrayList.java +++ b/lucene/core/src/test/org/apache/lucene/internal/hppc/TestIntArrayList.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.lucene.util.hppc; +package org.apache.lucene.internal.hppc; import java.util.Arrays; import java.util.Iterator; diff --git a/lucene/core/src/test/org/apache/lucene/util/hppc/TestIntDoubleHashMap.java b/lucene/core/src/test/org/apache/lucene/internal/hppc/TestIntDoubleHashMap.java similarity index 99% rename from lucene/core/src/test/org/apache/lucene/util/hppc/TestIntDoubleHashMap.java rename to lucene/core/src/test/org/apache/lucene/internal/hppc/TestIntDoubleHashMap.java index 7cf43175bef0..a873f8df174d 100644 --- a/lucene/core/src/test/org/apache/lucene/util/hppc/TestIntDoubleHashMap.java +++ b/lucene/core/src/test/org/apache/lucene/internal/hppc/TestIntDoubleHashMap.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.lucene.util.hppc; +package org.apache.lucene.internal.hppc; import com.carrotsearch.randomizedtesting.RandomizedTest; import java.util.Arrays; diff --git a/lucene/core/src/test/org/apache/lucene/util/hppc/TestIntFloatHashMap.java b/lucene/core/src/test/org/apache/lucene/internal/hppc/TestIntFloatHashMap.java similarity index 99% rename from lucene/core/src/test/org/apache/lucene/util/hppc/TestIntFloatHashMap.java rename to lucene/core/src/test/org/apache/lucene/internal/hppc/TestIntFloatHashMap.java index 491be681f97b..842ca75e4b59 100644 --- a/lucene/core/src/test/org/apache/lucene/util/hppc/TestIntFloatHashMap.java +++ b/lucene/core/src/test/org/apache/lucene/internal/hppc/TestIntFloatHashMap.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.lucene.util.hppc; +package org.apache.lucene.internal.hppc; import com.carrotsearch.randomizedtesting.RandomizedTest; import java.util.Arrays; diff --git a/lucene/core/src/test/org/apache/lucene/util/hppc/TestIntHashSet.java b/lucene/core/src/test/org/apache/lucene/internal/hppc/TestIntHashSet.java similarity index 99% rename from lucene/core/src/test/org/apache/lucene/util/hppc/TestIntHashSet.java rename to lucene/core/src/test/org/apache/lucene/internal/hppc/TestIntHashSet.java index 2e76b18a6bcc..eb4ce02b577d 100644 --- a/lucene/core/src/test/org/apache/lucene/util/hppc/TestIntHashSet.java +++ b/lucene/core/src/test/org/apache/lucene/internal/hppc/TestIntHashSet.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.lucene.util.hppc; +package org.apache.lucene.internal.hppc; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; diff --git a/lucene/core/src/test/org/apache/lucene/util/hppc/TestIntIntHashMap.java b/lucene/core/src/test/org/apache/lucene/internal/hppc/TestIntIntHashMap.java similarity index 99% rename from lucene/core/src/test/org/apache/lucene/util/hppc/TestIntIntHashMap.java rename to lucene/core/src/test/org/apache/lucene/internal/hppc/TestIntIntHashMap.java index a0ffdd9c451e..6869ab827108 100644 --- a/lucene/core/src/test/org/apache/lucene/util/hppc/TestIntIntHashMap.java +++ b/lucene/core/src/test/org/apache/lucene/internal/hppc/TestIntIntHashMap.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.lucene.util.hppc; +package org.apache.lucene.internal.hppc; import com.carrotsearch.randomizedtesting.RandomizedTest; import java.util.Arrays; diff --git a/lucene/core/src/test/org/apache/lucene/util/hppc/TestIntObjectHashMap.java b/lucene/core/src/test/org/apache/lucene/internal/hppc/TestIntObjectHashMap.java similarity index 99% rename from lucene/core/src/test/org/apache/lucene/util/hppc/TestIntObjectHashMap.java rename to lucene/core/src/test/org/apache/lucene/internal/hppc/TestIntObjectHashMap.java index 9dfaaff713ba..6c6c0872ede5 100644 --- a/lucene/core/src/test/org/apache/lucene/util/hppc/TestIntObjectHashMap.java +++ b/lucene/core/src/test/org/apache/lucene/internal/hppc/TestIntObjectHashMap.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.lucene.util.hppc; +package org.apache.lucene.internal.hppc; import com.carrotsearch.randomizedtesting.RandomizedTest; import java.util.Arrays; diff --git a/lucene/core/src/test/org/apache/lucene/util/hppc/TestLongArrayList.java b/lucene/core/src/test/org/apache/lucene/internal/hppc/TestLongArrayList.java similarity index 99% rename from lucene/core/src/test/org/apache/lucene/util/hppc/TestLongArrayList.java rename to lucene/core/src/test/org/apache/lucene/internal/hppc/TestLongArrayList.java index c5a37b90eeae..0a3c6a9a4869 100644 --- a/lucene/core/src/test/org/apache/lucene/util/hppc/TestLongArrayList.java +++ b/lucene/core/src/test/org/apache/lucene/internal/hppc/TestLongArrayList.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.lucene.util.hppc; +package org.apache.lucene.internal.hppc; import java.util.Arrays; import java.util.Iterator; diff --git a/lucene/core/src/test/org/apache/lucene/util/hppc/TestLongFloatHashMap.java b/lucene/core/src/test/org/apache/lucene/internal/hppc/TestLongFloatHashMap.java similarity index 99% rename from lucene/core/src/test/org/apache/lucene/util/hppc/TestLongFloatHashMap.java rename to lucene/core/src/test/org/apache/lucene/internal/hppc/TestLongFloatHashMap.java index 0f747749b5a0..a482966a5d52 100644 --- a/lucene/core/src/test/org/apache/lucene/util/hppc/TestLongFloatHashMap.java +++ b/lucene/core/src/test/org/apache/lucene/internal/hppc/TestLongFloatHashMap.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.lucene.util.hppc; +package org.apache.lucene.internal.hppc; import com.carrotsearch.randomizedtesting.RandomizedTest; import java.util.Arrays; diff --git a/lucene/core/src/test/org/apache/lucene/util/hppc/TestLongHashSet.java b/lucene/core/src/test/org/apache/lucene/internal/hppc/TestLongHashSet.java similarity index 99% rename from lucene/core/src/test/org/apache/lucene/util/hppc/TestLongHashSet.java rename to lucene/core/src/test/org/apache/lucene/internal/hppc/TestLongHashSet.java index 4c309cea8e3a..71761df5d4e5 100644 --- a/lucene/core/src/test/org/apache/lucene/util/hppc/TestLongHashSet.java +++ b/lucene/core/src/test/org/apache/lucene/internal/hppc/TestLongHashSet.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.lucene.util.hppc; +package org.apache.lucene.internal.hppc; import static org.hamcrest.Matchers.*; diff --git a/lucene/core/src/test/org/apache/lucene/util/hppc/TestLongIntHashMap.java b/lucene/core/src/test/org/apache/lucene/internal/hppc/TestLongIntHashMap.java similarity index 99% rename from lucene/core/src/test/org/apache/lucene/util/hppc/TestLongIntHashMap.java rename to lucene/core/src/test/org/apache/lucene/internal/hppc/TestLongIntHashMap.java index 6bb511a86f58..ba6f8e587554 100644 --- a/lucene/core/src/test/org/apache/lucene/util/hppc/TestLongIntHashMap.java +++ b/lucene/core/src/test/org/apache/lucene/internal/hppc/TestLongIntHashMap.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.lucene.util.hppc; +package org.apache.lucene.internal.hppc; import com.carrotsearch.randomizedtesting.RandomizedTest; import java.util.Arrays; diff --git a/lucene/core/src/test/org/apache/lucene/util/hppc/TestLongObjectHashMap.java b/lucene/core/src/test/org/apache/lucene/internal/hppc/TestLongObjectHashMap.java similarity index 99% rename from lucene/core/src/test/org/apache/lucene/util/hppc/TestLongObjectHashMap.java rename to lucene/core/src/test/org/apache/lucene/internal/hppc/TestLongObjectHashMap.java index ee35df176dfd..f5d6176e24b3 100644 --- a/lucene/core/src/test/org/apache/lucene/util/hppc/TestLongObjectHashMap.java +++ b/lucene/core/src/test/org/apache/lucene/internal/hppc/TestLongObjectHashMap.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.lucene.util.hppc; +package org.apache.lucene.internal.hppc; import com.carrotsearch.randomizedtesting.RandomizedTest; import java.util.Arrays; diff --git a/lucene/distribution.tests/src/test/org/apache/lucene/distribution/TestModularLayer.java b/lucene/distribution.tests/src/test/org/apache/lucene/distribution/TestModularLayer.java index 84d5df2256aa..e5f80ec6cc8d 100644 --- a/lucene/distribution.tests/src/test/org/apache/lucene/distribution/TestModularLayer.java +++ b/lucene/distribution.tests/src/test/org/apache/lucene/distribution/TestModularLayer.java @@ -116,7 +116,7 @@ public static void cleanup() { public void testExpectedDistributionModuleNames() { Assertions.assertThat( allLuceneModules.stream().map(module -> module.descriptor().name()).sorted()) - .containsExactly( + .containsOnly( "org.apache.lucene.analysis.common", "org.apache.lucene.analysis.icu", "org.apache.lucene.analysis.kuromoji", @@ -353,6 +353,9 @@ public void testAllExportedPackagesInSync() throws IOException { moduleExports.removeIf( export -> { boolean isInternal = export.source().startsWith("org.apache.lucene.internal"); + if (isInternal && export.source().equals("org.apache.lucene.internal.hppc")) { + return true; + } if (isInternal) { Assertions.assertThat(export.targets()) .containsExactlyInAnyOrder("org.apache.lucene.test_framework"); diff --git a/lucene/facet/src/java/org/apache/lucene/facet/DrillDownQuery.java b/lucene/facet/src/java/org/apache/lucene/facet/DrillDownQuery.java index d0120d47ec85..6f3b1d572d3d 100644 --- a/lucene/facet/src/java/org/apache/lucene/facet/DrillDownQuery.java +++ b/lucene/facet/src/java/org/apache/lucene/facet/DrillDownQuery.java @@ -23,6 +23,8 @@ import java.util.Map; import java.util.Objects; import org.apache.lucene.index.Term; +import org.apache.lucene.internal.hppc.IntCursor; +import org.apache.lucene.internal.hppc.IntHashSet; import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BoostQuery; @@ -31,8 +33,6 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.TermQuery; -import org.apache.lucene.util.hppc.IntCursor; -import org.apache.lucene.util.hppc.IntHashSet; /** * A {@link Query} for drill-down over facet categories. 
You should call {@link #add(String, diff --git a/lucene/facet/src/java/org/apache/lucene/facet/LongValueFacetCounts.java b/lucene/facet/src/java/org/apache/lucene/facet/LongValueFacetCounts.java index cbbe805d96ef..f83b64912a38 100644 --- a/lucene/facet/src/java/org/apache/lucene/facet/LongValueFacetCounts.java +++ b/lucene/facet/src/java/org/apache/lucene/facet/LongValueFacetCounts.java @@ -28,6 +28,7 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NumericDocValues; import org.apache.lucene.index.SortedNumericDocValues; +import org.apache.lucene.internal.hppc.LongIntHashMap; import org.apache.lucene.search.ConjunctionUtils; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.LongValues; @@ -35,7 +36,6 @@ import org.apache.lucene.util.Bits; import org.apache.lucene.util.InPlaceMergeSorter; import org.apache.lucene.util.PriorityQueue; -import org.apache.lucene.util.hppc.LongIntHashMap; /** * {@link Facets} implementation that computes counts for all unique long values, more efficiently diff --git a/lucene/facet/src/java/org/apache/lucene/facet/StringValueFacetCounts.java b/lucene/facet/src/java/org/apache/lucene/facet/StringValueFacetCounts.java index c3b03e1bcd8e..67ce953067f6 100644 --- a/lucene/facet/src/java/org/apache/lucene/facet/StringValueFacetCounts.java +++ b/lucene/facet/src/java/org/apache/lucene/facet/StringValueFacetCounts.java @@ -29,13 +29,13 @@ import org.apache.lucene.index.ReaderUtil; import org.apache.lucene.index.SortedDocValues; import org.apache.lucene.index.SortedSetDocValues; +import org.apache.lucene.internal.hppc.IntIntHashMap; import org.apache.lucene.search.ConjunctionUtils; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.util.Bits; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.LongValues; -import org.apache.lucene.util.hppc.IntIntHashMap; /** * Compute facet counts from a previously indexed {@link SortedSetDocValues} or {@link diff --git a/lucene/facet/src/java/org/apache/lucene/facet/range/OverlappingLongRangeCounter.java b/lucene/facet/src/java/org/apache/lucene/facet/range/OverlappingLongRangeCounter.java index 045d8d8e2202..c980eff0dd66 100644 --- a/lucene/facet/src/java/org/apache/lucene/facet/range/OverlappingLongRangeCounter.java +++ b/lucene/facet/src/java/org/apache/lucene/facet/range/OverlappingLongRangeCounter.java @@ -19,11 +19,11 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.List; +import org.apache.lucene.internal.hppc.IntArrayList; +import org.apache.lucene.internal.hppc.IntCursor; +import org.apache.lucene.internal.hppc.LongArrayList; +import org.apache.lucene.internal.hppc.LongIntHashMap; import org.apache.lucene.util.FixedBitSet; -import org.apache.lucene.util.hppc.IntArrayList; -import org.apache.lucene.util.hppc.IntCursor; -import org.apache.lucene.util.hppc.LongArrayList; -import org.apache.lucene.util.hppc.LongIntHashMap; /** * This implementation supports requested ranges that overlap. 
Because of this, we use a diff --git a/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/FloatTaxonomyFacets.java b/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/FloatTaxonomyFacets.java index 928663481f32..0b38a6caefd1 100644 --- a/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/FloatTaxonomyFacets.java +++ b/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/FloatTaxonomyFacets.java @@ -21,7 +21,7 @@ import org.apache.lucene.facet.FacetsConfig; import org.apache.lucene.facet.TopOrdAndFloatQueue; import org.apache.lucene.facet.TopOrdAndNumberQueue; -import org.apache.lucene.util.hppc.IntFloatHashMap; +import org.apache.lucene.internal.hppc.IntFloatHashMap; /** Base class for all taxonomy-based facets that aggregate to float. */ abstract class FloatTaxonomyFacets extends TaxonomyFacets { diff --git a/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/IntTaxonomyFacets.java b/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/IntTaxonomyFacets.java index af08e5445c85..0cfc726ac75f 100644 --- a/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/IntTaxonomyFacets.java +++ b/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/IntTaxonomyFacets.java @@ -22,7 +22,7 @@ import org.apache.lucene.facet.FacetsConfig; import org.apache.lucene.facet.TopOrdAndIntQueue; import org.apache.lucene.facet.TopOrdAndNumberQueue; -import org.apache.lucene.util.hppc.IntIntHashMap; +import org.apache.lucene.internal.hppc.IntIntHashMap; /** Base class for all taxonomy-based facets that aggregate to int. */ abstract class IntTaxonomyFacets extends TaxonomyFacets { diff --git a/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/OrdinalMappingLeafReader.java b/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/OrdinalMappingLeafReader.java index 13916eb4a0a1..13be69535772 100644 --- a/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/OrdinalMappingLeafReader.java +++ b/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/OrdinalMappingLeafReader.java @@ -27,8 +27,8 @@ import org.apache.lucene.index.FilterSortedNumericDocValues; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.SortedNumericDocValues; +import org.apache.lucene.internal.hppc.IntArrayList; import org.apache.lucene.search.DocIdSetIterator; -import org.apache.lucene.util.hppc.IntArrayList; /** * A {@link org.apache.lucene.index.FilterLeafReader} for updating facets ordinal references, based diff --git a/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/TaxonomyFacets.java b/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/TaxonomyFacets.java index f9f873dc2593..79f24e28c7cc 100644 --- a/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/TaxonomyFacets.java +++ b/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/TaxonomyFacets.java @@ -34,9 +34,9 @@ import org.apache.lucene.facet.LabelAndValue; import org.apache.lucene.facet.TopOrdAndIntQueue; import org.apache.lucene.facet.TopOrdAndNumberQueue; +import org.apache.lucene.internal.hppc.IntArrayList; +import org.apache.lucene.internal.hppc.IntIntHashMap; import org.apache.lucene.util.PriorityQueue; -import org.apache.lucene.util.hppc.IntArrayList; -import org.apache.lucene.util.hppc.IntIntHashMap; /** Base class for all taxonomy-based facets impls. 
*/ abstract class TaxonomyFacets extends Facets { diff --git a/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/directory/DirectoryTaxonomyReader.java b/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/directory/DirectoryTaxonomyReader.java index 3ef2a6329b3c..418fcceca14b 100644 --- a/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/directory/DirectoryTaxonomyReader.java +++ b/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/directory/DirectoryTaxonomyReader.java @@ -39,6 +39,8 @@ import org.apache.lucene.index.PostingsEnum; import org.apache.lucene.index.ReaderUtil; import org.apache.lucene.index.TermsEnum; +import org.apache.lucene.internal.hppc.IntArrayList; +import org.apache.lucene.internal.hppc.IntCursor; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.store.Directory; import org.apache.lucene.util.Accountable; @@ -51,8 +53,6 @@ import org.apache.lucene.util.InPlaceMergeSorter; import org.apache.lucene.util.RamUsageEstimator; import org.apache.lucene.util.StringSorter; -import org.apache.lucene.util.hppc.IntArrayList; -import org.apache.lucene.util.hppc.IntCursor; /** * A {@link TaxonomyReader} which retrieves stored taxonomy information from a {@link Directory}. diff --git a/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/directory/TaxonomyIndexArrays.java b/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/directory/TaxonomyIndexArrays.java index 2d7975b4bb43..51a7aba16137 100644 --- a/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/directory/TaxonomyIndexArrays.java +++ b/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/directory/TaxonomyIndexArrays.java @@ -27,11 +27,11 @@ import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NumericDocValues; +import org.apache.lucene.internal.hppc.IntHashSet; import org.apache.lucene.util.Accountable; import org.apache.lucene.util.Accountables; import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.RamUsageEstimator; -import org.apache.lucene.util.hppc.IntHashSet; /** * A {@link ParallelTaxonomyArrays} that are initialized from the taxonomy index. 
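The facet changes above (IntIntHashMap, IntFloatHashMap and LongIntHashMap imports) all concern sparse ordinal-to-aggregate maps. A minimal counting sketch, not part of the patch, assuming the fork follows HPPC's convention that get() returns 0 for an absent key; SparseCountExample is an invented name.

import org.apache.lucene.internal.hppc.IntIntHashMap;

public class SparseCountExample {
  public static void main(String[] args) {
    IntIntHashMap counts = new IntIntHashMap();
    for (int ord : new int[] {4, 9, 4}) {
      counts.put(ord, counts.get(ord) + 1); // get() is assumed to return 0 when ord is absent
    }
    System.out.println(counts.get(4)); // prints: 2
  }
}
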
diff --git a/lucene/grouping/src/java/org/apache/lucene/search/grouping/TermGroupSelector.java b/lucene/grouping/src/java/org/apache/lucene/search/grouping/TermGroupSelector.java index 2d3677ef1df2..9a96ec898084 100644 --- a/lucene/grouping/src/java/org/apache/lucene/search/grouping/TermGroupSelector.java +++ b/lucene/grouping/src/java/org/apache/lucene/search/grouping/TermGroupSelector.java @@ -22,10 +22,10 @@ import org.apache.lucene.index.DocValues; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.SortedDocValues; +import org.apache.lucene.internal.hppc.IntIntHashMap; import org.apache.lucene.search.Scorable; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefHash; -import org.apache.lucene.util.hppc.IntIntHashMap; /** A GroupSelector implementation that groups via SortedDocValues */ public class TermGroupSelector extends GroupSelector { diff --git a/lucene/highlighter/src/java/org/apache/lucene/search/vectorhighlight/SimpleBoundaryScanner.java b/lucene/highlighter/src/java/org/apache/lucene/search/vectorhighlight/SimpleBoundaryScanner.java index b46d7a5d08d5..a2fa030d15e0 100644 --- a/lucene/highlighter/src/java/org/apache/lucene/search/vectorhighlight/SimpleBoundaryScanner.java +++ b/lucene/highlighter/src/java/org/apache/lucene/search/vectorhighlight/SimpleBoundaryScanner.java @@ -18,7 +18,7 @@ import java.util.Iterator; import java.util.Set; -import org.apache.lucene.util.hppc.CharHashSet; +import org.apache.lucene.internal.hppc.CharHashSet; /** * Simple boundary scanner implementation that divides fragments based on a set of separator diff --git a/lucene/join/src/java/org/apache/lucene/search/join/DiversifyingNearestChildrenKnnCollector.java b/lucene/join/src/java/org/apache/lucene/search/join/DiversifyingNearestChildrenKnnCollector.java index b71adb4f0141..afab8ac57d90 100644 --- a/lucene/join/src/java/org/apache/lucene/search/join/DiversifyingNearestChildrenKnnCollector.java +++ b/lucene/join/src/java/org/apache/lucene/search/join/DiversifyingNearestChildrenKnnCollector.java @@ -17,13 +17,13 @@ package org.apache.lucene.search.join; +import org.apache.lucene.internal.hppc.IntIntHashMap; import org.apache.lucene.search.AbstractKnnCollector; import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TotalHits; import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.BitSet; -import org.apache.lucene.util.hppc.IntIntHashMap; /** * This collects the nearest children vectors. 
Diversifying the results over the provided parent diff --git a/lucene/join/src/java/org/apache/lucene/search/join/JoinUtil.java b/lucene/join/src/java/org/apache/lucene/search/join/JoinUtil.java index a22d920b83ab..079baa13942d 100644 --- a/lucene/join/src/java/org/apache/lucene/search/join/JoinUtil.java +++ b/lucene/join/src/java/org/apache/lucene/search/join/JoinUtil.java @@ -33,6 +33,11 @@ import org.apache.lucene.index.SortedDocValues; import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.index.SortedSetDocValues; +import org.apache.lucene.internal.hppc.LongArrayList; +import org.apache.lucene.internal.hppc.LongCursor; +import org.apache.lucene.internal.hppc.LongFloatHashMap; +import org.apache.lucene.internal.hppc.LongHashSet; +import org.apache.lucene.internal.hppc.LongIntHashMap; import org.apache.lucene.search.Collector; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchNoDocsQuery; @@ -42,11 +47,6 @@ import org.apache.lucene.search.SimpleCollector; import org.apache.lucene.search.join.DocValuesTermsCollector.Function; import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.hppc.LongArrayList; -import org.apache.lucene.util.hppc.LongCursor; -import org.apache.lucene.util.hppc.LongFloatHashMap; -import org.apache.lucene.util.hppc.LongHashSet; -import org.apache.lucene.util.hppc.LongIntHashMap; /** * Utility for query time joining. diff --git a/lucene/join/src/java/org/apache/lucene/search/join/PointInSetIncludingScoreQuery.java b/lucene/join/src/java/org/apache/lucene/search/join/PointInSetIncludingScoreQuery.java index 218730087a8f..64b481d1dbab 100644 --- a/lucene/join/src/java/org/apache/lucene/search/join/PointInSetIncludingScoreQuery.java +++ b/lucene/join/src/java/org/apache/lucene/search/join/PointInSetIncludingScoreQuery.java @@ -32,6 +32,8 @@ import org.apache.lucene.index.PointValues.Relation; import org.apache.lucene.index.PrefixCodedTerms; import org.apache.lucene.index.PrefixCodedTerms.TermIterator; +import org.apache.lucene.internal.hppc.FloatArrayList; +import org.apache.lucene.internal.hppc.FloatCursor; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.Explanation; import org.apache.lucene.search.IndexSearcher; @@ -47,8 +49,6 @@ import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.FixedBitSet; import org.apache.lucene.util.RamUsageEstimator; -import org.apache.lucene.util.hppc.FloatArrayList; -import org.apache.lucene.util.hppc.FloatCursor; // A TermsIncludingScoreQuery variant for point values: abstract class PointInSetIncludingScoreQuery extends Query implements Accountable { diff --git a/lucene/misc/src/java/org/apache/lucene/misc/document/LazyDocument.java b/lucene/misc/src/java/org/apache/lucene/misc/document/LazyDocument.java index b6818d5e72ae..f09989c02944 100644 --- a/lucene/misc/src/java/org/apache/lucene/misc/document/LazyDocument.java +++ b/lucene/misc/src/java/org/apache/lucene/misc/document/LazyDocument.java @@ -31,8 +31,8 @@ import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.IndexableFieldType; +import org.apache.lucene.internal.hppc.IntObjectHashMap; import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.hppc.IntObjectHashMap; /** * Defers actually loading a field's value until you ask for it. 
You must not use the returned Field diff --git a/lucene/misc/src/java/org/apache/lucene/misc/search/DiversifiedTopDocsCollector.java b/lucene/misc/src/java/org/apache/lucene/misc/search/DiversifiedTopDocsCollector.java index e7a24964c961..1aaa6937051b 100644 --- a/lucene/misc/src/java/org/apache/lucene/misc/search/DiversifiedTopDocsCollector.java +++ b/lucene/misc/src/java/org/apache/lucene/misc/search/DiversifiedTopDocsCollector.java @@ -21,6 +21,7 @@ import java.util.Deque; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NumericDocValues; +import org.apache.lucene.internal.hppc.LongObjectHashMap; import org.apache.lucene.misc.search.DiversifiedTopDocsCollector.ScoreDocKey; import org.apache.lucene.search.LeafCollector; import org.apache.lucene.search.Scorable; @@ -30,7 +31,6 @@ import org.apache.lucene.search.TopDocsCollector; import org.apache.lucene.search.TotalHits; import org.apache.lucene.util.PriorityQueue; -import org.apache.lucene.util.hppc.LongObjectHashMap; /** * A {@link TopDocsCollector} that controls diversity in results by ensuring no more than diff --git a/lucene/queryparser/src/generated/checksums/javaccParserClassic.json b/lucene/queryparser/src/generated/checksums/javaccParserClassic.json index eeb0d527027c..0659521cdcfc 100644 --- a/lucene/queryparser/src/generated/checksums/javaccParserClassic.json +++ b/lucene/queryparser/src/generated/checksums/javaccParserClassic.json @@ -1,7 +1,7 @@ { "lucene/queryparser/src/java/org/apache/lucene/queryparser/classic/ParseException.java": "7a8a8fd5b2ea78f9a17f54cbae8b0e4496e8988e", - "lucene/queryparser/src/java/org/apache/lucene/queryparser/classic/QueryParser.java": "380daae0f6e27b3872d117fc4aef955b1e4296ca", - "lucene/queryparser/src/java/org/apache/lucene/queryparser/classic/QueryParser.jj": "968452b64449655b035fffb45944086c3032732b", + "lucene/queryparser/src/java/org/apache/lucene/queryparser/classic/QueryParser.java": "a2b7d21092d21cbac290cb1ddde5ac161824fb83", + "lucene/queryparser/src/java/org/apache/lucene/queryparser/classic/QueryParser.jj": "ed9f248e1a48cadeeab8f0a79e77e986e34ff721", "lucene/queryparser/src/java/org/apache/lucene/queryparser/classic/QueryParserConstants.java": "e59a3fd38b66a3d56779c55955c1e014225a1f50", "lucene/queryparser/src/java/org/apache/lucene/queryparser/classic/QueryParserTokenManager.java": "dc99a1083bfa50e429d40e114fabe7dd5d434693", "lucene/queryparser/src/java/org/apache/lucene/queryparser/classic/Token.java": "310665ba37d982327fcb55cc3523d629ef29ef54", diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/classic/QueryParser.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/classic/QueryParser.java index 9299377a9089..d390894170e6 100644 --- a/lucene/queryparser/src/java/org/apache/lucene/queryparser/classic/QueryParser.java +++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/classic/QueryParser.java @@ -14,7 +14,7 @@ import org.apache.lucene.search.TermRangeQuery; import org.apache.lucene.queryparser.charstream.CharStream; import org.apache.lucene.queryparser.charstream.FastCharStream; -import org.apache.lucene.util.hppc.IntHashSet; +import org.apache.lucene.internal.hppc.IntHashSet; /** * This class is generated by JavaCC. 
The most important method is diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/classic/QueryParser.jj b/lucene/queryparser/src/java/org/apache/lucene/queryparser/classic/QueryParser.jj index af558d556cac..2a67dcd68ec2 100644 --- a/lucene/queryparser/src/java/org/apache/lucene/queryparser/classic/QueryParser.jj +++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/classic/QueryParser.jj @@ -37,7 +37,7 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.TermRangeQuery; import org.apache.lucene.queryparser.charstream.CharStream; import org.apache.lucene.queryparser.charstream.FastCharStream; -import org.apache.lucene.util.hppc.IntHashSet; +import org.apache.lucene.internal.hppc.IntHashSet; /** * This class is generated by JavaCC. The most important method is diff --git a/lucene/sandbox/src/java/org/apache/lucene/sandbox/document/FloatPointNearestNeighbor.java b/lucene/sandbox/src/java/org/apache/lucene/sandbox/document/FloatPointNearestNeighbor.java index 3f0ced4f5204..bb18ecc1bcb6 100644 --- a/lucene/sandbox/src/java/org/apache/lucene/sandbox/document/FloatPointNearestNeighbor.java +++ b/lucene/sandbox/src/java/org/apache/lucene/sandbox/document/FloatPointNearestNeighbor.java @@ -25,13 +25,13 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.PointValues; import org.apache.lucene.index.PointValues.PointTree; +import org.apache.lucene.internal.hppc.IntArrayList; import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.TopFieldDocs; import org.apache.lucene.search.TotalHits; import org.apache.lucene.util.Bits; -import org.apache.lucene.util.hppc.IntArrayList; /** * KNN search on top of N dimensional indexed float points. 
diff --git a/lucene/sandbox/src/java/org/apache/lucene/sandbox/search/QueryProfilerTree.java b/lucene/sandbox/src/java/org/apache/lucene/sandbox/search/QueryProfilerTree.java index 1044b0aa44ca..83335e5334bf 100644 --- a/lucene/sandbox/src/java/org/apache/lucene/sandbox/search/QueryProfilerTree.java +++ b/lucene/sandbox/src/java/org/apache/lucene/sandbox/search/QueryProfilerTree.java @@ -22,9 +22,9 @@ import java.util.Collections; import java.util.Deque; import java.util.List; +import org.apache.lucene.internal.hppc.IntArrayList; +import org.apache.lucene.internal.hppc.IntCursor; import org.apache.lucene.search.Query; -import org.apache.lucene.util.hppc.IntArrayList; -import org.apache.lucene.util.hppc.IntCursor; /** * This class tracks the dependency tree for queries (scoring and rewriting) and generates {@link diff --git a/lucene/sandbox/src/java/org/apache/lucene/sandbox/search/TermAutomatonQuery.java b/lucene/sandbox/src/java/org/apache/lucene/sandbox/search/TermAutomatonQuery.java index eafa8ed6a01c..1e8855dfda35 100644 --- a/lucene/sandbox/src/java/org/apache/lucene/sandbox/search/TermAutomatonQuery.java +++ b/lucene/sandbox/src/java/org/apache/lucene/sandbox/search/TermAutomatonQuery.java @@ -30,6 +30,7 @@ import org.apache.lucene.index.TermState; import org.apache.lucene.index.TermStates; import org.apache.lucene.index.TermsEnum; +import org.apache.lucene.internal.hppc.IntObjectHashMap; import org.apache.lucene.queries.spans.SpanNearQuery; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.Explanation; @@ -54,7 +55,6 @@ import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.Operations; import org.apache.lucene.util.automaton.Transition; -import org.apache.lucene.util.hppc.IntObjectHashMap; // TODO // - compare perf to PhraseQuery exact and sloppy diff --git a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/util/CachingDoubleValueSource.java b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/util/CachingDoubleValueSource.java index 2f641fccd565..223ef2bfd9e7 100644 --- a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/util/CachingDoubleValueSource.java +++ b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/util/CachingDoubleValueSource.java @@ -18,11 +18,11 @@ import java.io.IOException; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.internal.hppc.IntDoubleHashMap; import org.apache.lucene.search.DoubleValues; import org.apache.lucene.search.DoubleValuesSource; import org.apache.lucene.search.Explanation; import org.apache.lucene.search.IndexSearcher; -import org.apache.lucene.util.hppc.IntDoubleHashMap; /** * Caches the doubleVal of another value source in a HashMap so that it is computed only once. diff --git a/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoStandardPath.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoStandardPath.java index 1b62e6472df8..7c3de452e246 100755 --- a/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoStandardPath.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoStandardPath.java @@ -24,7 +24,7 @@ import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; -import org.apache.lucene.util.hppc.IntArrayList; +import org.apache.lucene.internal.hppc.IntArrayList; /** * GeoShape representing a path across the surface of the globe, with a specified half-width. 
Path diff --git a/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/StandardObjects.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/StandardObjects.java index f40691d71468..d9efdecb4ffb 100644 --- a/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/StandardObjects.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/StandardObjects.java @@ -19,7 +19,7 @@ import java.util.HashMap; import java.util.Map; -import org.apache.lucene.util.hppc.IntObjectHashMap; +import org.apache.lucene.internal.hppc.IntObjectHashMap; /** * Lookup tables for classes that can be serialized using a code. diff --git a/lucene/suggest/src/java/org/apache/lucene/search/suggest/document/ContextQuery.java b/lucene/suggest/src/java/org/apache/lucene/search/suggest/document/ContextQuery.java index 999cf97832e6..c727b1e621c7 100644 --- a/lucene/suggest/src/java/org/apache/lucene/search/suggest/document/ContextQuery.java +++ b/lucene/suggest/src/java/org/apache/lucene/search/suggest/document/ContextQuery.java @@ -21,6 +21,7 @@ import java.util.HashMap; import java.util.Map; import org.apache.lucene.analysis.miscellaneous.ConcatenateGraphFilter; +import org.apache.lucene.internal.hppc.IntHashSet; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; @@ -36,7 +37,6 @@ import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.Operations; import org.apache.lucene.util.fst.Util; -import org.apache.lucene.util.hppc.IntHashSet; /** * A {@link CompletionQuery} that matches documents specified by a wrapped {@link CompletionQuery} From ae4095dbd675b11bd1272d985aeb3dfd983d3536 Mon Sep 17 00:00:00 2001 From: Dawid Weiss Date: Mon, 27 May 2024 10:55:24 +0200 Subject: [PATCH 09/10] Delete hppc license since it's no longer a dependency. --- lucene/licenses/hppc-0.9.1.jar.sha1 | 1 - lucene/licenses/hppc-LICENSE-ASL.txt | 177 --------------------------- lucene/licenses/hppc-NOTICE.txt | 0 3 files changed, 178 deletions(-) delete mode 100644 lucene/licenses/hppc-0.9.1.jar.sha1 delete mode 100644 lucene/licenses/hppc-LICENSE-ASL.txt delete mode 100644 lucene/licenses/hppc-NOTICE.txt diff --git a/lucene/licenses/hppc-0.9.1.jar.sha1 b/lucene/licenses/hppc-0.9.1.jar.sha1 deleted file mode 100644 index 5cedaaa64339..000000000000 --- a/lucene/licenses/hppc-0.9.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -4bf4c51e06aec600894d841c4c004566b20dd357 diff --git a/lucene/licenses/hppc-LICENSE-ASL.txt b/lucene/licenses/hppc-LICENSE-ASL.txt deleted file mode 100644 index f433b1a53f5b..000000000000 --- a/lucene/licenses/hppc-LICENSE-ASL.txt +++ /dev/null @@ -1,177 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. 
For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. 
This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS diff --git a/lucene/licenses/hppc-NOTICE.txt b/lucene/licenses/hppc-NOTICE.txt deleted file mode 100644 index e69de29bb2d1..000000000000 From 82b41fa1a6e8db86d2bda693634f300acdf9ba98 Mon Sep 17 00:00:00 2001 From: Dawid Weiss Date: Mon, 27 May 2024 11:38:54 +0200 Subject: [PATCH 10/10] Added changes entry. I think it's ready to be merged and backported. --- lucene/CHANGES.txt | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt index f75c74d48f8e..6fa2895f2214 100644 --- a/lucene/CHANGES.txt +++ b/lucene/CHANGES.txt @@ -236,6 +236,11 @@ API Changes * GITHUB#13145: Deprecate ByteBufferIndexInput as it will be removed in Lucene 10.0. (Uwe Schindler) +* GITHUB#13422: An explicit dependency on the HPPC library is removed in favor of an internal repackaged copy in + oal.internal.hppc. If you relied on HPPC as a transitive dependency, you'll have to add it to your project explicitly. + The HPPC classes now bundled in Lucene core are internal and will have restricted access in future releases; please do + not use them. (Bruno Roustant, Dawid Weiss, Uwe Schindler, Chris Hegarty) + New Features ---------------------
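
For downstream projects that previously picked up HPPC only as a transitive dependency of Lucene, the CHANGES entry above implies two steps: declare the upstream artifact explicitly (com.carrotsearch:hppc, with 0.9.1 being the version referenced by the removed hppc-0.9.1.jar.sha1 checksum, e.g. implementation 'com.carrotsearch:hppc:0.9.1' in Gradle) and keep importing the upstream com.carrotsearch.hppc classes rather than the repackaged org.apache.lucene.internal.hppc ones, which are internal to Lucene. A minimal sketch follows, assuming the upstream HPPC 0.9.1 API (IntIntHashMap with put and getOrDefault); the class name below is illustrative and not part of this patch.

// Sketch only: keep using the upstream HPPC classes after declaring the
// dependency explicitly; do not switch to org.apache.lucene.internal.hppc,
// which is reserved for Lucene's internal use.
import com.carrotsearch.hppc.IntIntHashMap;

public class HppcMigrationSketch {
  public static void main(String[] args) {
    IntIntHashMap docToOrdinal = new IntIntHashMap();
    docToOrdinal.put(42, 7);                         // primitive keys and values, no boxing
    int ordinal = docToOrdinal.getOrDefault(42, -1); // returns -1 if the key is absent
    System.out.println("ordinal=" + ordinal);
  }
}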