Skip to content

Commit

Permalink
Revert "Replace Map<String,Object> with IntObjectHashMap for DV producer (#13686)" (#13810)
Browse files Browse the repository at this point in the history

Reverts "Replace Map<String,Object> with IntObjectHashMap for DV producer (#13686)"

relates #13809
  • Loading branch information
ChrisHegarty authored Sep 20, 2024
1 parent e4ac577 commit 7ef7122
Show file tree
Hide file tree
Showing 6 changed files with 79 additions and 78 deletions.
2 changes: 0 additions & 2 deletions lucene/CHANGES.txt
Original file line number Diff line number Diff line change
Expand Up @@ -420,8 +420,6 @@ Optimizations

* GITHUB#13742: Reorder checks in LRUQueryCache#count (Shubham Chaudhary)

* GITHUB#13686: Replace Map<String,Object> with IntObjectHashMap for DV producer (Pan Guixin)

* GITHUB#13697: Add a bulk scorer to ToParentBlockJoinQuery, which delegates to the bulk scorer of the child query.
This should speed up query evaluation when the child query has a specialized bulk scorer, such as disjunctive queries.
(Mike Pellegrini)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,8 @@
package org.apache.lucene.backward_codecs.lucene80;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import org.apache.lucene.backward_codecs.packed.LegacyDirectMonotonicReader;
import org.apache.lucene.backward_codecs.packed.LegacyDirectReader;
import org.apache.lucene.backward_codecs.store.EndiannessReverserUtil;
Expand All @@ -39,7 +41,6 @@
import org.apache.lucene.index.SortedSetDocValues;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.index.TermsEnum.SeekStatus;
import org.apache.lucene.internal.hppc.IntObjectHashMap;
import org.apache.lucene.store.ByteArrayDataInput;
import org.apache.lucene.store.ChecksumIndexInput;
import org.apache.lucene.store.DataInput;
Expand All @@ -52,11 +53,11 @@

/** reader for {@link Lucene80DocValuesFormat} */
final class Lucene80DocValuesProducer extends DocValuesProducer {
private final IntObjectHashMap<NumericEntry> numerics = new IntObjectHashMap<>();
private final IntObjectHashMap<BinaryEntry> binaries = new IntObjectHashMap<>();
private final IntObjectHashMap<SortedEntry> sorted = new IntObjectHashMap<>();
private final IntObjectHashMap<SortedSetEntry> sortedSets = new IntObjectHashMap<>();
private final IntObjectHashMap<SortedNumericEntry> sortedNumerics = new IntObjectHashMap<>();
private final Map<String, NumericEntry> numerics = new HashMap<>();
private final Map<String, BinaryEntry> binaries = new HashMap<>();
private final Map<String, SortedEntry> sorted = new HashMap<>();
private final Map<String, SortedSetEntry> sortedSets = new HashMap<>();
private final Map<String, SortedNumericEntry> sortedNumerics = new HashMap<>();
private final IndexInput data;
private final int maxDoc;
private int version = -1;
Expand Down Expand Up @@ -138,7 +139,7 @@ private void readFields(String segmentName, IndexInput meta, FieldInfos infos)
}
byte type = meta.readByte();
if (type == Lucene80DocValuesFormat.NUMERIC) {
numerics.put(info.number, readNumeric(meta));
numerics.put(info.name, readNumeric(meta));
} else if (type == Lucene80DocValuesFormat.BINARY) {
final boolean compressed;
if (version >= Lucene80DocValuesFormat.VERSION_CONFIGURABLE_COMPRESSION) {
Expand All @@ -157,13 +158,13 @@ private void readFields(String segmentName, IndexInput meta, FieldInfos infos)
} else {
compressed = version >= Lucene80DocValuesFormat.VERSION_BIN_COMPRESSED;
}
binaries.put(info.number, readBinary(meta, compressed));
binaries.put(info.name, readBinary(meta, compressed));
} else if (type == Lucene80DocValuesFormat.SORTED) {
sorted.put(info.number, readSorted(meta));
sorted.put(info.name, readSorted(meta));
} else if (type == Lucene80DocValuesFormat.SORTED_SET) {
sortedSets.put(info.number, readSortedSet(meta));
sortedSets.put(info.name, readSortedSet(meta));
} else if (type == Lucene80DocValuesFormat.SORTED_NUMERIC) {
sortedNumerics.put(info.number, readSortedNumeric(meta));
sortedNumerics.put(info.name, readSortedNumeric(meta));
} else {
throw new CorruptIndexException("invalid type: " + type, meta);
}
Expand Down Expand Up @@ -425,7 +426,7 @@ private static class SortedNumericEntry extends NumericEntry {

@Override
public NumericDocValues getNumeric(FieldInfo field) throws IOException {
NumericEntry entry = numerics.get(field.number);
NumericEntry entry = numerics.get(field.name);
return getNumeric(entry);
}

Expand Down Expand Up @@ -914,7 +915,7 @@ BytesRef decode(int docNumber) throws IOException {

@Override
public BinaryDocValues getBinary(FieldInfo field) throws IOException {
BinaryEntry entry = binaries.get(field.number);
BinaryEntry entry = binaries.get(field.name);
if (entry.compressed) {
return getCompressedBinary(entry);
} else {
Expand Down Expand Up @@ -972,7 +973,7 @@ public BytesRef binaryValue() throws IOException {

@Override
public SortedDocValues getSorted(FieldInfo field) throws IOException {
SortedEntry entry = sorted.get(field.number);
SortedEntry entry = sorted.get(field.name);
return getSorted(entry);
}

Expand Down Expand Up @@ -1406,7 +1407,7 @@ public int docFreq() throws IOException {

@Override
public SortedNumericDocValues getSortedNumeric(FieldInfo field) throws IOException {
SortedNumericEntry entry = sortedNumerics.get(field.number);
SortedNumericEntry entry = sortedNumerics.get(field.name);
if (entry.numValues == entry.numDocsWithField) {
return DocValues.singleton(getNumeric(entry));
}
Expand Down Expand Up @@ -1542,7 +1543,7 @@ private void set() {

@Override
public SortedSetDocValues getSortedSet(FieldInfo field) throws IOException {
SortedSetEntry entry = sortedSets.get(field.number);
SortedSetEntry entry = sortedSets.get(field.name);
if (entry.singleValueEntry != null) {
return DocValues.singleton(getSorted(entry.singleValueEntry));
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,8 @@
import static org.apache.lucene.codecs.lucene90.Lucene90DocValuesFormat.TERMS_DICT_BLOCK_LZ4_SHIFT;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.codecs.DocValuesProducer;
import org.apache.lucene.index.BaseTermsEnum;
Expand All @@ -41,7 +43,6 @@
import org.apache.lucene.index.SortedSetDocValues;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.index.TermsEnum.SeekStatus;
import org.apache.lucene.internal.hppc.IntObjectHashMap;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.store.ByteArrayDataInput;
import org.apache.lucene.store.ChecksumIndexInput;
Expand All @@ -58,12 +59,12 @@

/** reader for {@link Lucene90DocValuesFormat} */
final class Lucene90DocValuesProducer extends DocValuesProducer {
private final IntObjectHashMap<NumericEntry> numerics;
private final IntObjectHashMap<BinaryEntry> binaries;
private final IntObjectHashMap<SortedEntry> sorted;
private final IntObjectHashMap<SortedSetEntry> sortedSets;
private final IntObjectHashMap<SortedNumericEntry> sortedNumerics;
private final IntObjectHashMap<DocValuesSkipperEntry> skippers;
private final Map<String, NumericEntry> numerics;
private final Map<String, BinaryEntry> binaries;
private final Map<String, SortedEntry> sorted;
private final Map<String, SortedSetEntry> sortedSets;
private final Map<String, SortedNumericEntry> sortedNumerics;
private final Map<String, DocValuesSkipperEntry> skippers;
private final IndexInput data;
private final int maxDoc;
private int version = -1;
Expand All @@ -80,12 +81,12 @@ final class Lucene90DocValuesProducer extends DocValuesProducer {
String metaName =
IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, metaExtension);
this.maxDoc = state.segmentInfo.maxDoc();
numerics = new IntObjectHashMap<>();
binaries = new IntObjectHashMap<>();
sorted = new IntObjectHashMap<>();
sortedSets = new IntObjectHashMap<>();
sortedNumerics = new IntObjectHashMap<>();
skippers = new IntObjectHashMap<>();
numerics = new HashMap<>();
binaries = new HashMap<>();
sorted = new HashMap<>();
sortedSets = new HashMap<>();
sortedNumerics = new HashMap<>();
skippers = new HashMap<>();
merging = false;

// read in the entries from the metadata file.
Expand Down Expand Up @@ -148,12 +149,12 @@ final class Lucene90DocValuesProducer extends DocValuesProducer {

// Used for cloning
private Lucene90DocValuesProducer(
IntObjectHashMap<NumericEntry> numerics,
IntObjectHashMap<BinaryEntry> binaries,
IntObjectHashMap<SortedEntry> sorted,
IntObjectHashMap<SortedSetEntry> sortedSets,
IntObjectHashMap<SortedNumericEntry> sortedNumerics,
IntObjectHashMap<DocValuesSkipperEntry> skippers,
Map<String, NumericEntry> numerics,
Map<String, BinaryEntry> binaries,
Map<String, SortedEntry> sorted,
Map<String, SortedSetEntry> sortedSets,
Map<String, SortedNumericEntry> sortedNumerics,
Map<String, DocValuesSkipperEntry> skippers,
IndexInput data,
int maxDoc,
int version,
Expand Down Expand Up @@ -193,18 +194,18 @@ private void readFields(IndexInput meta, FieldInfos infos) throws IOException {
}
byte type = meta.readByte();
if (info.docValuesSkipIndexType() != DocValuesSkipIndexType.NONE) {
skippers.put(info.number, readDocValueSkipperMeta(meta));
skippers.put(info.name, readDocValueSkipperMeta(meta));
}
if (type == Lucene90DocValuesFormat.NUMERIC) {
numerics.put(info.number, readNumeric(meta));
numerics.put(info.name, readNumeric(meta));
} else if (type == Lucene90DocValuesFormat.BINARY) {
binaries.put(info.number, readBinary(meta));
binaries.put(info.name, readBinary(meta));
} else if (type == Lucene90DocValuesFormat.SORTED) {
sorted.put(info.number, readSorted(meta));
sorted.put(info.name, readSorted(meta));
} else if (type == Lucene90DocValuesFormat.SORTED_SET) {
sortedSets.put(info.number, readSortedSet(meta));
sortedSets.put(info.name, readSortedSet(meta));
} else if (type == Lucene90DocValuesFormat.SORTED_NUMERIC) {
sortedNumerics.put(info.number, readSortedNumeric(meta));
sortedNumerics.put(info.name, readSortedNumeric(meta));
} else {
throw new CorruptIndexException("invalid type: " + type, meta);
}
Expand Down Expand Up @@ -429,7 +430,7 @@ private static class SortedNumericEntry extends NumericEntry {

@Override
public NumericDocValues getNumeric(FieldInfo field) throws IOException {
NumericEntry entry = numerics.get(field.number);
NumericEntry entry = numerics.get(field.name);
return getNumeric(entry);
}

Expand Down Expand Up @@ -785,7 +786,7 @@ public boolean advanceExact(int target) throws IOException {

@Override
public BinaryDocValues getBinary(FieldInfo field) throws IOException {
BinaryEntry entry = binaries.get(field.number);
BinaryEntry entry = binaries.get(field.name);

if (entry.docsWithFieldOffset == -2) {
return DocValues.emptyBinary();
Expand Down Expand Up @@ -890,7 +891,7 @@ public BytesRef binaryValue() throws IOException {

@Override
public SortedDocValues getSorted(FieldInfo field) throws IOException {
SortedEntry entry = sorted.get(field.number);
SortedEntry entry = sorted.get(field.name);
return getSorted(entry);
}

Expand Down Expand Up @@ -1367,7 +1368,7 @@ public int docFreq() throws IOException {

@Override
public SortedNumericDocValues getSortedNumeric(FieldInfo field) throws IOException {
SortedNumericEntry entry = sortedNumerics.get(field.number);
SortedNumericEntry entry = sortedNumerics.get(field.name);
return getSortedNumeric(entry);
}

Expand Down Expand Up @@ -1512,7 +1513,7 @@ private void set() {

@Override
public SortedSetDocValues getSortedSet(FieldInfo field) throws IOException {
SortedSetEntry entry = sortedSets.get(field.number);
SortedSetEntry entry = sortedSets.get(field.name);
if (entry.singleValueEntry != null) {
return DocValues.singleton(getSorted(entry.singleValueEntry));
}
Expand Down Expand Up @@ -1786,7 +1787,7 @@ long getLongValue(long index) throws IOException {

@Override
public DocValuesSkipper getSkipper(FieldInfo field) throws IOException {
final DocValuesSkipperEntry entry = skippers.get(field.number);
final DocValuesSkipperEntry entry = skippers.get(field.name);

final IndexInput input = data.slice("doc value skipper", entry.offset, entry.length);
// Prefetch the first page of data. Following pages are expected to get prefetched through
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,6 @@
import org.apache.lucene.index.SortedDocValues;
import org.apache.lucene.index.SortedNumericDocValues;
import org.apache.lucene.index.SortedSetDocValues;
import org.apache.lucene.internal.hppc.IntObjectHashMap;
import org.apache.lucene.util.IOUtils;

/**
Expand Down Expand Up @@ -255,9 +254,9 @@ static String getFullSegmentSuffix(String outerSegmentSuffix, String segmentSuff
}
}

private static class FieldsReader extends DocValuesProducer {
private class FieldsReader extends DocValuesProducer {

private final IntObjectHashMap<DocValuesProducer> fields = new IntObjectHashMap<>();
private final Map<String, DocValuesProducer> fields = new HashMap<>();
private final Map<String, DocValuesProducer> formats = new HashMap<>();

// clone for merge
Expand All @@ -271,10 +270,10 @@ private static class FieldsReader extends DocValuesProducer {
}

// Then rebuild fields:
for (IntObjectHashMap.IntObjectCursor<DocValuesProducer> ent : other.fields) {
DocValuesProducer producer = oldToNew.get(ent.value);
for (Map.Entry<String, DocValuesProducer> ent : other.fields.entrySet()) {
DocValuesProducer producer = oldToNew.get(ent.getValue());
assert producer != null;
fields.put(ent.key, producer);
fields.put(ent.getKey(), producer);
}
}

Expand Down Expand Up @@ -303,7 +302,7 @@ public FieldsReader(final SegmentReadState readState) throws IOException {
segmentSuffix,
format.fieldsProducer(new SegmentReadState(readState, segmentSuffix)));
}
fields.put(fi.number, formats.get(segmentSuffix));
fields.put(fieldName, formats.get(segmentSuffix));
}
}
}
Expand All @@ -317,37 +316,37 @@ public FieldsReader(final SegmentReadState readState) throws IOException {

@Override
public NumericDocValues getNumeric(FieldInfo field) throws IOException {
DocValuesProducer producer = fields.get(field.number);
DocValuesProducer producer = fields.get(field.name);
return producer == null ? null : producer.getNumeric(field);
}

@Override
public BinaryDocValues getBinary(FieldInfo field) throws IOException {
DocValuesProducer producer = fields.get(field.number);
DocValuesProducer producer = fields.get(field.name);
return producer == null ? null : producer.getBinary(field);
}

@Override
public SortedDocValues getSorted(FieldInfo field) throws IOException {
DocValuesProducer producer = fields.get(field.number);
DocValuesProducer producer = fields.get(field.name);
return producer == null ? null : producer.getSorted(field);
}

@Override
public SortedNumericDocValues getSortedNumeric(FieldInfo field) throws IOException {
DocValuesProducer producer = fields.get(field.number);
DocValuesProducer producer = fields.get(field.name);
return producer == null ? null : producer.getSortedNumeric(field);
}

@Override
public SortedSetDocValues getSortedSet(FieldInfo field) throws IOException {
DocValuesProducer producer = fields.get(field.number);
DocValuesProducer producer = fields.get(field.name);
return producer == null ? null : producer.getSortedSet(field);
}

@Override
public DocValuesSkipper getSkipper(FieldInfo field) throws IOException {
DocValuesProducer producer = fields.get(field.number);
DocValuesProducer producer = fields.get(field.name);
return producer == null ? null : producer.getSkipper(field);
}

Expand Down
Loading

0 comments on commit 7ef7122

Please sign in to comment.