Optional caching of JSON nodes #21

Closed · wants to merge 1 commit
16 changes: 12 additions & 4 deletions sample/Benchmark.java
@@ -4,7 +4,10 @@
import java.util.Random;

import com.fasterxml.jackson.databind.JsonNode;
import com.maxmind.db.CHMCache;
import com.maxmind.db.InvalidDatabaseException;
import com.maxmind.db.NoCache;
import com.maxmind.db.NodeCache;
import com.maxmind.db.Reader;
import com.maxmind.db.Reader.FileMode;

@@ -17,14 +20,19 @@ public class Benchmark {

public static void main(String[] args) throws IOException, InvalidDatabaseException {
File file = new File(args.length > 0 ? args[0] : "GeoLite2-City.mmdb");
loop("Warming up", file, WARMUPS);
loop("Benchmarking", file, BENCHMARKS);
System.out.println("No caching");
loop("Warming up", file, WARMUPS, new NoCache());
loop("Benchmarking", file, BENCHMARKS, new NoCache());

System.out.println("With caching");
loop("Warming up", file, WARMUPS, new CHMCache());
loop("Benchmarking", file, BENCHMARKS, new CHMCache());
}

private static void loop(String msg, File file, int loops) throws IOException {
private static void loop(String msg, File file, int loops, NodeCache cache) throws IOException {
System.out.println(msg);
for (int i = 0; i < loops; i++) {
Reader r = new Reader(file, FileMode.MEMORY_MAPPED);
Reader r = new Reader(file, FileMode.MEMORY_MAPPED, cache);
bench(r, COUNT, i);
}
System.out.println();
51 changes: 51 additions & 0 deletions src/main/java/com/maxmind/db/CHMCache.java
@@ -0,0 +1,51 @@
package com.maxmind.db;

import java.io.IOException;
import java.util.concurrent.ConcurrentHashMap;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ContainerNode;

/**
* A simplistic cache backed by a {@link ConcurrentHashMap}. There is no
* eviction policy; the map simply fills up until it reaches the specified
* capacity (or close to it, since the bounds check is not atomic).
*/
public class CHMCache implements NodeCache {

private static final int DEFAULT_CAPACITY = 4096;

private final int capacity;
private final ConcurrentHashMap<Integer, JsonNode> cache;
private boolean cacheFull = false;

public CHMCache() {
this(DEFAULT_CAPACITY);
}

public CHMCache(int capacity) {
this.capacity = capacity;
this.cache = new ConcurrentHashMap<Integer, JsonNode>(capacity);
}

@Override
public JsonNode get(int key, Loader loader) throws IOException {
Integer k = key;
JsonNode value = cache.get(k);
if (value == null) {
value = loader.load(key);
if (!cacheFull) {
if (cache.size() < capacity) {
cache.put(k, value);
} else {
cacheFull = true;
}
}
}
if (value instanceof ContainerNode) {
value = value.deepCopy();
}
return value;
}

}
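For reference, a minimal usage sketch of the cache together with the new `Reader` overloads. This is a hypothetical example, not part of this PR: it assumes the existing `Reader.get(InetAddress)` lookup method, and the database path, lookup address, and capacity are illustrative.

```java
// Hypothetical usage sketch (not part of this PR). Assumes the existing
// Reader.get(InetAddress) lookup API; path, address, and capacity are illustrative.
import java.io.File;
import java.net.InetAddress;

import com.fasterxml.jackson.databind.JsonNode;
import com.maxmind.db.CHMCache;
import com.maxmind.db.Reader;

public class CachedLookupExample {
    public static void main(String[] args) throws Exception {
        // One Reader (and therefore one cache) shared across lookups; the cache
        // only pays off when the same data nodes are resolved repeatedly.
        Reader reader = new Reader(new File("GeoLite2-City.mmdb"), new CHMCache(8192));
        try {
            JsonNode record = reader.get(InetAddress.getByName("128.101.101.101"));
            System.out.println(record);
        } finally {
            reader.close();
        }
    }
}
```

Note that `CHMCache.get` returns a deep copy for container nodes, so callers can modify the returned object without corrupting the cached value.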
23 changes: 19 additions & 4 deletions src/main/java/com/maxmind/db/Decoder.java
@@ -28,6 +28,8 @@ final class Decoder {
// constructor to set this
boolean POINTER_TEST_HACK = false;

private final NodeCache cache;

private final long pointerBase;

private final CharsetDecoder utfDecoder = UTF_8.newDecoder();
@@ -77,13 +79,26 @@ void setOffset(int offset) {
this.offset = offset;
}

@Override
public String toString() {
return "Result[" + offset + " " + node.getNodeType() + " " + node.asText() + "]";
}

}

Decoder(ByteBuffer buffer, long pointerBase) {
Decoder(NodeCache cache, ByteBuffer buffer, long pointerBase) {
this.cache = cache;
this.pointerBase = pointerBase;
this.buffer = buffer;
}

private final NodeCache.Loader cacheLoader = new NodeCache.Loader() {
@Override
public JsonNode load(int key) throws IOException {
return decode(key).getNode();
}
};

Result decode(int offset) throws IOException {
if (offset >= this.buffer.capacity()) {
throw new InvalidDatabaseException(
Expand Down Expand Up @@ -112,9 +127,9 @@ Result decode(int offset) throws IOException {
return new Result(new LongNode(pointer), newOffset);
}

Result result = this.decode((int) pointer);
result.setOffset(newOffset);
return result;
int targetOffset = (int) pointer;
JsonNode node = cache.get(targetOffset, cacheLoader);
return new Result(node, newOffset);
}

if (type.equals(Type.EXTENDED)) {
17 changes: 17 additions & 0 deletions src/main/java/com/maxmind/db/NoCache.java
@@ -0,0 +1,17 @@
package com.maxmind.db;

import java.io.IOException;

import com.fasterxml.jackson.databind.JsonNode;

/**
* A no-op cache.
*/
public class NoCache implements NodeCache {

@Override
public JsonNode get(int key, Loader loader) throws IOException {
return loader.load(key);
}

}
15 changes: 15 additions & 0 deletions src/main/java/com/maxmind/db/NodeCache.java
@@ -0,0 +1,15 @@
package com.maxmind.db;

import java.io.IOException;

import com.fasterxml.jackson.databind.JsonNode;

public interface NodeCache {

public interface Loader {
JsonNode load(int key) throws IOException;
}

public JsonNode get(int key, Loader loader) throws IOException;

}
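The interface is deliberately small, so alternative eviction policies are easy to plug in. Below is a hypothetical LRU-style implementation, not part of this PR, sketched only to illustrate the contract; unlike `CHMCache` it does not deep-copy container nodes, so callers would have to treat returned nodes as read-only.

```java
package com.maxmind.db; // hypothetical example, not part of this PR

import java.io.IOException;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.Map;

import com.fasterxml.jackson.databind.JsonNode;

/** Illustrative LRU-style NodeCache built on an access-ordered LinkedHashMap. */
public class LruNodeCache implements NodeCache {

    private final Map<Integer, JsonNode> cache;

    public LruNodeCache(final int capacity) {
        // Access-ordered map: the eldest (least recently used) entry is
        // evicted once the capacity is exceeded.
        this.cache = Collections.synchronizedMap(
                new LinkedHashMap<Integer, JsonNode>(capacity, 0.75f, true) {
                    @Override
                    protected boolean removeEldestEntry(Map.Entry<Integer, JsonNode> eldest) {
                        return size() > capacity;
                    }
                });
    }

    @Override
    public JsonNode get(int key, Loader loader) throws IOException {
        JsonNode value = cache.get(key);
        if (value == null) {
            value = loader.load(key);
            cache.put(key, value);
        }
        return value;
    }
}
```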
75 changes: 62 additions & 13 deletions src/main/java/com/maxmind/db/Reader.java
@@ -20,9 +20,12 @@ public final class Reader implements Closeable {
(byte) 0xCD, (byte) 0xEF, 'M', 'a', 'x', 'M', 'i', 'n', 'd', '.',
'c', 'o', 'm'};

private static final NodeCache NO_CACHE = new NoCache();

private final int ipV4Start;
private final Metadata metadata;
private final AtomicReference<BufferHolder> bufferHolderReference;
private final NodeCache cache;

/**
* The file mode to use when opening a MaxMind DB.
@@ -41,47 +44,93 @@ public enum FileMode {
}

/**
* Constructs a Reader for the MaxMind DB format. The file passed to it must
* be a valid MaxMind DB file such as a GeoIP2 database file.
* Constructs a Reader for the MaxMind DB format, with no caching. The file
* passed to it must be a valid MaxMind DB file such as a GeoIP2 database
* file.
*
* @param database the MaxMind DB file to use.
* @throws IOException if there is an error opening or reading from the file.
*/
public Reader(File database) throws IOException {
this(database, FileMode.MEMORY_MAPPED);
this(database, NO_CACHE);
}

/**
* Constructs a Reader as if in mode {@link FileMode#MEMORY}, without using
* a <code>File</code> instance.
* Constructs a Reader for the MaxMind DB format, with the specified backing
* cache. The file passed to it must be a valid MaxMind DB file such as a
* GeoIP2 database file.
*
* @param database the MaxMind DB file to use.
* @param cache backing cache instance
* @throws IOException if there is an error opening or reading from the file.
*/
public Reader(File database, NodeCache cache) throws IOException {
this(database, FileMode.MEMORY_MAPPED, cache);
}

/**
* Constructs a Reader with no caching, as if in mode
* {@link FileMode#MEMORY}, without using a <code>File</code> instance.
*
* @param source the InputStream that contains the MaxMind DB file.
* @throws IOException if there is an error reading from the Stream.
*/
public Reader(InputStream source) throws IOException {
this(new BufferHolder(source), "<InputStream>");
this(source, NO_CACHE);
}

/**
* Constructs a Reader for the MaxMind DB format. The file passed to it must
* be a valid MaxMind DB file such as a GeoIP2 database file.
* Constructs a Reader with the specified backing cache, as if in mode
* {@link FileMode#MEMORY}, without using a <code>File</code> instance.
*
* @param source the InputStream that contains the MaxMind DB file.
* @param cache backing cache instance
* @throws IOException if there is an error reading from the Stream.
*/
public Reader(InputStream source, NodeCache cache) throws IOException {
this(new BufferHolder(source), "<InputStream>", cache);
}

/**
* Constructs a Reader for the MaxMind DB format, with no caching. The file
* passed to it must be a valid MaxMind DB file such as a GeoIP2 database
* file.
*
* @param database the MaxMind DB file to use.
* @param fileMode the mode to open the file with.
* @throws IOException if there is an error opening or reading from the file.
*/
public Reader(File database, FileMode fileMode) throws IOException {
this(new BufferHolder(database, fileMode), database.getName());
this(database, fileMode, NO_CACHE);
}

/**
* Constructs a Reader for the MaxMind DB format, with the specified backing
* cache. The file passed to it must be a valid MaxMind DB file such as a
* GeoIP2 database file.
*
* @param database the MaxMind DB file to use.
* @param fileMode the mode to open the file with.
* @param cache backing cache instance
* @throws IOException if there is an error opening or reading from the file.
*/
public Reader(File database, FileMode fileMode, NodeCache cache) throws IOException {
this(new BufferHolder(database, fileMode), database.getName(), cache);
}

private Reader(BufferHolder bufferHolder, String name) throws IOException {
private Reader(BufferHolder bufferHolder, String name, NodeCache cache) throws IOException {
this.bufferHolderReference = new AtomicReference<BufferHolder>(
bufferHolder);

if (cache == null) {
throw new NullPointerException("Cache cannot be null");
}
this.cache = cache;

ByteBuffer buffer = bufferHolder.get();
int start = this.findMetadataStart(buffer, name);

Decoder metadataDecoder = new Decoder(buffer, start);
Decoder metadataDecoder = new Decoder(this.cache, buffer, start);
this.metadata = new Metadata(metadataDecoder.decode(start).getNode());

this.ipV4Start = this.findIpV4StartNode(buffer);
@@ -200,8 +249,8 @@ private JsonNode resolveDataPointer(ByteBuffer buffer, int pointer)

// We only want the data from the decoder, not the offset where it was
// found.
Decoder decoder = new Decoder(buffer, this.metadata.getSearchTreeSize()
+ DATA_SECTION_SEPARATOR_SIZE);
Decoder decoder = new Decoder(this.cache, buffer,
this.metadata.getSearchTreeSize() + DATA_SECTION_SEPARATOR_SIZE);
return decoder.decode(resolved).getNode();
}

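The new constructors reject a null cache outright rather than treating it as "no caching"; callers that want the old behaviour pass NoCache explicitly, or use the shorter overloads, which do so internally. A short sketch of the intended call patterns (hypothetical example; the database path is illustrative):

```java
// Hypothetical example, not part of this PR; the database path is illustrative.
import java.io.File;

import com.maxmind.db.CHMCache;
import com.maxmind.db.NoCache;
import com.maxmind.db.Reader;
import com.maxmind.db.Reader.FileMode;

public class ReaderConstructionExample {
    public static void main(String[] args) throws Exception {
        File db = new File("GeoLite2-City.mmdb");

        // Uncached, memory-mapped; equivalent to new Reader(db).
        Reader plain = new Reader(db, FileMode.MEMORY_MAPPED, new NoCache());

        // Cached, memory-mapped.
        Reader cached = new Reader(db, FileMode.MEMORY_MAPPED, new CHMCache());

        // new Reader(db, FileMode.MEMORY_MAPPED, null) would throw NullPointerException.

        plain.close();
        cached.close();
    }
}
```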
4 changes: 3 additions & 1 deletion src/test/java/com/maxmind/db/DecoderTest.java
@@ -414,6 +414,8 @@ public void testArrays() throws IOException {
private static <T> void testTypeDecoding(Decoder.Type type, Map<T, byte[]> tests)
throws IOException {

NodeCache cache = new CHMCache();

for (Map.Entry<T, byte[]> entry : tests.entrySet()) {
T expect = entry.getKey();
byte[] input = entry.getValue();
@@ -423,7 +425,7 @@ private static <T> void testTypeDecoding(Decoder.Type type, Map<T, byte[]> tests
MappedByteBuffer mmap = fc.map(MapMode.READ_ONLY, 0, fc.size());
try {

Decoder decoder = new Decoder(mmap, 0);
Decoder decoder = new Decoder(cache, mmap, 0);
decoder.POINTER_TEST_HACK = true;

// XXX - this could be streamlined
2 changes: 1 addition & 1 deletion src/test/java/com/maxmind/db/PointerTest.java
@@ -20,7 +20,7 @@ public void testWithPointers() throws
File file = new File(PointerTest.class.getResource(
"/maxmind-db/test-data/maps-with-pointers.raw").toURI());
BufferHolder ptf = new BufferHolder(file, FileMode.MEMORY);
Decoder decoder = new Decoder(ptf.get(), 0);
Decoder decoder = new Decoder(new NoCache(), ptf.get(), 0);

ObjectMapper om = new ObjectMapper();
