adds: GZip compression feature (#591) #596

Merged
1 commit merged on Jul 10, 2019
@@ -5,7 +5,6 @@
import com.algolia.search.models.HttpResponse;
import com.algolia.search.util.HttpStatusCodeUtils;
import java.io.IOException;
import java.io.InputStream;
import java.net.ConnectException;
import java.net.SocketTimeoutException;
import java.util.Map;
@@ -18,12 +17,12 @@
import org.apache.http.HttpEntity;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.client.entity.DeflateDecompressingEntity;
import org.apache.http.client.entity.EntityBuilder;
import org.apache.http.client.entity.GzipDecompressingEntity;
import org.apache.http.client.methods.*;
import org.apache.http.concurrent.FutureCallback;
import org.apache.http.conn.ConnectTimeoutException;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.InputStreamEntity;
import org.apache.http.impl.nio.client.CloseableHttpAsyncClient;
import org.apache.http.impl.nio.client.HttpAsyncClients;
import org.apache.http.util.EntityUtils;
@@ -121,19 +120,19 @@ private HttpRequestBase buildRequest(HttpRequest algoliaRequest) {

case HttpPost.METHOD_NAME:
HttpPost post = new HttpPost(algoliaRequest.getUri().toString());
if (algoliaRequest.getBody() != null) post.setEntity(addEntity(algoliaRequest.getBody()));
if (algoliaRequest.getBody() != null) post.setEntity(addEntity(algoliaRequest));
post.setConfig(buildRequestConfig(algoliaRequest));
return addHeaders(post, algoliaRequest.getHeaders());

case HttpPut.METHOD_NAME:
HttpPut put = new HttpPut(algoliaRequest.getUri().toString());
if (algoliaRequest.getBody() != null) put.setEntity(addEntity(algoliaRequest.getBody()));
if (algoliaRequest.getBody() != null) put.setEntity(addEntity(algoliaRequest));
put.setConfig(buildRequestConfig(algoliaRequest));
return addHeaders(put, algoliaRequest.getHeaders());

case HttpPatch.METHOD_NAME:
HttpPatch patch = new HttpPatch(algoliaRequest.getUri().toString());
if (algoliaRequest.getBody() != null) patch.setEntity(addEntity(algoliaRequest.getBody()));
if (algoliaRequest.getBody() != null) patch.setEntity(addEntity(algoliaRequest));
patch.setConfig(buildRequestConfig(algoliaRequest));
return addHeaders(patch, algoliaRequest.getHeaders());

@@ -153,11 +152,20 @@ private HttpRequestBase addHeaders(
return request;
}

private HttpEntity addEntity(InputStream data) {
return EntityBuilder.create()
.setStream(data)
.setContentType(ContentType.APPLICATION_JSON)
.build();
private HttpEntity addEntity(@Nonnull HttpRequest request) {
try {
InputStreamEntity entity =
new InputStreamEntity(
request.getBody(), request.getBody().available(), ContentType.APPLICATION_JSON);

if (request.canCompress()) {
entity.setContentEncoding(Defaults.CONTENT_ENCODING_GZIP);
}

return entity;
} catch (IOException e) {
throw new AlgoliaRuntimeException("Error while getting body's content length.", e);
}
}

private static HttpEntity handleCompressedEntity(org.apache.http.HttpEntity entity) {
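Note on the surrounding code: the body of handleCompressedEntity is collapsed in this diff. Judging from the GzipDecompressingEntity and DeflateDecompressingEntity imports kept above, it presumably inspects the response's Content-Encoding and wraps the entity accordingly; a minimal sketch of that idea (structure assumed, not taken from the diff):

private static HttpEntity handleCompressedEntity(org.apache.http.HttpEntity entity) {
  // Sketch only: wrap the entity so callers read a decompressed stream.
  org.apache.http.Header encoding = entity.getContentEncoding();
  if (encoding != null) {
    for (org.apache.http.HeaderElement element : encoding.getElements()) {
      if ("gzip".equalsIgnoreCase(element.getName())) {
        return new GzipDecompressingEntity(entity);
      }
      if ("deflate".equalsIgnoreCase(element.getName())) {
        return new DeflateDecompressingEntity(entity);
      }
    }
  }
  return entity;
}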
@@ -1,5 +1,6 @@
package com.algolia.search;

import com.algolia.search.models.common.CompressionType;
import com.algolia.search.models.indexing.ActionEnum;
import com.algolia.search.models.indexing.BatchOperation;
import com.algolia.search.models.indexing.IndicesResponse;
@@ -29,7 +30,12 @@ public class IntegrationTestExtension
public void beforeAll(ExtensionContext context) throws Exception {
checkEnvironmentVariable();
searchClient = DefaultSearchClient.create(ALGOLIA_APPLICATION_ID_1, ALGOLIA_API_KEY_1);
searchClient2 = DefaultSearchClient.create(ALGOLIA_APPLICATION_ID_2, ALGOLIA_API_KEY_2);
// Disabling gzip for client2 because GZip is not yet enabled on the server
SearchConfig client2Config =
new SearchConfig.Builder(ALGOLIA_APPLICATION_ID_2, ALGOLIA_API_KEY_2)
.setCompressionType(CompressionType.NONE)
.build();
searchClient2 = DefaultSearchClient.create(client2Config);
cleanPreviousIndices();
}

@@ -1,6 +1,7 @@
package com.algolia.search;

import com.algolia.search.models.common.CallType;
import com.algolia.search.models.common.CompressionType;
import java.util.*;
import javax.annotation.Nonnull;

@@ -16,7 +17,7 @@ public static class Builder extends ConfigBase.Builder<Builder> {
* @param apiKey The API Key
*/
public Builder(@Nonnull String applicationID, @Nonnull String apiKey) {
super(applicationID, apiKey, createDefaultHosts());
super(applicationID, apiKey, createDefaultHosts(), CompressionType.NONE);
}

@Override
@@ -1,5 +1,6 @@
package com.algolia.search;

import com.algolia.search.models.common.CompressionType;
import com.algolia.search.util.AlgoliaUtils;
import java.util.HashMap;
import java.util.List;
@@ -23,6 +24,7 @@ public abstract class ConfigBase {
private final Integer connectTimeOut;
private final List<StatefulHost> hosts;
private final ExecutorService executor;
private final CompressionType compressionType;

/** Config base builder to ensure the immutability of the configuration. */
public abstract static class Builder<T extends Builder<T>> {
@@ -36,6 +38,7 @@ public abstract static class Builder<T extends Builder<T>> {
private Integer connectTimeOut;
private List<StatefulHost> hosts;
private ExecutorService executor;
protected CompressionType compressionType;

/**
* Builds a base configuration
@@ -48,13 +51,16 @@ public abstract static class Builder<T extends Builder<T>> {
public Builder(
@Nonnull String applicationID,
@Nonnull String apiKey,
@Nonnull List<StatefulHost> defaultHosts) {
@Nonnull List<StatefulHost> defaultHosts,
@Nonnull CompressionType compressionType) {

this.applicationID = applicationID;
this.apiKey = apiKey;

this.batchSize = 1000;
this.hosts = defaultHosts;
this.connectTimeOut = Defaults.CONNECT_TIMEOUT_MS;
this.compressionType = compressionType;

this.defaultHeaders = new HashMap<>();
this.defaultHeaders.put(Defaults.ALGOLIA_APPLICATION_HEADER, applicationID);
@@ -145,6 +151,7 @@ protected ConfigBase(Builder<?> builder) {
this.applicationID = builder.applicationID;
this.defaultHeaders = builder.defaultHeaders;
this.batchSize = builder.batchSize;
this.compressionType = builder.compressionType;
this.readTimeOut = builder.readTimeOut;
this.writeTimeOut = builder.writeTimeOut;
this.connectTimeOut = builder.connectTimeOut;
@@ -168,6 +175,10 @@ public int getBatchSize() {
return batchSize;
}

public CompressionType getCompressionType() {
return compressionType;
}

public Integer getReadTimeOut() {
return readTimeOut;
}
@@ -17,6 +17,7 @@
import java.util.concurrent.CompletableFuture;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.zip.GZIPOutputStream;
import javax.annotation.Nonnull;

/**
@@ -217,24 +218,38 @@ private <TData> HttpRequest buildRequest(
? requestOptions.getTimeout()
: getTimeOut(callType);

HttpRequest request = new HttpRequest(method, fullPath, headersToSend, timeout);
HttpRequest request =
new HttpRequest(method, fullPath, headersToSend, timeout, config.getCompressionType());

if (data != null) {
try (ByteArrayOutputStream out = new ByteArrayOutputStream()) {
request.setBody(serializeJSON(data, request));
logRequest(request, data);
}

Defaults.getObjectMapper().writeValue(out, data);
return request;
}

ByteArrayInputStream content = new ByteArrayInputStream(out.toByteArray());
private <TData> InputStream serializeJSON(TData data, HttpRequest request) {
if (request.canCompress()) {
try (ByteArrayOutputStream bos = new ByteArrayOutputStream();
GZIPOutputStream gzipOS = new GZIPOutputStream(bos)) {

logRequest(request, data);
request.setBody(content);
Defaults.getObjectMapper().writeValue(gzipOS, data);
return new ByteArrayInputStream(bos.toByteArray());

} catch (IOException e) {
throw new AlgoliaRuntimeException("Error while serializing the request", e);
}
}
} else {
try (ByteArrayOutputStream out = new ByteArrayOutputStream()) {

return request;
Defaults.getObjectMapper().writeValue(out, data);
return new ByteArrayInputStream(out.toByteArray());

} catch (IOException e) {
throw new AlgoliaRuntimeException("Error while serializing the request", e);
}
}
}

/**
@@ -323,19 +338,23 @@ private int getTimeOut(CallType callType) {
}
}

private <T> void logRequest(HttpRequest request, T data) throws JsonProcessingException {
private <T> void logRequest(HttpRequest request, T data) {
if (LOGGER.isLoggable(Level.FINEST)) {
LOGGER.finest(
String.format(
"\n Method: %s \n Path: %s \n Headers: %s",
request.getMethod().toString(), request.getMethodPath(), request.getHeaders()));

LOGGER.finest(
String.format(
"Request body: \n %s ",
Defaults.getObjectMapper()
.writerWithDefaultPrettyPrinter()
.writeValueAsString(data)));
try {
LOGGER.finest(
String.format(
"Request body: \n %s ",
Defaults.getObjectMapper()
.writerWithDefaultPrettyPrinter()
.writeValueAsString(data)));
} catch (JsonProcessingException e) {
throw new AlgoliaRuntimeException("Error while serializing the request", e);
}
}
}

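To illustrate the compression branch of serializeJSON in isolation, here is a minimal, self-contained sketch (plain Jackson and java.util.zip, no Algolia types; the payload is a made-up map) showing that a body written through a GZIPOutputStream inflates back to the original JSON:

import com.fasterxml.jackson.databind.ObjectMapper;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.InputStream;
import java.util.Collections;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;

public class GzipBodyRoundTrip {
  public static void main(String[] args) throws Exception {
    ObjectMapper mapper = new ObjectMapper();
    Object payload = Collections.singletonMap("query", "phone");

    // Compress: same shape as the canCompress() branch of serializeJSON.
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    try (GZIPOutputStream gzipOS = new GZIPOutputStream(bos)) {
      mapper.writeValue(gzipOS, payload);
    }
    InputStream body = new ByteArrayInputStream(bos.toByteArray());

    // Decompress and confirm the original JSON comes back.
    try (InputStream in = new GZIPInputStream(body);
        ByteArrayOutputStream out = new ByteArrayOutputStream()) {
      byte[] buffer = new byte[1024];
      int read;
      while ((read = in.read(buffer)) != -1) {
        out.write(buffer, 0, read);
      }
      System.out.println(out.toString("UTF-8")); // prints {"query":"phone"}
    }
  }
}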
@@ -1,6 +1,7 @@
package com.algolia.search;

import com.algolia.search.models.common.CallType;
import com.algolia.search.models.common.CompressionType;
import com.algolia.search.util.AlgoliaUtils;
import java.util.Collections;
import java.util.EnumSet;
@@ -29,7 +30,7 @@ public Builder(@Nonnull String applicationID, @Nonnull String apiKey) {
* @param apiKey The API Key
*/
public Builder(@Nonnull String applicationID, @Nonnull String apiKey, @Nonnull String region) {
super(applicationID, apiKey, createDefaultHosts(region));
super(applicationID, apiKey, createDefaultHosts(region), CompressionType.NONE);
}

@Override
@@ -1,6 +1,7 @@
package com.algolia.search;

import com.algolia.search.models.common.CallType;
import com.algolia.search.models.common.CompressionType;
import java.util.*;
import java.util.stream.Collectors;
import java.util.stream.Stream;
@@ -13,7 +14,7 @@ public static class Builder extends ConfigBase.Builder<Builder> {

/** Builds a {@link SearchConfig} with the default hosts */
public Builder(@Nonnull String applicationID, @Nonnull String apiKey) {
super(applicationID, apiKey, createDefaultHosts(applicationID));
super(applicationID, apiKey, createDefaultHosts(applicationID), CompressionType.GZIP);
}

@Override
@@ -51,6 +52,12 @@ private static List<StatefulHost> createDefaultHosts(@Nonnull String application

return Stream.concat(hosts.stream(), commonHosts.stream()).collect(Collectors.toList());
}

/** Sets the compression type for the SearchClient. See {@link CompressionType} */
public Builder setCompressionType(@Nonnull CompressionType compressionType) {
this.compressionType = compressionType;
return this;
}
}

private SearchConfig(Builder builder) {
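With this change the search client compresses request bodies by default: the Builder above passes CompressionType.GZIP to the base config, and setCompressionType is the opt-out. A minimal usage sketch, assuming DefaultSearchClient.create(SearchConfig) returns a SearchClient as in the IntegrationTestExtension change earlier in this diff (credentials are placeholders):

// GZIP is the default for SearchConfig; setCompressionType is only needed to opt out.
SearchConfig config =
    new SearchConfig.Builder("YourApplicationID", "YourAdminAPIKey")
        .setCompressionType(CompressionType.NONE)
        .build();

SearchClient client = DefaultSearchClient.create(config);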
@@ -1,12 +1,23 @@
package com.algolia.search.models;

import com.algolia.search.models.common.CompressionType;
import java.io.InputStream;
import java.net.URL;
import java.util.HashMap;
import java.util.Map;

public class HttpRequest {

public HttpRequest(
HttpMethod method,
String methodPath,
Map<String, String> headers,
int timeout,
CompressionType compressionType) {
this(method, methodPath, headers, timeout);
this.compressionType = compressionType;
}

public HttpRequest(
HttpMethod method, String methodPath, Map<String, String> headers, int timeout) {
this.method = method;
@@ -69,6 +80,31 @@ public HttpRequest setTimeout(int timeout) {
return this;
}

public CompressionType getCompressionType() {
return compressionType;
}

public HttpRequest setCompressionType(CompressionType compressionType) {
this.compressionType = compressionType;
return this;
}

/**
* Tells whether compression can be applied to this request. Compression is enabled only for
* POST/PUT methods on the Search API (not on Analytics or Insights).
*/
public boolean canCompress() {
if (this.compressionType == null || this.method == null) {
return false;
}

boolean isMethodValid =
this.method.equals(HttpMethod.POST) || this.method.equals(HttpMethod.PUT);
boolean isCompressionEnabled = this.compressionType.equals(CompressionType.GZIP);

return isMethodValid && isCompressionEnabled;
}

public void incrementTimeout(int retryCount) {
this.timeout *= (retryCount + 1);
}
@@ -79,4 +115,5 @@ public void incrementTimeout(int retryCount) {
private Map<String, String> headers;
private InputStream body;
private int timeout;
private CompressionType compressionType;
}
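A short sketch of the canCompress() contract added above, using the new five-argument constructor from this file (paths, timeout value, and the header map are placeholders):

Map<String, String> headers = new HashMap<>();

// POST on the Search API with GZIP enabled: the body will be compressed.
HttpRequest postQuery =
    new HttpRequest(HttpMethod.POST, "/1/indexes/products/query", headers, 1000, CompressionType.GZIP);
postQuery.canCompress(); // true

// PUT is also eligible for compression.
HttpRequest putObject =
    new HttpRequest(HttpMethod.PUT, "/1/indexes/products/42", headers, 1000, CompressionType.GZIP);
putObject.canCompress(); // true

// Analytics/Insights configs pass CompressionType.NONE, so their requests are never compressed;
// the same goes for any method other than POST/PUT.
HttpRequest postNoGzip =
    new HttpRequest(HttpMethod.POST, "/1/indexes/products/query", headers, 1000, CompressionType.NONE);
postNoGzip.canCompress(); // false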
@@ -0,0 +1,6 @@
package com.algolia.search.models.common;

public enum CompressionType {
NONE,
GZIP
}