Support head operations (#232)
## Changes
Support head operations

## Tests
Generated the SDK and ran the Files integration tests.
hectorcast-db authored Feb 19, 2024
1 parent 8c41892 commit a583719
Showing 11 changed files with 125 additions and 17 deletions.
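
For context, a minimal sketch of what a head operation looks like from the generated SDK once it is regenerated. `getMetadata` and `getContentType` are taken from the (still commented-out) integration test at the end of this diff; the import path and file path are assumptions:

```java
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.files.GetMetadataResponse;

public class HeadOperationExample {
  public static void main(String[] args) {
    WorkspaceClient workspace = new WorkspaceClient();

    // Backed by an HTTP HEAD request: the response has no body, so every
    // field of GetMetadataResponse is deserialized from response headers.
    GetMetadataResponse metadata =
        workspace.files().getMetadata("/Volumes/main/default/my-volume/hello.txt");
    System.out.println(metadata.getContentType()); // e.g. application/octet-stream
  }
}
```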
4 changes: 0 additions & 4 deletions .codegen/impl.java.tmpl
@@ -33,9 +33,6 @@ class {{.PascalName}}Impl implements {{.PascalName}}Service {
{{ template "api-call" . }}
{{- else if .Response.ArrayValue -}} return apiClient.getCollection(path, null, {{template "type" .Response.ArrayValue}}.class, headers);
{{- else if .Response.MapValue -}} return apiClient.getStringMap(path, {{ template "request-param" .}}, headers);
{{- else if .IsResponseByteStream -}}
InputStream response = {{ template "api-call" . }}
return new {{ .Response.PascalName }}().set{{.ResponseBodyField.PascalName}}(response);
{{- else }}return {{ template "api-call" . }}
{{- end}}
}
@@ -46,7 +43,6 @@ class {{.PascalName}}Impl implements {{.PascalName}}Service {
apiClient.{{.Verb}}(path
{{- if .Request}}, {{ template "request-param" .}}{{end}}
, {{ if not .Response -}}Void
{{- else if .IsResponseByteStream}}InputStream
{{- else}}{{template "type" .Response}}{{- end -}}.class
, headers);
{{- end }}
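With the `IsResponseByteStream` branches removed, generated Impl classes no longer special-case byte-stream responses: the generic call path returns the typed model, and the reworked `ApiClient.deserialize` (below) injects the raw `InputStream` into its `contents` field. A hedged sketch of the kind of method the trimmed template now emits, with an illustrative endpoint and a stubbed response model:

```java
import com.databricks.sdk.core.ApiClient;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;

class FilesImplSketch {
  // Stand-in for a generated byte-stream response model: ApiClient finds the
  // "contents" field via getContentsField and sets it to the raw body.
  public static class DownloadResponse {
    private InputStream contents;

    public InputStream getContents() {
      return contents;
    }
  }

  private final ApiClient apiClient;

  FilesImplSketch(ApiClient apiClient) {
    this.apiClient = apiClient;
  }

  // One generic call for byte streams and JSON alike; no manual wrapping.
  public DownloadResponse download(String filePath) {
    String path = String.format("/api/2.0/fs/files%s", filePath);
    Map<String, String> headers = new HashMap<>();
    headers.put("Accept", "application/octet-stream");
    return apiClient.GET(path, DownloadResponse.class, headers);
  }
}
```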
3 changes: 2 additions & 1 deletion .codegen/model.java.tmpl
@@ -10,6 +10,7 @@ import java.util.Collection;
import java.util.Objects;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Header;
import com.databricks.sdk.support.QueryParam;
import com.databricks.sdk.support.ToStringer;

@@ -29,7 +30,7 @@ public class {{.PascalName}} {
/**
{{.Comment " * " 80}}
*/
{{if .IsJson}}@JsonProperty("{{.Name}}"){{end}}{{if .IsQuery}}@QueryParam("{{.Name}}"){{end}}
{{if .IsJson}}@JsonProperty("{{.Name}}"){{end}}{{if .IsQuery}}@QueryParam("{{.Name}}"){{end}}{{if .IsHeader}}@Header("{{.Name}}"){{end}}
private {{template "type" .Entity }} {{.CamelName}}{{if .IsNameReserved}}Value{{end}};
{{end}}

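To make the template change concrete, here is a sketch of the kind of field the new `{{if .IsHeader}}` branch emits; the class and field names are illustrative, not taken from the generated SDK:

```java
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.Header;
import com.fasterxml.jackson.annotation.JsonProperty;

@Generated
public class GetMetadataResponse {
  // From the new {{if .IsHeader}} branch: filled from the "content-type"
  // response header by ApiClient.fillInHeaders, not from a JSON body.
  @Header("content-type")
  private String contentType;

  // From the existing {{if .IsJson}} branch: a regular body field.
  @JsonProperty("path")
  private String path;
}
```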
@@ -6,6 +6,7 @@
import com.databricks.sdk.core.http.Response;
import com.databricks.sdk.core.utils.SystemTimer;
import com.databricks.sdk.core.utils.Timer;
import com.databricks.sdk.support.Header;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.DeserializationFeature;
@@ -14,6 +15,7 @@
import com.fasterxml.jackson.databind.SerializationFeature;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.Field;
import java.util.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -131,6 +133,18 @@ protected <I, O> O withJavaType(
}
}

public <O> O HEAD(String path, Class<O> target, Map<String, String> headers) {
return HEAD(path, null, target, headers);
}

public <I, O> O HEAD(String path, I in, Class<O> target, Map<String, String> headers) {
try {
return execute(prepareRequest("HEAD", path, in, headers), target);
} catch (IOException e) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
}

public <O> O GET(String path, Class<O> target, Map<String, String> headers) {
return GET(path, null, target, headers);
}
@@ -212,7 +226,7 @@ private <T> T execute(Request in, Class<T> target) throws IOException {
if (target == Void.class) {
return null;
}
return deserialize(out.getBody(), target);
return deserialize(out, target);
}

private Response getResponse(Request in) {
@@ -319,11 +333,18 @@ private String makeLogRecord(Request in, Response out) {
return sb.toString();
}

public <T> T deserialize(InputStream body, Class<T> target) throws IOException {
public <T> T deserialize(Response response, Class<T> target) throws IOException {
if (target == InputStream.class) {
return (T) body;
return (T) response.getBody();
}
return mapper.readValue(body, target);
T object;
try {
object = target.getDeclaredConstructor().newInstance();
} catch (Exception e) {
throw new DatabricksException("Unable to initialize an instance of type " + target.getName());
}
deserialize(response, object);
return object;
}

public <T> T deserialize(InputStream body, JavaType target) throws IOException {
@@ -333,6 +354,60 @@ public <T> T deserialize(InputStream body, JavaType target) throws IOException {
return mapper.readValue(body, target);
}

private <T> void fillInHeaders(T target, Response response) {
for (Field field : target.getClass().getDeclaredFields()) {
Header headerAnnotation = field.getAnnotation(Header.class);
if (headerAnnotation == null) {
continue;
}
String firstHeader = response.getFirstHeader(headerAnnotation.value());
if (firstHeader == null) {
continue;
}
try {
field.setAccessible(true);
if (field.getType() == String.class) {
field.set(target, firstHeader);
} else if (field.getType() == Long.class) {
field.set(target, Long.parseLong(firstHeader));
} else {
LOG.warn("Unsupported header type: " + field.getType());
}
} catch (IllegalAccessException e) {
throw new DatabricksException("Failed to unmarshal headers: " + e.getMessage(), e);
} finally {
field.setAccessible(false);
}
}
}

private <T> Optional<Field> getContentsField(T target) {
for (Field field : target.getClass().getDeclaredFields()) {
if (field.getName().equals("contents") && field.getType() == InputStream.class) {
return Optional.of(field);
}
}
return Optional.empty();
}

public <T> void deserialize(Response response, T object) throws IOException {
fillInHeaders(object, response);
Optional<Field> contentsField = getContentsField(object);
if (contentsField.isPresent()) {
Field field = contentsField.get();
try {
field.setAccessible(true);
field.set(object, response.getBody());
} catch (IllegalAccessException e) {
throw new DatabricksException("Failed to unmarshal headers: " + e.getMessage(), e);
} finally {
field.setAccessible(false);
}
} else if (response.getBody() != null) {
mapper.readerForUpdating(object).readValue(response.getBody());
}
}

private String serialize(Object body) throws JsonProcessingException {
if (body == null) {
return null;
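The new `deserialize(Response, T)` overload above fills `@Header` fields first and then lets Jackson update the same instance from the body. A self-contained sketch of that `readerForUpdating` idiom, with a hypothetical POJO:

```java
import com.fasterxml.jackson.databind.ObjectMapper;

public class ReaderForUpdatingDemo {
  // Hypothetical response type standing in for a generated model.
  public static class FileInfo {
    public String contentType;
    public String path;
  }

  public static void main(String[] args) throws Exception {
    ObjectMapper mapper = new ObjectMapper();
    FileInfo info = new FileInfo();
    info.contentType = "application/octet-stream"; // pre-set, as fillInHeaders would

    // readerForUpdating mutates the existing instance instead of building a
    // new one, so the header-derived field set above survives body parsing.
    mapper.readerForUpdating(info).readValue("{\"path\": \"/tmp/hello.txt\"}");

    System.out.println(info.contentType + " " + info.path);
  }
}
```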
@@ -122,6 +122,8 @@ private HttpUriRequest transformRequest(Request in) {
switch (in.getMethod()) {
case Request.GET:
return new HttpGet(in.getUri());
case Request.HEAD:
return new HttpHead(in.getUri());
case Request.DELETE:
return new HttpDelete(in.getUri());
case Request.POST:
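`HttpHead` is the stock Apache HttpComponents 4.x request type, added here alongside the existing verbs. For reference, a standalone sketch of issuing a HEAD request and reading headers from the body-less response (the URL is a placeholder):

```java
import org.apache.http.Header;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpHead;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;

public class HttpHeadDemo {
  public static void main(String[] args) throws Exception {
    try (CloseableHttpClient client = HttpClients.createDefault()) {
      HttpHead head = new HttpHead("https://example.com/some/file");
      try (CloseableHttpResponse response = client.execute(head)) {
        // A HEAD response carries status and headers but no entity/body.
        Header contentLength = response.getFirstHeader("Content-Length");
        if (contentLength != null) {
          System.out.println(contentLength.getValue());
        }
      }
    }
  }
}
```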
@@ -10,6 +10,7 @@

public class Request {
public static final String GET = "GET";
public static final String HEAD = "HEAD";
public static final String DELETE = "DELETE";
public static final String POST = "POST";
public static final String PUT = "PUT";

4 generated files are not rendered by default.

@@ -0,0 +1,13 @@
package com.databricks.sdk.support;

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

/** Annotation to indicate that a field is a Header. */
@Target({ElementType.FIELD})
@Retention(RetentionPolicy.RUNTIME)
public @interface Header {
String value();
}
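
Because the annotation targets fields and is retained at runtime, `fillInHeaders` can discover it reflectively. A tiny sketch of that lookup against a hypothetical model:

```java
import com.databricks.sdk.support.Header;
import java.lang.reflect.Field;

public class HeaderLookupDemo {
  // Hypothetical model with one header-mapped field.
  static class Metadata {
    @Header("last-modified")
    private String lastModified;
  }

  public static void main(String[] args) throws Exception {
    Field field = Metadata.class.getDeclaredField("lastModified");
    Header annotation = field.getAnnotation(Header.class);
    // Prints "last-modified": the HTTP header this field is filled from.
    System.out.println(annotation.value());
  }
}
```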
@@ -44,7 +44,10 @@ private void writeFileAndReadFileInner(

// Write the file to DBFS.
workspace.files().upload(fileName, inputStream);

// TODO: Enable after the SDK is generated
// Check header deserialization
// GetMetadataResponse metadata = workspace.files().getMetadata(fileName);
// Assertions.assertEquals("application/octet-stream", metadata.getContentType());
// Read the file back from DBFS.
try (InputStream readContents = workspace.files().download(fileName).getContents()) {
byte[] result = new byte[fileContents.length];
