Skip to content

Commit

Permalink
Have Table Endpoint Filter Using QueryPlanningConstraint
Browse files Browse the repository at this point in the history
  • Loading branch information
QubitPi committed Oct 30, 2017
1 parent 2c923ab commit f78aad5
Show file tree
Hide file tree
Showing 11 changed files with 407 additions and 67 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -45,18 +45,35 @@ public DataSourceConstraint(DataApiRequest dataApiRequest, DruidAggregationQuery
this.metricDimensions = Collections.unmodifiableSet(templateDruidQuery.getMetricDimensions());
this.metricNames = Collections.unmodifiableSet(templateDruidQuery.getDependentFieldNames());
this.apiFilters = Collections.unmodifiableMap(dataApiRequest.getApiFilters());
this.allDimensions = Collections.unmodifiableSet(Stream.of(
getRequestDimensions().stream(),
getFilterDimensions().stream(),
getMetricDimensions().stream()
).flatMap(Function.identity()).collect(Collectors.toSet()));
this.allDimensionNames = Collections.unmodifiableSet(allDimensions.stream()
.map(Dimension::getApiName)
.collect(Collectors.toSet()));
this.allColumnNames = Collections.unmodifiableSet(Stream.concat(
allDimensionNames.stream(),
metricNames.stream()
).collect(Collectors.toSet()));
this.allDimensions = generateAllDimensions();
this.allDimensionNames = generateAllDimensionNames();
this.allColumnNames = generateAllColumnNames();
}

/**
 * Constructor.
 * <p>
 * All provided sets and the filter map are stored as unmodifiable views; the derived sets
 * (all dimensions, all dimension names, all column names) are generated from the arguments.
 *
 * @param requestDimensions  Dimensions contained in request
 * @param filterDimensions  Filtered dimensions
 * @param metricDimensions  Metric related dimensions
 * @param metricNames  Names of metrics
 * @param apiFilters  Map of dimension to its set of API filters
 */
protected DataSourceConstraint(
        Set<Dimension> requestDimensions,
        Set<Dimension> filterDimensions,
        Set<Dimension> metricDimensions,
        Set<String> metricNames,
        Map<Dimension, Set<ApiFilter>> apiFilters
) {
    this.requestDimensions = Collections.unmodifiableSet(requestDimensions);
    this.filterDimensions = Collections.unmodifiableSet(filterDimensions);
    this.metricDimensions = Collections.unmodifiableSet(metricDimensions);
    this.metricNames = Collections.unmodifiableSet(metricNames);
    // Wrap for consistency with the public constructor, which exposes an unmodifiable map;
    // storing the raw reference would leak mutability to the caller.
    this.apiFilters = Collections.unmodifiableMap(apiFilters);
    // Derived views must be generated only after the backing sets above are assigned.
    this.allDimensions = generateAllDimensions();
    this.allDimensionNames = generateAllDimensionNames();
    this.allColumnNames = generateAllColumnNames();
}

/**
Expand All @@ -70,7 +87,11 @@ public DataSourceConstraint(DataApiRequest dataApiRequest, DruidAggregationQuery
* @param allDimensionNames Set of all dimension names
* @param allColumnNames Set of all column names
* @param apiFilters Map of dimension to its set of API filters
*
* @deprecated "allDimensions", "allDimensionNames", and "allColumnNames" can be generated by other arguments. Use
* {@link #DataSourceConstraint(Set, Set, Set, Set, Map)} instead, which saves caller argument-passing efforts.
*/
@Deprecated
protected DataSourceConstraint(
Set<Dimension> requestDimensions,
Set<Dimension> filterDimensions,
Expand Down Expand Up @@ -209,4 +230,56 @@ public boolean equals(final Object obj) {
// Hashes the same core fields the class's equals() compares; derived sets (allDimensions,
// allDimensionNames, allColumnNames) are excluded because they are fully determined by these
// fields. NOTE(review): @Override may already be present on the preceding (unshown) line —
// confirm before adding one.
public int hashCode() {
    return Objects.hash(requestDimensions, filterDimensions, metricDimensions, metricNames, apiFilters);
}

/**
 * Returns an immutable set of all dimensions contained in a request.
 * <p>
 * The result is the union of
 * <ul>
 *     <li>{@link #requestDimensions}</li>
 *     <li>{@link #filterDimensions}</li>
 *     <li>{@link #metricDimensions}</li>
 * </ul>
 *
 * @return an immutable set of all dimensions contained in a request
 */
private Set<Dimension> generateAllDimensions() {
    // Concatenate the three dimension sources, dedupe into a set, then freeze it in one pass.
    return Stream.concat(
            getRequestDimensions().stream(),
            Stream.concat(getFilterDimensions().stream(), getMetricDimensions().stream())
    ).collect(Collectors.collectingAndThen(Collectors.toSet(), Collections::unmodifiableSet));
}

/**
 * Returns an immutable set of the API names of every dimension in {@link #allDimensions}.
 * <p>
 * Relies on {@link #allDimensions} having been assigned first; constructors must call
 * {@code generateAllDimensions()} before this method.
 *
 * @return an immutable set of all dimension names
 */
private Set<String> generateAllDimensionNames() {
    return Collections.unmodifiableSet(
            allDimensions.stream()
                    .map(Dimension::getApiName)
                    .collect(Collectors.toSet())
    );
}

/**
 * Returns an immutable set of all column names.
 * <p>
 * The column names are the union of {@link #allDimensionNames} and {@link #metricNames}.
 *
 * @return an immutable set of all column names
 */
private Set<String> generateAllColumnNames() {
    // Flatten both name sets, dedupe, and freeze the result in a single collect.
    return Stream.of(allDimensionNames, metricNames)
            .flatMap(Set::stream)
            .collect(Collectors.collectingAndThen(Collectors.toSet(), Collections::unmodifiableSet));
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
import com.yahoo.bard.webservice.druid.model.query.Granularity;
import com.yahoo.bard.webservice.table.LogicalTable;
import com.yahoo.bard.webservice.web.DataApiRequest;
import com.yahoo.bard.webservice.web.TablesApiRequest;

import org.joda.time.Interval;

Expand All @@ -32,8 +33,8 @@ public class QueryPlanningConstraint extends DataSourceConstraint {
/**
* Constructor.
*
* @param dataApiRequest Api request containing the constraints information.
* @param templateDruidQuery Query containing metric constraint information.
* @param dataApiRequest <b>Data API request</b> containing the constraints information
* @param templateDruidQuery Query containing metric constraint information
*/
public QueryPlanningConstraint(
@NotNull DataApiRequest dataApiRequest,
Expand All @@ -46,8 +47,29 @@ public QueryPlanningConstraint(
this.logicalMetrics = Collections.unmodifiableSet(dataApiRequest.getLogicalMetrics());
this.minimumGranularity = new RequestQueryGranularityResolver().apply(dataApiRequest, templateDruidQuery);
this.requestGranularity = dataApiRequest.getGranularity();
this.logicalMetricNames = Collections.unmodifiableSet(this.logicalMetrics.stream()
.map(LogicalMetric::getName).collect(Collectors.toSet()));
this.logicalMetricNames = generateLogicalMetricNames();
}


/**
 * Constructor for constraints drawn from a <b>Tables API request</b>.
 * <p>
 * A tables request carries no templated Druid query, so the metric-dimension and dependent
 * metric-name arguments passed to the superclass are empty.
 *
 * @param tablesApiRequest  <b>Tables API request</b> containing the constraints information.
 */
public QueryPlanningConstraint(@NotNull TablesApiRequest tablesApiRequest) {
    super(
            tablesApiRequest.getDimensions(),
            tablesApiRequest.getFilterDimensions(),
            Collections.emptySet(),  // no metric dimensions available on a tables request
            Collections.emptySet(),  // no dependent metric field names available on a tables request
            tablesApiRequest.getApiFilters()
    );
    this.logicalTable = tablesApiRequest.getTable();
    this.intervals = Collections.unmodifiableSet(tablesApiRequest.getIntervals());
    this.logicalMetrics = Collections.unmodifiableSet(tablesApiRequest.getLogicalMetrics());
    // With no template Druid query to resolve against, the request granularity serves as both the
    // minimum and the requested granularity (the data-request constructor resolves them separately).
    this.minimumGranularity = tablesApiRequest.getGranularity();
    this.requestGranularity = tablesApiRequest.getGranularity();
    // Must run after logicalMetrics is assigned above.
    this.logicalMetricNames = generateLogicalMetricNames();
}

public LogicalTable getLogicalTable() {
Expand Down Expand Up @@ -104,4 +126,17 @@ public int hashCode() {
logicalMetricNames
);
}

/**
 * Returns an immutable set containing the name of every metric in {@link #logicalMetrics}.
 *
 * @return names of all {@link #logicalMetrics}
 */
private Set<String> generateLogicalMetricNames() {
    // Map metrics to their names, dedupe, and freeze the result in one collect.
    return logicalMetrics.stream()
            .map(LogicalMetric::getName)
            .collect(Collectors.collectingAndThen(Collectors.toSet(), Collections::unmodifiableSet));
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -247,6 +247,8 @@ public enum ErrorMessageFormat implements MessageFormatter {

ETAG_MISSING_FROM_RESPONSE("JSON response is missing response etag"),

INTERNAL_SERVER_ERROR_ON_JSON_PROCESSING("Internal server error. JsonProcessingException : %s"),

INTERNAL_SERVER_ERROR_REASON_PHRASE(
"The server encountered an unexpected condition which prevented it from fulfilling the request."
),
Expand All @@ -259,7 +261,9 @@ public enum ErrorMessageFormat implements MessageFormatter {

UNABLE_TO_CREATE_DIR("Unable to create directory %s."),
UNABLE_TO_DELETE_DIR("Unable to delete directory %s."),
FAIL_TO_WIPTE_LUCENE_INDEX_DIR("Failed to wipe Lucene index at directory: %s")
FAIL_TO_WIPTE_LUCENE_INDEX_DIR("Failed to wipe Lucene index at directory: %s"),

REQUEST_PROCESSING_EXCEPTION("Exception processing request: %s")
;

private final String messageFormat;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,41 @@ public interface TablesApiRequest extends ApiRequest {
*/
Granularity getGranularity();

/**
 * Returns the set of grouping dimensions on this request.
 *
 * @return the set of grouping dimensions on this request
 */
Set<Dimension> getDimensions();

/**
 * Returns the set of dimensions used in filters on this request.
 *
 * @return the set of dimensions used in filters on this request
 */
Set<Dimension> getFilterDimensions();

/**
 * Returns this request's filters, keyed by the dimension that each set of filters applies to.
 *
 * @return the map of dimension to its set of API filters
 */
Map<Dimension, Set<ApiFilter>> getApiFilters();

/**
 * Returns the intervals for this request.
 *
 * @return the intervals for this request
 */
Set<Interval> getIntervals();

/**
 * Returns the logical metrics requested on this request.
 *
 * @return the logical metrics requested on this request
 */
Set<LogicalMetric> getLogicalMetrics();

// CHECKSTYLE:OFF
TablesApiRequest withFormat(ResponseFormatType format);

Expand Down
Loading

0 comments on commit f78aad5

Please sign in to comment.