Fix dsc class scanner spec error - ignore #573

Merged: 2 commits merged on Nov 10, 2017. Changes from all commits are shown below.

CHANGELOG.md (5 changes: 4 additions & 1 deletion)
@@ -169,6 +169,9 @@ Current

### Fixed:

- [Fix intermittent class scanner error on DataSourceConstraint equal](https://github.com/yahoo/fili/pull/573)
  * Class Scanner Spec was injecting an improper dependent field due to type erasure. Made field type explicit.

- [Fix tests with wrong time offset calculation](https://github.com/yahoo/fili/pull/567)
  * Time-checking based tests set up the time offset in a wrong way. `timeZoneId.getOffset` is fixed to take the right
    argument.
@@ -1342,4 +1345,4 @@ Jobs resource. Here are the highlights of what's in this release:

- [`DruidDimensionsLoader` doesn't set the dimension's lastUpdated date](https://github.com/yahoo/fili/pull/24)
  * `DruidDimensionsLoader` now properly sets the `lastUpdated` field after it finishes processing the Druid response

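The changelog entry above points at the root cause: the class-scanner spec injects dependent field values (presumably by reflecting on field types), and because of Java type erasure every field declared as `Map<Dimension, Set<ApiFilter>>` looks like a plain `Map` at runtime, so the spec could occasionally hand `DataSourceConstraint.equals` a value built for a different map-typed field. Declaring the field with the explicit `ApiFilters` type gives it an unambiguous runtime type. The snippet below is a minimal, hypothetical sketch of that erasure behaviour; the field names and the `ExplicitFilters` class are illustrative, not Fili code.

```java
import java.lang.reflect.Field;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;

// Hypothetical sketch, not Fili's ClassScannerSpec: shows why a test that picks
// dependency values by a field's runtime type can confuse two differently
// parameterized Map fields, while a dedicated subclass keeps a distinct type.
public class ErasureSketch {

    // Both of these erase to the raw type java.util.Map at runtime.
    static Map<String, Set<Integer>> apiFilters;
    static Map<String, Set<Long>> otherFilters;

    // A named subclass survives erasure as its own type.
    static class ExplicitFilters extends LinkedHashMap<String, Set<Integer>> { }
    static ExplicitFilters explicitFilters;

    public static void main(String[] args) throws NoSuchFieldException {
        for (String name : new String[] {"apiFilters", "otherFilters", "explicitFilters"}) {
            Field field = ErasureSketch.class.getDeclaredField(name);
            // Prints "interface java.util.Map" for the first two fields and
            // "class ErasureSketch$ExplicitFilters" for the third.
            System.out.println(name + " -> " + field.getType());
        }
    }
}
```

The fix in this PR follows the same idea: the field is typed as the concrete `ApiFilters` class (added at the bottom of this diff), so any type-driven injection resolves it unambiguously.
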
DataSourceConstraint.java
@@ -5,17 +5,18 @@
import com.yahoo.bard.webservice.data.dimension.Dimension;
import com.yahoo.bard.webservice.druid.model.query.DruidAggregationQuery;
import com.yahoo.bard.webservice.table.PhysicalTable;
import com.yahoo.bard.webservice.web.ApiFilter;
import com.yahoo.bard.webservice.web.DataApiRequest;
import com.yahoo.bard.webservice.web.filters.ApiFilters;

import java.util.Collections;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;

import javax.validation.constraints.NotNull;

/**
* Constraints for retrieving potential table availability for a given query.
*/
@@ -26,7 +27,7 @@ public class DataSourceConstraint {
private final Set<Dimension> filterDimensions;
private final Set<Dimension> metricDimensions;
private final Set<String> metricNames;
private final Map<Dimension, Set<ApiFilter>> apiFilters;
private final ApiFilters apiFilters;

// Calculated fields
private final Set<Dimension> allDimensions;
@@ -44,7 +45,7 @@ public DataSourceConstraint(DataApiRequest dataApiRequest, DruidAggregationQuery
this.filterDimensions = Collections.unmodifiableSet(dataApiRequest.getFilterDimensions());
this.metricDimensions = Collections.unmodifiableSet(templateDruidQuery.getMetricDimensions());
this.metricNames = Collections.unmodifiableSet(templateDruidQuery.getDependentFieldNames());
this.apiFilters = Collections.unmodifiableMap(dataApiRequest.getApiFilters());
this.apiFilters = new ApiFilters(dataApiRequest.getApiFilters());
this.allDimensions = generateAllDimensions();
this.allDimensionNames = generateAllDimensionNames();
this.allColumnNames = generateAllColumnNames();
@@ -60,11 +61,11 @@ public DataSourceConstraint(DataApiRequest dataApiRequest, DruidAggregationQuery
* @param apiFilters Map of dimension to its set of API filters
*/
protected DataSourceConstraint(
Set<Dimension> requestDimensions,
Set<Dimension> filterDimensions,
Set<Dimension> metricDimensions,
Set<String> metricNames,
Map<Dimension, Set<ApiFilter>> apiFilters
@NotNull Set<Dimension> requestDimensions,
@NotNull Set<Dimension> filterDimensions,
@NotNull Set<Dimension> metricDimensions,
@NotNull Set<String> metricNames,
@NotNull ApiFilters apiFilters
) {
this.requestDimensions = Collections.unmodifiableSet(requestDimensions);
this.filterDimensions = Collections.unmodifiableSet(filterDimensions);
@@ -89,18 +90,19 @@ protected DataSourceConstraint(
* @param apiFilters Map of dimension to its set of API filters
*
* @deprecated "allDimensions", "allDimensionNames", and "allColumnNames" can be generated by other arguments. Use
* {@link #DataSourceConstraint(Set, Set, Set, Set, Map)} instead, which saves caller argument-passing efforts.
* {@link #DataSourceConstraint(Set, Set, Set, Set, ApiFilters)} instead, which saves caller argument-passing
* efforts.
*/
@Deprecated
protected DataSourceConstraint(
Set<Dimension> requestDimensions,
Set<Dimension> filterDimensions,
Set<Dimension> metricDimensions,
Set<String> metricNames,
Set<Dimension> allDimensions,
Set<String> allDimensionNames,
Set<String> allColumnNames,
Map<Dimension, Set<ApiFilter>> apiFilters
@NotNull Set<Dimension> requestDimensions,
@NotNull Set<Dimension> filterDimensions,
@NotNull Set<Dimension> metricDimensions,
@NotNull Set<String> metricNames,
@NotNull Set<Dimension> allDimensions,
@NotNull Set<String> allDimensionNames,
@NotNull Set<String> allColumnNames,
@NotNull ApiFilters apiFilters
) {
this.requestDimensions = requestDimensions;
this.filterDimensions = filterDimensions;
@@ -156,7 +158,7 @@ public Set<String> getAllColumnNames() {
return allColumnNames;
}

public Map<Dimension, Set<ApiFilter>> getApiFilters() {
public ApiFilters getApiFilters() {
return apiFilters;
}

@@ -205,7 +207,7 @@ public static DataSourceConstraint unconstrained(PhysicalTable table) {
.map(Dimension::getApiName)
.collect(Collectors.toSet()),
table.getSchema().getColumnNames(),
Collections.emptyMap()
new ApiFilters(Collections.emptyMap())
);
}

@@ -222,7 +224,6 @@ public boolean equals(final Object obj) {
&& Objects.equals(this.metricNames, that.metricNames)
&& Objects.equals(this.apiFilters, that.apiFilters);
}

return false;
}

QueryPlanningConstraint.java
@@ -7,14 +7,13 @@
import com.yahoo.bard.webservice.data.metric.TemplateDruidQuery;
import com.yahoo.bard.webservice.druid.model.query.Granularity;
import com.yahoo.bard.webservice.table.LogicalTable;
import com.yahoo.bard.webservice.web.ApiFilter;
import com.yahoo.bard.webservice.web.DataApiRequest;
import com.yahoo.bard.webservice.web.TablesApiRequest;
import com.yahoo.bard.webservice.web.filters.ApiFilters;

import org.joda.time.Interval;

import java.util.Collections;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;
@@ -52,7 +51,7 @@ public QueryPlanningConstraint(
Set<Dimension> filterDimensions,
Set<Dimension> metricDimensions,
Set<String> metricNames,
Map<Dimension, Set<ApiFilter>> apiFilters,
ApiFilters apiFilters,
LogicalTable logicalTable,
Set<Interval> intervals,
Set<LogicalMetric> logicalMetrics,
DataApiRequest.java
@@ -13,6 +13,7 @@
import com.yahoo.bard.webservice.druid.model.query.Granularity;
import com.yahoo.bard.webservice.table.LogicalTable;
import com.yahoo.bard.webservice.web.apirequest.DataApiRequestImpl;
import com.yahoo.bard.webservice.web.filters.ApiFilters;
import com.yahoo.bard.webservice.web.util.PaginationParameters;

import org.joda.time.DateTimeZone;
@@ -155,7 +156,7 @@ public interface DataApiRequest extends ApiRequest {
*
* @return a map of filters by dimension
*/
Map<Dimension, Set<ApiFilter>> getApiFilters();
ApiFilters getApiFilters();

/**
* Generates filter objects based on the filter query in the api request.
@@ -195,7 +196,7 @@ DataApiRequestImpl withPerDimensionFields(LinkedHashMap<Dimension,

DataApiRequestImpl withIntervals(Set<Interval> intervals);

DataApiRequestImpl withFilters(Map<Dimension, Set<ApiFilter>> filters);
DataApiRequestImpl withFilters(ApiFilters filters);

DataApiRequestImpl withHavings(Map<LogicalMetric, Set<ApiHaving>> havings);

TablesApiRequest.java
@@ -6,15 +6,18 @@
import com.yahoo.bard.webservice.data.metric.LogicalMetric;
import com.yahoo.bard.webservice.druid.model.query.Granularity;
import com.yahoo.bard.webservice.table.LogicalTable;
import com.yahoo.bard.webservice.web.filters.ApiFilters;
import com.yahoo.bard.webservice.web.util.PaginationParameters;

import org.joda.time.Interval;

import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
import java.util.Map;
import java.util.Optional;
import java.util.Set;

import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;

/**
* Tables API Request. Such an API Request binds, validates, and models the parts of a request to the tables endpoint.
*/
@@ -61,7 +64,7 @@ public interface TablesApiRequest extends ApiRequest {
*
* @return the map of filters by dimension for this request, grouped by dimensions
*/
Map<Dimension, Set<ApiFilter>> getApiFilters();
ApiFilters getApiFilters();

/**
* Returns the intervals for this query.
ApiRequestImpl.java
@@ -41,6 +41,7 @@
import com.yahoo.bard.webservice.web.FilterOperation;
import com.yahoo.bard.webservice.web.ResponseFormatType;
import com.yahoo.bard.webservice.web.TimeMacros;
import com.yahoo.bard.webservice.web.filters.ApiFilters;
import com.yahoo.bard.webservice.web.util.PaginationLink;
import com.yahoo.bard.webservice.web.util.PaginationParameters;

@@ -57,11 +58,9 @@
import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.function.Function;
@@ -436,15 +435,15 @@ protected static Set<Interval> generateIntervals(
* contains a 'startsWith' or 'contains' operation while the BardFeatureFlag.DATA_STARTS_WITH_CONTAINS_ENABLED is
* off.
*/
public Map<Dimension, Set<ApiFilter>> generateFilters(
public ApiFilters generateFilters(
String filterQuery,
LogicalTable table,
DimensionDictionary dimensionDictionary
) throws BadApiRequestException {
try (TimedPhase timer = RequestLog.startTiming("GeneratingFilters")) {
LOG.trace("Dimension Dictionary: {}", dimensionDictionary);
// Set of filter objects
Map<Dimension, Set<ApiFilter>> generated = new LinkedHashMap<>();
ApiFilters generated = new ApiFilters();

// Filters are optional hence check if filters are requested.
if (filterQuery == null || "".equals(filterQuery)) {
DataApiRequestImpl.java
@@ -48,6 +48,7 @@
import com.yahoo.bard.webservice.web.FilterOperation;
import com.yahoo.bard.webservice.web.MetricParser;
import com.yahoo.bard.webservice.web.ResponseFormatType;
import com.yahoo.bard.webservice.web.filters.ApiFilters;
import com.yahoo.bard.webservice.web.util.BardConfigResources;
import com.yahoo.bard.webservice.web.util.PaginationParameters;

@@ -93,7 +94,7 @@ public class DataApiRequestImpl extends ApiRequestImpl implements DataApiRequest
private final LinkedHashMap<Dimension, LinkedHashSet<DimensionField>> perDimensionFields;
private final Set<LogicalMetric> logicalMetrics;
private final Set<Interval> intervals;
private final Map<Dimension, Set<ApiFilter>> apiFilters;
private final ApiFilters apiFilters;
private final Map<LogicalMetric, Set<ApiHaving>> havings;
private final Having having;
private final LinkedHashSet<OrderByColumn> sorts;
@@ -326,7 +327,7 @@ protected DataApiRequestImpl(
LinkedHashMap<Dimension, LinkedHashSet<DimensionField>> perDimensionFields,
Set<LogicalMetric> logicalMetrics,
Set<Interval> intervals,
Map<Dimension, Set<ApiFilter>> apiFilters,
ApiFilters apiFilters,
Map<LogicalMetric, Set<ApiHaving>> havings,
Having having,
LinkedHashSet<OrderByColumn> sorts,
@@ -878,7 +879,7 @@ public DataApiRequestImpl withIntervals(Set<Interval> intervals) {
}

@Override
public DataApiRequestImpl withFilters(Map<Dimension, Set<ApiFilter>> apiFilters) {
public DataApiRequestImpl withFilters(ApiFilters apiFilters) {
return new DataApiRequestImpl(format, paginationParameters, uriInfo, builder, table, granularity, dimensions, perDimensionFields, logicalMetrics, intervals, apiFilters, havings, having, sorts, count, topN, asyncAfter, timeZone, filterBuilder, havingApiGenerator, dateTimeSort);
}

@@ -965,7 +966,7 @@ public Set<Interval> getIntervals() {
}

@Override
public Map<Dimension, Set<ApiFilter>> getApiFilters() {
public ApiFilters getApiFilters() {
return this.apiFilters;
}

TablesApiRequestImpl.java
@@ -21,6 +21,7 @@
import com.yahoo.bard.webservice.web.BadApiRequestException;
import com.yahoo.bard.webservice.web.ResponseFormatType;
import com.yahoo.bard.webservice.web.TablesApiRequest;
import com.yahoo.bard.webservice.web.filters.ApiFilters;
import com.yahoo.bard.webservice.web.util.BardConfigResources;

import com.yahoo.bard.webservice.web.util.PaginationParameters;
@@ -56,7 +57,7 @@ public class TablesApiRequestImpl extends ApiRequestImpl implements TablesApiReq
private final Set<Dimension> dimensions;
private final Set<LogicalMetric> logicalMetrics;
private final Set<Interval> intervals;
private final Map<Dimension, Set<ApiFilter>> apiFilters;
private final ApiFilters apiFilters;

/**
* Parses the API request URL and generates the Api Request object.
Expand Down Expand Up @@ -104,7 +105,7 @@ public TablesApiRequestImpl(
dimensions = Collections.emptySet();
logicalMetrics = Collections.emptySet();
intervals = Collections.emptySet();
apiFilters = Collections.emptyMap();
apiFilters = new ApiFilters(Collections.emptyMap());

LOG.debug(
"Api request: Tables: {},\nGranularity: {},\nFormat: {}\nPagination: {}" +
@@ -252,7 +253,7 @@ private TablesApiRequestImpl(
Set<Dimension> dimensions,
Set<LogicalMetric> metrics,
Set<Interval> intervals,
Map<Dimension, Set<ApiFilter>> filters
ApiFilters filters
) {
super(format, SYNCHRONOUS_ASYNC_AFTER_VALUE, paginationParameters, uriInfo, builder);
this.tables = tables;
@@ -394,7 +395,7 @@ public Set<Dimension> getFilterDimensions() {
}

@Override
public Map<Dimension, Set<ApiFilter>> getApiFilters() {
public ApiFilters getApiFilters() {
return apiFilters;
}

ApiFilters.java (new file)
@@ -0,0 +1,31 @@
// Copyright 2017 Yahoo Inc.
// Licensed under the terms of the Apache license. Please see LICENSE.md file distributed with this work for terms.
package com.yahoo.bard.webservice.web.filters;

import com.yahoo.bard.webservice.data.dimension.Dimension;
import com.yahoo.bard.webservice.web.ApiFilter;

import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;

/**
* ApiFilters specializes the type of the ApiFilterMap.
*/
public class ApiFilters extends LinkedHashMap<Dimension, Set<ApiFilter>> {

/**
* Constructor.
*/
public ApiFilters() {
Review comment (Collaborator): Does this constructor need to call super() or is it not needed?

Reply (Collaborator): It should work, good to merge.

}

/**
* Constructor.
*
* @param filters A set of filters to copy.
*/
public ApiFilters(Map<Dimension, Set<ApiFilter>> filters) {
super(filters);
}
}
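
On the reviewer's question in the new file: a Java constructor with no explicit `this(...)` or `super(...)` statement implicitly invokes the superclass no-argument constructor, so `ApiFilters()` already chains to `LinkedHashMap()` and no explicit `super()` call is required. Below is a small usage sketch of the new type as this diff uses it; the `wrap` helper is illustrative, not Fili code, and it assumes the Fili classes shown in this PR are on the classpath.

```java
import com.yahoo.bard.webservice.data.dimension.Dimension;
import com.yahoo.bard.webservice.web.ApiFilter;
import com.yahoo.bard.webservice.web.filters.ApiFilters;

import java.util.Collections;
import java.util.Map;
import java.util.Set;

public class ApiFiltersUsageSketch {

    /**
     * Wraps a raw filter map in the explicit ApiFilters type, mirroring what the
     * DataSourceConstraint constructor in this diff does with
     * dataApiRequest.getApiFilters(). Illustrative helper, not part of Fili.
     */
    static ApiFilters wrap(Map<Dimension, Set<ApiFilter>> rawFilters) {
        // The copy constructor takes a defensive copy of the incoming entries;
        // the no-arg constructor implicitly calls LinkedHashMap().
        return rawFilters == null ? new ApiFilters() : new ApiFilters(rawFilters);
    }

    public static void main(String[] args) {
        ApiFilters empty = wrap(Collections.emptyMap());
        System.out.println(empty.isEmpty()); // prints: true

        // ApiFilters is still a Map<Dimension, Set<ApiFilter>>, so callers that
        // only need the Map view keep working unchanged.
        Map<Dimension, Set<ApiFilter>> asMap = empty;
        System.out.println(asMap.size()); // prints: 0
    }
}
```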